From 2a9ca2dcab2320f68366a4bdcd7ed540d68e4a8c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 7 Jun 2024 00:49:18 +0000
Subject: [PATCH] Bump github.com/hashicorp/terraform-plugin-docs from 0.16.0 to 0.19.4

Bumps [github.com/hashicorp/terraform-plugin-docs](https://github.com/hashicorp/terraform-plugin-docs) from 0.16.0 to 0.19.4.
- [Release notes](https://github.com/hashicorp/terraform-plugin-docs/releases)
- [Changelog](https://github.com/hashicorp/terraform-plugin-docs/blob/main/CHANGELOG.md)
- [Commits](https://github.com/hashicorp/terraform-plugin-docs/compare/v0.16.0...v0.19.4)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/terraform-plugin-docs
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
---
 go.mod | 33 +-
 go.sum | 99 +-
 vendor/github.com/BurntSushi/toml/.gitignore | 2 +
 vendor/github.com/BurntSushi/toml/COPYING | 21 +
 vendor/github.com/BurntSushi/toml/README.md | 120 +
 vendor/github.com/BurntSushi/toml/decode.go | 602 ++++
 .../BurntSushi/toml/decode_go116.go | 19 +
 .../github.com/BurntSushi/toml/deprecated.go | 21 +
 vendor/github.com/BurntSushi/toml/doc.go | 11 +
 vendor/github.com/BurntSushi/toml/encode.go | 750 ++++
 vendor/github.com/BurntSushi/toml/error.go | 279 ++
 .../github.com/BurntSushi/toml/internal/tz.go | 36 +
 vendor/github.com/BurntSushi/toml/lex.go | 1233 +++++++
 vendor/github.com/BurntSushi/toml/meta.go | 121 +
 vendor/github.com/BurntSushi/toml/parse.go | 781 +++++
 .../github.com/BurntSushi/toml/type_fields.go | 242 ++
 .../github.com/BurntSushi/toml/type_toml.go | 70 +
 .../github.com/Kunde21/markdownfmt/v3/LICENSE | 22 +
 .../Kunde21/markdownfmt/v3/markdown/doc.go | 2 +
 .../Kunde21/markdownfmt/v3/markdown/indent.go | 61 +
 .../markdownfmt/v3/markdown/renderer.go | 646 ++++
 .../v3/markdown/renderer_heading.go | 89 +
 .../markdownfmt/v3/markdown/renderer_table.go | 140 +
 .../markdownfmt/v3/markdown/writer_indent.go | 106 +
 .../go-crypto/openpgp/packet/signature.go | 7 -
 .../ProtonMail/go-crypto/openpgp/read.go | 45 +-
 .../bmatcuk/doublestar/v4/.codecov.yml | 10 +
 .../bmatcuk/doublestar/v4/.gitignore | 32 +
 .../github.com/bmatcuk/doublestar/v4/LICENSE | 22 +
 .../bmatcuk/doublestar/v4/README.md | 402 +++
 .../bmatcuk/doublestar/v4/UPGRADING.md | 63 +
 .../bmatcuk/doublestar/v4/doublestar.go | 13 +
 .../github.com/bmatcuk/doublestar/v4/glob.go | 473 +++
 .../bmatcuk/doublestar/v4/globoptions.go | 144 +
 .../bmatcuk/doublestar/v4/globwalk.go | 414 +++
 .../github.com/bmatcuk/doublestar/v4/match.go | 381 +++
 .../github.com/bmatcuk/doublestar/v4/utils.go | 147 +
 .../bmatcuk/doublestar/v4/validate.go | 82 +
 .../{mitchellh => hashicorp}/cli/LICENSE | 0
 .../{mitchellh => hashicorp}/cli/Makefile | 0
 .../{mitchellh => hashicorp}/cli/README.md | 6 +-
 .../cli/autocomplete.go | 3 +
 .../{mitchellh => hashicorp}/cli/cli.go | 3 +
 .../{mitchellh => hashicorp}/cli/command.go | 3 +
 .../cli/command_mock.go | 3 +
 .../{mitchellh => hashicorp}/cli/help.go | 3 +
 vendor/github.com/hashicorp/cli/ui.go | 63 +
 .../cli/ui_colored.go | 5 +
 .../cli/ui.go => hashicorp/cli/ui_common.go} | 53 -
 .../cli/ui_concurrent.go | 3 +
 vendor/github.com/hashicorp/cli/ui_js.go | 10 +
 .../{mitchellh => hashicorp}/cli/ui_mock.go | 3 +
 .../{mitchellh => hashicorp}/cli/ui_writer.go | 3 +
 .../hashicorp/go-version/CHANGELOG.md | 19 +
 .../github.com/hashicorp/go-version/LICENSE | 2 +
 .../github.com/hashicorp/go-version/README.md | 2 +-
 .../hashicorp/go-version/constraint.go | 6 +-
 .../hashicorp/go-version/version.go | 46 +-
 .../go-version/version_collection.go | 3 +
 .../github.com/hashicorp/hc-install/README.md | 88 +-
 .../hc-install/checkpoint/latest_version.go | 12 +-
 .../github.com/hashicorp/hc-install/fs/fs.go | 4 +-
 .../hashicorp/hc-install/fs/fs_unix.go | 4 +-
 .../hashicorp/hc-install/installer.go | 4 +-
 .../hc-install/internal/build/go_build.go | 8 +-
 .../releasesjson/checksum_downloader.go | 4 +-
 .../internal/releasesjson/downloader.go | 68 +-
 .../internal/releasesjson/product_version.go | 3 +-
 .../internal/releasesjson/releases.go | 8 +-
 .../hc-install/releases/exact_version.go | 22 +-
 .../hc-install/releases/latest_version.go | 22 +-
 .../hashicorp/hc-install/releases/releases.go | 4 +-
 .../hashicorp/hc-install/version/VERSION | 2 +-
 .../internal/version/version.go | 2 +-
 .../hashicorp/terraform-exec/tfexec/apply.go | 29 +-
 .../terraform-exec/tfexec/options.go | 12 +
 .../hashicorp/terraform-exec/tfexec/plan.go | 48 +-
 .../terraform-exec/tfexec/version.go | 17 +
 .../hashicorp/terraform-json/plan.go | 21 +
 .../cmd/tfplugindocs/build/version.go | 18 +
 .../cmd/tfplugindocs/main.go | 17 +-
 .../cmd/tfplugindocs/version.go | 10 -
 .../internal/check/directory.go | 174 +
 .../internal/check/file.go | 39 +
 .../internal/check/file_extension.go | 64 +
 .../internal/check/file_mismatch.go | 284 ++
 .../internal/check/frontmatter.go | 104 +
 .../internal/check/provider_file.go | 67 +
 .../internal/cmd/generate.go | 7 +-
 .../internal/cmd/migrate.go | 100 +
 .../terraform-plugin-docs/internal/cmd/run.go | 27 +-
 .../internal/cmd/validate.go | 21 +-
 .../internal/functionmd/render.go | 96 +
 .../internal/mdplain/mdplain.go | 26 +-
 .../internal/mdplain/renderer.go | 250 +-
 .../internal/provider/generate.go | 441 +--
 .../internal/provider/logger.go | 27 +
 .../internal/provider/migrate.go | 409 +++
 .../internal/provider/schema.go | 136 +
 .../internal/provider/template.go | 156 +-
 .../internal/provider/util.go | 58 +-
 .../internal/provider/validate.go | 498 +--
 .../{ => internal}/schemamd/behaviors.go | 0
 .../{ => internal}/schemamd/render.go | 68 +-
 .../schemamd/write_attribute_description.go | 0
 .../schemamd/write_block_type_description.go | 0
 ...write_nested_attribute_type_description.go | 0
 .../{ => internal}/schemamd/write_type.go | 0
 .../github.com/mattn/go-runewidth/.travis.yml | 16 +
 vendor/github.com/mattn/go-runewidth/LICENSE | 21 +
 .../github.com/mattn/go-runewidth/README.md | 27 +
 .../github.com/mattn/go-runewidth/go.test.sh | 12 +
 .../mattn/go-runewidth/runewidth.go | 257 ++
 .../mattn/go-runewidth/runewidth_appengine.go | 8 +
 .../mattn/go-runewidth/runewidth_js.go | 9 +
 .../mattn/go-runewidth/runewidth_posix.go | 82 +
 .../mattn/go-runewidth/runewidth_table.go | 437 +++
 .../mattn/go-runewidth/runewidth_windows.go | 28 +
 .../russross/blackfriday/.gitignore | 8 -
 .../russross/blackfriday/.travis.yml | 18 -
 .../russross/blackfriday/LICENSE.txt | 28 -
 .../github.com/russross/blackfriday/README.md | 364 --
 .../github.com/russross/blackfriday/block.go | 1480 --------
 vendor/github.com/russross/blackfriday/doc.go | 32 -
 .../github.com/russross/blackfriday/html.go | 945 -----
 .../github.com/russross/blackfriday/inline.go | 1154 -------
 .../github.com/russross/blackfriday/latex.go | 334 --
 .../russross/blackfriday/markdown.go | 943 -----
 .../russross/blackfriday/smartypants.go | 430 ---
 .../github.com/yuin/goldmark-meta/.gitignore | 13 +
 vendor/github.com/yuin/goldmark-meta/LICENSE | 21 +
 .../github.com/yuin/goldmark-meta/README.md | 187 +
 vendor/github.com/yuin/goldmark-meta/meta.go | 320 ++
 vendor/github.com/yuin/goldmark/.gitignore | 19 +
 vendor/github.com/yuin/goldmark/.golangci.yml | 105 +
 vendor/github.com/yuin/goldmark/LICENSE | 21 +
 vendor/github.com/yuin/goldmark/Makefile | 13 +
 vendor/github.com/yuin/goldmark/README.md | 568 +++
 vendor/github.com/yuin/goldmark/ast/ast.go | 508 +++
 vendor/github.com/yuin/goldmark/ast/block.go | 508 +++
 vendor/github.com/yuin/goldmark/ast/inline.go | 549 +++
 .../goldmark/extension/ast/definition_list.go | 83 +
 .../yuin/goldmark/extension/ast/footnote.go | 138 +
 .../goldmark/extension/ast/strikethrough.go | 29 +
 .../yuin/goldmark/extension/ast/table.go | 158 +
 .../yuin/goldmark/extension/ast/tasklist.go | 35 +
 .../github.com/yuin/goldmark/extension/cjk.go | 72 +
 .../goldmark/extension/definition_list.go | 274 ++
 .../yuin/goldmark/extension/footnote.go | 691 ++++
 .../github.com/yuin/goldmark/extension/gfm.go | 18 +
 .../yuin/goldmark/extension/linkify.go | 322 ++
 .../yuin/goldmark/extension/package.go | 2 +
 .../yuin/goldmark/extension/strikethrough.go | 117 +
 .../yuin/goldmark/extension/table.go | 564 +++
 .../yuin/goldmark/extension/tasklist.go | 120 +
 .../yuin/goldmark/extension/typographer.go | 348 ++
 vendor/github.com/yuin/goldmark/markdown.go | 140 +
 .../yuin/goldmark/parser/attribute.go | 329 ++
 .../yuin/goldmark/parser/atx_heading.go | 248 ++
 .../yuin/goldmark/parser/auto_link.go | 42 +
 .../yuin/goldmark/parser/blockquote.go | 69 +
 .../yuin/goldmark/parser/code_block.go | 100 +
 .../yuin/goldmark/parser/code_span.go | 84 +
 .../yuin/goldmark/parser/delimiter.go | 238 ++
 .../yuin/goldmark/parser/emphasis.go | 50 +
 .../yuin/goldmark/parser/fcode_block.go | 121 +
 .../yuin/goldmark/parser/html_block.go | 229 ++
 .../github.com/yuin/goldmark/parser/link.go | 410 +++
 .../yuin/goldmark/parser/link_ref.go | 152 +
 .../github.com/yuin/goldmark/parser/list.go | 287 ++
 .../yuin/goldmark/parser/list_item.go | 90 +
 .../yuin/goldmark/parser/paragraph.go | 72 +
 .../github.com/yuin/goldmark/parser/parser.go | 1259 +++++++
 .../yuin/goldmark/parser/raw_html.go | 153 +
 .../yuin/goldmark/parser/setext_headings.go | 126 +
 .../yuin/goldmark/parser/thematic_break.go | 75 +
 .../yuin/goldmark/renderer/html/html.go | 1026 ++++++
 .../yuin/goldmark/renderer/renderer.go | 174 +
 .../github.com/yuin/goldmark/text/package.go | 2 +
 .../github.com/yuin/goldmark/text/reader.go | 660 ++++
 .../github.com/yuin/goldmark/text/segment.go | 209 ++
 .../yuin/goldmark/util/html5entities.go | 2143 ++++++++++++
 .../goldmark/util/unicode_case_folding.go | 1535 +++++++++
 vendor/github.com/yuin/goldmark/util/util.go | 982 ++++++
 .../github.com/yuin/goldmark/util/util_cjk.go | 469 +++
 .../yuin/goldmark/util/util_safe.go | 14 +
 .../yuin/goldmark/util/util_unsafe.go | 24 +
 .../go-cty/cty/function/stdlib/collection.go | 4 +-
 .../go-cty/cty/function/stdlib/conversion.go | 5 +-
 .../zclconf/go-cty/cty/json/marshal.go | 12 +-
 .../goldmark/frontmatter/.changie.yaml | 26 +
 .../goldmark/frontmatter/.gitignore | 4 +
 .../goldmark/frontmatter/.golangci.yml | 26 +
 .../goldmark/frontmatter/CHANGELOG.md | 13 +
 .../go.abhg.dev/goldmark/frontmatter/LICENSE | 28 +
 .../go.abhg.dev/goldmark/frontmatter/Makefile | 63 +
 .../goldmark/frontmatter/README.md | 197 ++
 .../go.abhg.dev/goldmark/frontmatter/data.go | 45 +
 .../go.abhg.dev/goldmark/frontmatter/doc.go | 6 +
 .../goldmark/frontmatter/extend.go | 70 +
 .../goldmark/frontmatter/format.go | 58 +
 .../goldmark/frontmatter/mode_string.go | 24 +
 .../go.abhg.dev/goldmark/frontmatter/parse.go | 192 ++
 .../goldmark/frontmatter/transform.go | 35 +
 vendor/golang.org/x/mod/modfile/read.go | 2 +-
 vendor/golang.org/x/mod/modfile/rule.go | 9 +-
 vendor/golang.org/x/net/http2/frame.go | 31 +
 vendor/golang.org/x/net/http2/pipe.go | 11 +-
 vendor/golang.org/x/net/http2/server.go | 13 +-
 vendor/golang.org/x/net/http2/testsync.go | 331 ++
 vendor/golang.org/x/net/http2/transport.go | 307 +-
 vendor/gopkg.in/yaml.v2/.travis.yml | 16 +
 vendor/gopkg.in/yaml.v2/LICENSE | 201 ++
 vendor/gopkg.in/yaml.v2/LICENSE.libyaml | 31 +
 vendor/gopkg.in/yaml.v2/NOTICE | 13 +
 vendor/gopkg.in/yaml.v2/README.md | 133 +
 vendor/gopkg.in/yaml.v2/apic.go | 740 ++++
 vendor/gopkg.in/yaml.v2/decode.go | 815 +++++
 vendor/gopkg.in/yaml.v2/emitterc.go | 1685 +++++++++
 vendor/gopkg.in/yaml.v2/encode.go | 390 +++
 vendor/gopkg.in/yaml.v2/parserc.go | 1095 ++++++
 vendor/gopkg.in/yaml.v2/readerc.go | 412 +++
 vendor/gopkg.in/yaml.v2/resolve.go | 258 ++
 vendor/gopkg.in/yaml.v2/scannerc.go | 2711 +++++++++++++++
 vendor/gopkg.in/yaml.v2/sorter.go | 113 +
 vendor/gopkg.in/yaml.v2/writerc.go | 26 +
 vendor/gopkg.in/yaml.v2/yaml.go | 466 +++
 vendor/gopkg.in/yaml.v2/yamlh.go | 739 ++++
 vendor/gopkg.in/yaml.v2/yamlprivateh.go | 173 +
 vendor/gopkg.in/yaml.v3/LICENSE | 50 +
 vendor/gopkg.in/yaml.v3/NOTICE | 13 +
 vendor/gopkg.in/yaml.v3/README.md | 150 +
 vendor/gopkg.in/yaml.v3/apic.go | 747 ++++
 vendor/gopkg.in/yaml.v3/decode.go | 1000 ++++++
 vendor/gopkg.in/yaml.v3/emitterc.go | 2020 +++++++++++
 vendor/gopkg.in/yaml.v3/encode.go | 577 ++++
 vendor/gopkg.in/yaml.v3/parserc.go | 1258 +++++++
 vendor/gopkg.in/yaml.v3/readerc.go | 434 +++
 vendor/gopkg.in/yaml.v3/resolve.go | 326 ++
 vendor/gopkg.in/yaml.v3/scannerc.go | 3038 +++++++++++++++++
 vendor/gopkg.in/yaml.v3/sorter.go | 134 +
 vendor/gopkg.in/yaml.v3/writerc.go | 48 +
 vendor/gopkg.in/yaml.v3/yaml.go | 698 ++++
 vendor/gopkg.in/yaml.v3/yamlh.go | 807 +++++
 vendor/gopkg.in/yaml.v3/yamlprivateh.go | 198 ++
 vendor/modules.txt | 74 +-
 246 files changed, 51698 insertions(+), 6810 deletions(-)

 create mode 100644 vendor/github.com/BurntSushi/toml/.gitignore
 create mode 100644 vendor/github.com/BurntSushi/toml/COPYING
 create mode 100644 vendor/github.com/BurntSushi/toml/README.md
 create mode 100644 vendor/github.com/BurntSushi/toml/decode.go
 create mode 100644 vendor/github.com/BurntSushi/toml/decode_go116.go
 create mode 100644 vendor/github.com/BurntSushi/toml/deprecated.go
 create mode 100644 vendor/github.com/BurntSushi/toml/doc.go
 create mode 100644 vendor/github.com/BurntSushi/toml/encode.go
 create mode 100644 vendor/github.com/BurntSushi/toml/error.go
 create mode 100644 vendor/github.com/BurntSushi/toml/internal/tz.go
 create mode 100644 vendor/github.com/BurntSushi/toml/lex.go
 create mode 100644 vendor/github.com/BurntSushi/toml/meta.go
 create mode 100644 vendor/github.com/BurntSushi/toml/parse.go
 create mode 100644 vendor/github.com/BurntSushi/toml/type_fields.go
 create mode 100644 vendor/github.com/BurntSushi/toml/type_toml.go
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/LICENSE
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/markdown/doc.go
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/markdown/indent.go
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer.go
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_heading.go
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_table.go
 create mode 100644 vendor/github.com/Kunde21/markdownfmt/v3/markdown/writer_indent.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/.gitignore
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/LICENSE
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/README.md
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/doublestar.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/glob.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/globoptions.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/globwalk.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/match.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/utils.go
 create mode 100644 vendor/github.com/bmatcuk/doublestar/v4/validate.go
 rename vendor/github.com/{mitchellh => hashicorp}/cli/LICENSE (100%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/Makefile (100%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/README.md (88%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/autocomplete.go (94%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/cli.go (99%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/command.go (97%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/command_mock.go (94%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/help.go (96%)
 create mode 100644 vendor/github.com/hashicorp/cli/ui.go
 rename vendor/github.com/{mitchellh => hashicorp}/cli/ui_colored.go (94%)
 rename vendor/github.com/{mitchellh/cli/ui.go => hashicorp/cli/ui_common.go} (72%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/ui_concurrent.go (92%)
 create mode 100644 vendor/github.com/hashicorp/cli/ui_js.go
 rename vendor/github.com/{mitchellh => hashicorp}/cli/ui_mock.go (96%)
 rename vendor/github.com/{mitchellh => hashicorp}/cli/ui_writer.go (83%)
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/build/version.go
 delete mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/version.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go
 create mode 100644 vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go
 rename vendor/github.com/hashicorp/terraform-plugin-docs/{ => internal}/schemamd/behaviors.go (100%)
 rename vendor/github.com/hashicorp/terraform-plugin-docs/{ => internal}/schemamd/render.go (90%)
 rename vendor/github.com/hashicorp/terraform-plugin-docs/{ => internal}/schemamd/write_attribute_description.go (100%)
 rename vendor/github.com/hashicorp/terraform-plugin-docs/{ => internal}/schemamd/write_block_type_description.go (100%)
 rename vendor/github.com/hashicorp/terraform-plugin-docs/{ => internal}/schemamd/write_nested_attribute_type_description.go (100%)
 rename vendor/github.com/hashicorp/terraform-plugin-docs/{ => internal}/schemamd/write_type.go (100%)
 create mode 100644 vendor/github.com/mattn/go-runewidth/.travis.yml
 create mode 100644 vendor/github.com/mattn/go-runewidth/LICENSE
 create mode 100644 vendor/github.com/mattn/go-runewidth/README.md
 create mode 100644 vendor/github.com/mattn/go-runewidth/go.test.sh
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_js.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_posix.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_table.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_windows.go
 delete mode 100644 vendor/github.com/russross/blackfriday/.gitignore
 delete mode 100644 vendor/github.com/russross/blackfriday/.travis.yml
 delete mode 100644 vendor/github.com/russross/blackfriday/LICENSE.txt
 delete mode 100644 vendor/github.com/russross/blackfriday/README.md
 delete mode 100644 vendor/github.com/russross/blackfriday/block.go
 delete mode 100644 vendor/github.com/russross/blackfriday/doc.go
 delete mode 100644 vendor/github.com/russross/blackfriday/html.go
 delete mode 100644 vendor/github.com/russross/blackfriday/inline.go
 delete mode 100644 vendor/github.com/russross/blackfriday/latex.go
 delete mode 100644 vendor/github.com/russross/blackfriday/markdown.go
 delete mode 100644 vendor/github.com/russross/blackfriday/smartypants.go
 create mode 100644 vendor/github.com/yuin/goldmark-meta/.gitignore
 create mode 100644 vendor/github.com/yuin/goldmark-meta/LICENSE
 create mode 100644 vendor/github.com/yuin/goldmark-meta/README.md
 create mode 100644 vendor/github.com/yuin/goldmark-meta/meta.go
 create mode 100644 vendor/github.com/yuin/goldmark/.gitignore
 create mode 100644 vendor/github.com/yuin/goldmark/.golangci.yml
 create mode 100644 vendor/github.com/yuin/goldmark/LICENSE
 create mode 100644 vendor/github.com/yuin/goldmark/Makefile
 create mode 100644 vendor/github.com/yuin/goldmark/README.md
 create mode 100644 vendor/github.com/yuin/goldmark/ast/ast.go
 create mode 100644 vendor/github.com/yuin/goldmark/ast/block.go
 create mode 100644 vendor/github.com/yuin/goldmark/ast/inline.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/definition_list.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/footnote.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/strikethrough.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/table.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/tasklist.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/cjk.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/definition_list.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/footnote.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/gfm.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/linkify.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/package.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/strikethrough.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/table.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/tasklist.go
 create mode 100644 vendor/github.com/yuin/goldmark/extension/typographer.go
 create mode 100644 vendor/github.com/yuin/goldmark/markdown.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/attribute.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/atx_heading.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/auto_link.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/blockquote.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/code_block.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/code_span.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/delimiter.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/emphasis.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/fcode_block.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/html_block.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/link.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/link_ref.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/list.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/list_item.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/paragraph.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/parser.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/raw_html.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/setext_headings.go
 create mode 100644 vendor/github.com/yuin/goldmark/parser/thematic_break.go
 create mode 100644 vendor/github.com/yuin/goldmark/renderer/html/html.go
 create mode 100644 vendor/github.com/yuin/goldmark/renderer/renderer.go
 create mode 100644 vendor/github.com/yuin/goldmark/text/package.go
 create mode 100644 vendor/github.com/yuin/goldmark/text/reader.go
 create mode 100644 vendor/github.com/yuin/goldmark/text/segment.go
 create mode 100644 vendor/github.com/yuin/goldmark/util/html5entities.go
 create mode 100644 vendor/github.com/yuin/goldmark/util/unicode_case_folding.go
 create mode 100644 vendor/github.com/yuin/goldmark/util/util.go
 create mode 100644 vendor/github.com/yuin/goldmark/util/util_cjk.go
 create mode 100644 vendor/github.com/yuin/goldmark/util/util_safe.go
 create mode 100644 vendor/github.com/yuin/goldmark/util/util_unsafe.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/.changie.yaml
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/.gitignore
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/.golangci.yml
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/CHANGELOG.md
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/LICENSE
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/Makefile
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/README.md
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/data.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/doc.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/extend.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/format.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/mode_string.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/parse.go
 create mode 100644 vendor/go.abhg.dev/goldmark/frontmatter/transform.go
 create mode 100644 vendor/golang.org/x/net/http2/testsync.go
 create mode 100644 vendor/gopkg.in/yaml.v2/.travis.yml
 create mode 100644 vendor/gopkg.in/yaml.v2/LICENSE
 create mode 100644 vendor/gopkg.in/yaml.v2/LICENSE.libyaml
 create mode 100644 vendor/gopkg.in/yaml.v2/NOTICE
 create mode 100644 vendor/gopkg.in/yaml.v2/README.md
 create mode 100644 vendor/gopkg.in/yaml.v2/apic.go
 create mode 100644 vendor/gopkg.in/yaml.v2/decode.go
 create mode 100644 vendor/gopkg.in/yaml.v2/emitterc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/encode.go
 create mode 100644 vendor/gopkg.in/yaml.v2/parserc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/readerc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/resolve.go
 create mode 100644 vendor/gopkg.in/yaml.v2/scannerc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/sorter.go
 create mode 100644 vendor/gopkg.in/yaml.v2/writerc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/yaml.go
 create mode 100644 vendor/gopkg.in/yaml.v2/yamlh.go
 create mode 100644 vendor/gopkg.in/yaml.v2/yamlprivateh.go
 create mode 100644 vendor/gopkg.in/yaml.v3/LICENSE
 create mode 100644 vendor/gopkg.in/yaml.v3/NOTICE
 create mode 100644 vendor/gopkg.in/yaml.v3/README.md
 create mode 100644 vendor/gopkg.in/yaml.v3/apic.go
 create mode 100644 vendor/gopkg.in/yaml.v3/decode.go
 create mode 100644 vendor/gopkg.in/yaml.v3/emitterc.go
 create mode 100644 vendor/gopkg.in/yaml.v3/encode.go
 create mode 100644 vendor/gopkg.in/yaml.v3/parserc.go
 create mode 100644 vendor/gopkg.in/yaml.v3/readerc.go
 create mode 100644 vendor/gopkg.in/yaml.v3/resolve.go
 create mode 100644 vendor/gopkg.in/yaml.v3/scannerc.go
 create mode 100644 vendor/gopkg.in/yaml.v3/sorter.go
 create mode 100644 vendor/gopkg.in/yaml.v3/writerc.go
 create mode 100644 vendor/gopkg.in/yaml.v3/yaml.go
 create mode 100644 vendor/gopkg.in/yaml.v3/yamlh.go
 create mode 100644 vendor/gopkg.in/yaml.v3/yamlprivateh.go

diff --git a/go.mod b/go.mod
index 89f73f5d..92cbeda9 100644
--- a/go.mod
+++ b/go.mod
@@ -7,7 +7,7 @@ toolchain go1.21.5
 require (
 	github.com/davecgh/go-spew v1.1.1
 	github.com/hashicorp/go-cleanhttp v0.5.2
-	github.com/hashicorp/terraform-plugin-docs v0.16.0
+	github.com/hashicorp/terraform-plugin-docs v0.19.4
 	github.com/hashicorp/terraform-plugin-framework v1.4.2
 	github.com/hashicorp/terraform-plugin-go v0.22.0
 	github.com/hashicorp/terraform-plugin-log v0.9.0
@@ -16,19 +16,23 @@ require (
+	github.com/BurntSushi/toml v1.2.1 // indirect
+	github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
 	github.com/Masterminds/goutils v1.1.1 // indirect
 	github.com/Masterminds/semver/v3 v3.2.0 // indirect
 	github.com/Masterminds/sprig/v3 v3.2.3 // indirect
-	github.com/ProtonMail/go-crypto v1.1.0-alpha.0 // indirect
+	github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect
 	github.com/agext/levenshtein v1.2.2 // indirect
 	github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
 	github.com/armon/go-radix v1.0.0 // indirect
 	github.com/bgentry/speakeasy v0.1.0 // indirect
+	github.com/bmatcuk/doublestar/v4 v4.6.1 // indirect
 	github.com/cloudflare/circl v1.3.7 // indirect
 	github.com/fatih/color v1.16.0 // indirect
 	github.com/golang/protobuf v1.5.3 // indirect
 	github.com/google/go-cmp v0.6.0 // indirect
 	github.com/google/uuid v1.4.0 // indirect
+	github.com/hashicorp/cli v1.1.6 // indirect
 	github.com/hashicorp/errwrap v1.1.0 // indirect
 	github.com/hashicorp/go-checkpoint v0.5.0 // indirect
 	github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 // indirect
@@ -36,12 +40,12 @@ require (
 	github.com/hashicorp/go-multierror v1.1.1 // indirect
 	github.com/hashicorp/go-plugin v1.6.0 // indirect
 	github.com/hashicorp/go-uuid v1.0.3 // indirect
-	github.com/hashicorp/go-version v1.6.0 // indirect
-	github.com/hashicorp/hc-install v0.6.3 // indirect
+	github.com/hashicorp/go-version v1.7.0 // indirect
+	github.com/hashicorp/hc-install v0.7.0 // indirect
 	github.com/hashicorp/hcl/v2 v2.20.0 // indirect
 	github.com/hashicorp/logutils v1.0.0 // indirect
-	github.com/hashicorp/terraform-exec v0.20.0 // indirect
-	github.com/hashicorp/terraform-json v0.21.0 // indirect
+	github.com/hashicorp/terraform-exec v0.21.0 // indirect
+	github.com/hashicorp/terraform-json v0.22.1 // indirect
 	github.com/hashicorp/terraform-plugin-sdk/v2 v2.33.0 // indirect
 	github.com/hashicorp/terraform-registry-address v0.2.3 // indirect
 	github.com/hashicorp/terraform-svchost v0.1.1 // indirect
@@ -50,7 +54,7 @@ require (
 	github.com/imdario/mergo v0.3.15 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
-	github.com/mitchellh/cli v1.1.5 // indirect
+	github.com/mattn/go-runewidth v0.0.9 // indirect
 	github.com/mitchellh/copystructure v1.2.0 // indirect
 	github.com/mitchellh/go-testing-interface v1.14.1 // indirect
 	github.com/mitchellh/go-wordwrap v1.0.0 // indirect
@@ -58,23 +62,26 @@ require (
 	github.com/mitchellh/reflectwalk v1.0.2 // indirect
 	github.com/oklog/run v1.1.0 // indirect
 	github.com/posener/complete v1.2.3 // indirect
-	github.com/russross/blackfriday v1.6.0 // indirect
 	github.com/shopspring/decimal v1.3.1 // indirect
 	github.com/spf13/cast v1.5.0 // indirect
-	github.com/stretchr/testify v1.8.1 // indirect
 	github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
 	github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
 	github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
-	github.com/zclconf/go-cty v1.14.3 // indirect
+	github.com/yuin/goldmark v1.7.1 // indirect
+	github.com/yuin/goldmark-meta v1.1.0 // indirect
+	github.com/zclconf/go-cty v1.14.4 // indirect
+	go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
 	golang.org/x/crypto v0.21.0 // indirect
 	golang.org/x/exp v0.0.0-20230809150735-7b3493d9a819 // indirect
-	golang.org/x/mod v0.15.0 // indirect
-	golang.org/x/net v0.21.0 // indirect
+	golang.org/x/mod v0.17.0 // indirect
+	golang.org/x/net v0.23.0 // indirect
 	golang.org/x/sys v0.18.0 // indirect
-	golang.org/x/text v0.14.0 // indirect
+	golang.org/x/text v0.15.0 // indirect
 	golang.org/x/tools v0.13.0 // indirect
 	google.golang.org/appengine v1.6.8 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 // indirect
 	google.golang.org/grpc v1.61.1 // indirect
 	google.golang.org/protobuf v1.32.0 // indirect
+	gopkg.in/yaml.v2 v2.3.0 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
diff --git a/go.sum b/go.sum
index fab6c090..29a5cf9e 100644
--- a/go.sum
+++ b/go.sum
@@ -1,27 +1,30 @@
 dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
 dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
+github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
+github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0=
+github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc=
 github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
 github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
-github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
 github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
 github.com/Masterminds/semver/v3 v3.2.0/go.mod
h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/sprig/v3 v3.2.1/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/ProtonMail/go-crypto v1.1.0-alpha.0 h1:nHGfwXmFvJrSR9xu8qL7BkO4DqTHXE9N5vPhgY2I+j0= -github.com/ProtonMail/go-crypto v1.1.0-alpha.0/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bmatcuk/doublestar/v4 v4.6.1 h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I= +github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= @@ -33,7 +36,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= @@ -43,8 +45,8 @@ github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66D github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= -github.com/go-git/go-git/v5 v5.11.0 h1:XIZc1p+8YzypNr34itUfSvYJcv+eYdTnTvOZ2vD3cA4= -github.com/go-git/go-git/v5 v5.11.0/go.mod h1:6GFcX2P3NM7FPBfpePbpLd21XxsgdAt+lKqXmCUiUCY= 
+github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= +github.com/go-git/go-git/v5 v5.12.0/go.mod h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= @@ -59,9 +61,10 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/cli v1.1.6 h1:CMOV+/LJfL1tXCOKrgAX0uRKnzjj/mpmqNXloRSy2K8= +github.com/hashicorp/cli v1.1.6/go.mod h1:MPon5QYlgjjo0BSoAiN0ESeT5fRzDjVRp+uioJ0piz4= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -82,20 +85,20 @@ github.com/hashicorp/go-plugin v1.6.0/go.mod h1:lBS5MtSSBZk0SHc66KACcjjlU6WzEVP/ github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.6.3 h1:yE/r1yJvWbtrJ0STwScgEnCanb0U9v7zp0Gbkmcoxqs= -github.com/hashicorp/hc-install v0.6.3/go.mod h1:KamGdbodYzlufbWh4r9NRo8y6GLHWZP2GBtdnms1Ln0= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hc-install v0.7.0 h1:Uu9edVqjKQxxuD28mR5TikkKDd/p55S8vzPC1659aBk= +github.com/hashicorp/hc-install v0.7.0/go.mod h1:ELmmzZlGnEcqoUMKUuykHaPCIR1sYLYX+KSggWSKZuA= github.com/hashicorp/hcl/v2 v2.20.0 h1:l++cRs/5jQOiKVvqXZm/P1ZEfVXJmvLS9WSVxkaeTb4= github.com/hashicorp/hcl/v2 v2.20.0/go.mod h1:WmcD/Ym72MDOOx5F62Ly+leloeu6H7m0pG7VBiU6pQk= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/terraform-exec v0.20.0 h1:DIZnPsqzPGuUnq6cH8jWcPunBfY+C+M8JyYF3vpnuEo= -github.com/hashicorp/terraform-exec v0.20.0/go.mod h1:ckKGkJWbsNqFKV1itgMnE0hY9IYf1HoiekpuN0eWoDw= -github.com/hashicorp/terraform-json v0.21.0 h1:9NQxbLNqPbEMze+S6+YluEdXgJmhQykRyRNd+zTI05U= -github.com/hashicorp/terraform-json v0.21.0/go.mod h1:qdeBs11ovMzo5puhrRibdD6d2Dq6TyE/28JiU4tIQxk= -github.com/hashicorp/terraform-plugin-docs v0.16.0 h1:UmxFr3AScl6Wged84jndJIfFccGyBZn52KtMNsS12dI= -github.com/hashicorp/terraform-plugin-docs v0.16.0/go.mod h1:M3ZrlKBJAbPMtNOPwHicGi1c+hZUh7/g0ifT/z7TVfA= 
+github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ= +github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg= +github.com/hashicorp/terraform-json v0.22.1 h1:xft84GZR0QzjPVWs4lRUwvTcPnegqlyS7orfb5Ltvec= +github.com/hashicorp/terraform-json v0.22.1/go.mod h1:JbWSQCLFSXFFhg42T7l9iJwdGXBYV8fmmD6o/ML4p3A= +github.com/hashicorp/terraform-plugin-docs v0.19.4 h1:G3Bgo7J22OMtegIgn8Cd/CaSeyEljqjH3G39w28JK4c= +github.com/hashicorp/terraform-plugin-docs v0.19.4/go.mod h1:4pLASsatTmRynVzsjEhbXZ6s7xBlUw/2Kt0zfrq8HxA= github.com/hashicorp/terraform-plugin-framework v1.4.2 h1:P7a7VP1GZbjc4rv921Xy5OckzhoiO3ig6SGxwelD2sI= github.com/hashicorp/terraform-plugin-framework v1.4.2/go.mod h1:GWl3InPFZi2wVQmdVnINPKys09s9mLmTZr95/ngLnbY= github.com/hashicorp/terraform-plugin-go v0.22.0 h1:1OS1Jk5mO0f5hrziWJGXXIxBrMe2j/B8E+DVGw43Xmc= @@ -112,8 +115,6 @@ github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc= github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= -github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= @@ -134,19 +135,17 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mitchellh/cli v1.1.5 h1:OxRIeJXpAMztws/XHlN2vu6imG5Dpq+j61AzAX5fLng= -github.com/mitchellh/cli v1.1.5/go.mod h1:v8+iFts2sPIKUV1ltktPXMCC8fumSKFItNcD2cLtRR4= +github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 
h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= @@ -165,38 +164,30 @@ github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= -github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= -github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= -github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= -github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/skeema/knownhosts v1.2.1 h1:SHWdIUa82uGZz+F+47k8SY4QhhI291cXCpopT1lK2AQ= -github.com/skeema/knownhosts v1.2.1/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= +github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= +github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= @@ -207,11 +198,15 @@ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/zclconf/go-cty v1.14.3 h1:1JXy1XroaGrzZuG6X9dt7HL6s9AwbY+l4UNL8o5B6ho= -github.com/zclconf/go-cty v1.14.3/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/yuin/goldmark v1.7.1 h1:3bajkSilaCbjdKVsKdZjZCLBNPL9pYzrCakKaf4U49U= +github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= +github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= +github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= +github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= +github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= +go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= @@ -219,22 +214,21 @@ golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOM golang.org/x/exp v0.0.0-20230809150735-7b3493d9a819 h1:EDuYyU/MkFXllv9QF9819VlI9a4tzGuCbhG0ExK9o1U= golang.org/x/exp v0.0.0-20230809150735-7b3493d9a819/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= -golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -259,8 +253,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= @@ -285,6 +279,7 @@ gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore new file mode 100644 index 00000000..fe79e3ad --- 
/dev/null
+++ b/vendor/github.com/BurntSushi/toml/.gitignore
@@ -0,0 +1,2 @@
+/toml.test
+/toml-test
diff --git a/vendor/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING
new file mode 100644
index 00000000..01b57432
--- /dev/null
+++ b/vendor/github.com/BurntSushi/toml/COPYING
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 TOML authors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md
new file mode 100644
index 00000000..3651cfa9
--- /dev/null
+++ b/vendor/github.com/BurntSushi/toml/README.md
@@ -0,0 +1,120 @@
+TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
+reflection interface similar to Go's standard library `json` and `xml` packages.
+
+Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0).
+
+Documentation: https://godocs.io/github.com/BurntSushi/toml
+
+See the [releases page](https://github.com/BurntSushi/toml/releases) for a
+changelog; this information is also in the git tag annotations (e.g. `git show
+v0.4.0`).
+
+This library requires Go 1.13 or newer; add it to your go.mod with:
+
+    % go get github.com/BurntSushi/toml@latest
+
+It also comes with a TOML validator CLI tool:
+
+    % go install github.com/BurntSushi/toml/cmd/tomlv@latest
+    % tomlv some-toml-file.toml
+
+### Examples
+For the simplest example, consider some TOML file as just a list of keys and
+values:
+
+```toml
+Age = 25
+Cats = [ "Cauchy", "Plato" ]
+Pi = 3.14
+Perfection = [ 6, 28, 496, 8128 ]
+DOB = 1987-07-05T05:45:00Z
+```
+
+Which can be decoded with:
+
+```go
+type Config struct {
+	Age        int
+	Cats       []string
+	Pi         float64
+	Perfection []int
+	DOB        time.Time
+}
+
+var conf Config
+_, err := toml.Decode(tomlData, &conf)
+```
+
+You can also use struct tags if your struct field name doesn't map to a TOML key value directly:
+
+```toml
+some_key_NAME = "wat"
+```
+
+```go
+type TOML struct {
+	ObscureKey string `toml:"some_key_NAME"`
+}
+```
+
+Beware that like other decoders **only exported fields** are considered when
+encoding and decoding; private fields are silently ignored.
+
+### Using the `Marshaler` and `encoding.TextUnmarshaler` interfaces
+Here's an example that automatically parses values in a `mail.Address`:
+
+```toml
+contacts = [
+	"Donald Duck <donald@duckburg.com>",
+	"Scrooge McDuck <scrooge@duckburg.com>",
+]
+```
+
+Can be decoded with:
+
+```go
+// Create address type which satisfies the encoding.TextUnmarshaler interface.
+type address struct { + *mail.Address +} + +func (a *address) UnmarshalText(text []byte) error { + var err error + a.Address, err = mail.ParseAddress(string(text)) + return err +} + +// Decode it. +func decode() { + blob := ` + contacts = [ + "Donald Duck ", + "Scrooge McDuck ", + ] + ` + + var contacts struct { + Contacts []address + } + + _, err := toml.Decode(blob, &contacts) + if err != nil { + log.Fatal(err) + } + + for _, c := range contacts.Contacts { + fmt.Printf("%#v\n", c.Address) + } + + // Output: + // &mail.Address{Name:"Donald Duck", Address:"donald@duckburg.com"} + // &mail.Address{Name:"Scrooge McDuck", Address:"scrooge@duckburg.com"} +} +``` + +To target TOML specifically you can implement `UnmarshalTOML` TOML interface in +a similar way. + +### More complex usage +See the [`_example/`](/_example) directory for a more complex example. diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go new file mode 100644 index 00000000..0ca1dc4f --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -0,0 +1,602 @@ +package toml + +import ( + "bytes" + "encoding" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "math" + "os" + "reflect" + "strconv" + "strings" + "time" +) + +// Unmarshaler is the interface implemented by objects that can unmarshal a +// TOML description of themselves. +type Unmarshaler interface { + UnmarshalTOML(interface{}) error +} + +// Unmarshal decodes the contents of data in TOML format into a pointer v. +// +// See [Decoder] for a description of the decoding process. +func Unmarshal(data []byte, v interface{}) error { + _, err := NewDecoder(bytes.NewReader(data)).Decode(v) + return err +} + +// Decode the TOML data in to the pointer v. +// +// See [Decoder] for a description of the decoding process. +func Decode(data string, v interface{}) (MetaData, error) { + return NewDecoder(strings.NewReader(data)).Decode(v) +} + +// DecodeFile reads the contents of a file and decodes it with [Decode]. +func DecodeFile(path string, v interface{}) (MetaData, error) { + fp, err := os.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} + +// Primitive is a TOML value that hasn't been decoded into a Go value. +// +// This type can be used for any value, which will cause decoding to be delayed. +// You can use [PrimitiveDecode] to "manually" decode these values. +// +// NOTE: The underlying representation of a `Primitive` value is subject to +// change. Do not rely on it. +// +// NOTE: Primitive values are still parsed, so using them will only avoid the +// overhead of reflection. They can be useful when you don't know the exact type +// of TOML data until runtime. +type Primitive struct { + undecoded interface{} + context Key +} + +// The significand precision for float32 and float64 is 24 and 53 bits; this is +// the range a natural number can be stored in a float without loss of data. +const ( + maxSafeFloat32Int = 16777215 // 2^24-1 + maxSafeFloat64Int = int64(9007199254740991) // 2^53-1 +) + +// Decoder decodes TOML data. +// +// TOML tables correspond to Go structs or maps; they can be used +// interchangeably, but structs offer better type safety. +// +// TOML table arrays correspond to either a slice of structs or a slice of maps. +// +// TOML datetimes correspond to [time.Time]. Local datetimes are parsed in the +// local timezone. 
+// +// [time.Duration] types are treated as nanoseconds if the TOML value is an +// integer, or they're parsed with time.ParseDuration() if they're strings. +// +// All other TOML types (float, string, int, bool and array) correspond to the +// obvious Go types. +// +// An exception to the above rules is if a type implements the TextUnmarshaler +// interface, in which case any primitive TOML value (floats, strings, integers, +// booleans, datetimes) will be converted to a []byte and given to the value's +// UnmarshalText method. See the Unmarshaler example for a demonstration with +// email addresses. +// +// ### Key mapping +// +// TOML keys can map to either keys in a Go map or field names in a Go struct. +// The special `toml` struct tag can be used to map TOML keys to struct fields +// that don't match the key name exactly (see the example). A case insensitive +// match to struct names will be tried if an exact match can't be found. +// +// The mapping between TOML values and Go values is loose. That is, there may +// exist TOML values that cannot be placed into your representation, and there +// may be parts of your representation that do not correspond to TOML values. +// This loose mapping can be made stricter by using the IsDefined and/or +// Undecoded methods on the MetaData returned. +// +// This decoder does not handle cyclic types. Decode will not terminate if a +// cyclic type is passed. +type Decoder struct { + r io.Reader +} + +// NewDecoder creates a new Decoder. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{r: r} +} + +var ( + unmarshalToml = reflect.TypeOf((*Unmarshaler)(nil)).Elem() + unmarshalText = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() + primitiveType = reflect.TypeOf((*Primitive)(nil)).Elem() +) + +// Decode TOML data in to the pointer `v`. +func (dec *Decoder) Decode(v interface{}) (MetaData, error) { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr { + s := "%q" + if reflect.TypeOf(v) == nil { + s = "%v" + } + + return MetaData{}, fmt.Errorf("toml: cannot decode to non-pointer "+s, reflect.TypeOf(v)) + } + if rv.IsNil() { + return MetaData{}, fmt.Errorf("toml: cannot decode to nil value of %q", reflect.TypeOf(v)) + } + + // Check if this is a supported type: struct, map, interface{}, or something + // that implements UnmarshalTOML or UnmarshalText. + rv = indirect(rv) + rt := rv.Type() + if rv.Kind() != reflect.Struct && rv.Kind() != reflect.Map && + !(rv.Kind() == reflect.Interface && rv.NumMethod() == 0) && + !rt.Implements(unmarshalToml) && !rt.Implements(unmarshalText) { + return MetaData{}, fmt.Errorf("toml: cannot decode to type %s", rt) + } + + // TODO: parser should read from io.Reader? Or at the very least, make it + // read from []byte rather than string + data, err := ioutil.ReadAll(dec.r) + if err != nil { + return MetaData{}, err + } + + p, err := parse(string(data)) + if err != nil { + return MetaData{}, err + } + + md := MetaData{ + mapping: p.mapping, + keyInfo: p.keyInfo, + keys: p.ordered, + decoded: make(map[string]struct{}, len(p.ordered)), + context: nil, + data: data, + } + return md, md.unify(p.mapping, rv) +} + +// PrimitiveDecode is just like the other Decode* functions, except it decodes a +// TOML value that has already been parsed. Valid primitive values can *only* be +// obtained from values filled by the decoder functions, including this method. +// (i.e., v may contain more [Primitive] values.) 
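+//
+// A typical pattern, as a sketch (the struct and field names here are
+// illustrative only):
+//
+//	var outer struct{ Server toml.Primitive }
+//	md, _ := toml.Decode(blob, &outer)
+//	var server ServerConfig
+//	err := md.PrimitiveDecode(outer.Server, &server)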
+//
+// Meta data for primitive values is included in the meta data returned by the
+// Decode* functions with one exception: keys returned by the Undecoded method
+// will only reflect keys that were decoded. Namely, any keys hidden behind a
+// Primitive will be considered undecoded. Executing this method will update the
+// undecoded keys in the meta data. (See the example.)
+func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
+	md.context = primValue.context
+	defer func() { md.context = nil }()
+	return md.unify(primValue.undecoded, rvalue(v))
+}
+
+// unify performs a sort of type unification based on the structure of `rv`,
+// which is the client representation.
+//
+// Any type mismatch produces an error. Finding a type that we don't know
+// how to handle produces an unsupported type error.
+func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
+	// Special case. Look for a `Primitive` value.
+	// TODO: #76 would make this superfluous once implemented.
+	if rv.Type() == primitiveType {
+		// Save the undecoded data and the key context into the primitive
+		// value.
+		context := make(Key, len(md.context))
+		copy(context, md.context)
+		rv.Set(reflect.ValueOf(Primitive{
+			undecoded: data,
+			context:   context,
+		}))
+		return nil
+	}
+
+	rvi := rv.Interface()
+	if v, ok := rvi.(Unmarshaler); ok {
+		return v.UnmarshalTOML(data)
+	}
+	if v, ok := rvi.(encoding.TextUnmarshaler); ok {
+		return md.unifyText(data, v)
+	}
+
+	// TODO:
+	// The behavior here is incorrect whenever a Go type satisfies the
+	// encoding.TextUnmarshaler interface but also corresponds to a TOML hash or
+	// array. In particular, the unmarshaler should only be applied to primitive
+	// TOML values. But at this point, it will be applied to all kinds of values
+	// and produce an incorrect error whenever those values are hashes or arrays
+	// (including arrays of tables).
+
+	k := rv.Kind()
+
+	if k >= reflect.Int && k <= reflect.Uint64 {
+		return md.unifyInt(data, rv)
+	}
+	switch k {
+	case reflect.Ptr:
+		elem := reflect.New(rv.Type().Elem())
+		err := md.unify(data, reflect.Indirect(elem))
+		if err != nil {
+			return err
+		}
+		rv.Set(elem)
+		return nil
+	case reflect.Struct:
+		return md.unifyStruct(data, rv)
+	case reflect.Map:
+		return md.unifyMap(data, rv)
+	case reflect.Array:
+		return md.unifyArray(data, rv)
+	case reflect.Slice:
+		return md.unifySlice(data, rv)
+	case reflect.String:
+		return md.unifyString(data, rv)
+	case reflect.Bool:
+		return md.unifyBool(data, rv)
+	case reflect.Interface:
+		if rv.NumMethod() > 0 { // Only empty interfaces are supported.
+ return md.e("unsupported type %s", rv.Type()) + } + return md.unifyAnything(data, rv) + case reflect.Float32, reflect.Float64: + return md.unifyFloat64(data, rv) + } + return md.e("unsupported type %s", rv.Kind()) +} + +func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if mapping == nil { + return nil + } + return md.e("type mismatch for %s: expected table but found %T", + rv.Type().String(), mapping) + } + + for key, datum := range tmap { + var f *field + fields := cachedTypeFields(rv.Type()) + for i := range fields { + ff := &fields[i] + if ff.name == key { + f = ff + break + } + if f == nil && strings.EqualFold(ff.name, key) { + f = ff + } + } + if f != nil { + subv := rv + for _, i := range f.index { + subv = indirect(subv.Field(i)) + } + + if isUnifiable(subv) { + md.decoded[md.context.add(key).String()] = struct{}{} + md.context = append(md.context, key) + + err := md.unify(datum, subv) + if err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + } else if f.name != "" { + return md.e("cannot write unexported field %s.%s", rv.Type().String(), f.name) + } + } + } + return nil +} + +func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { + keyType := rv.Type().Key().Kind() + if keyType != reflect.String && keyType != reflect.Interface { + return fmt.Errorf("toml: cannot decode to a map with non-string key type (%s in %q)", + keyType, rv.Type()) + } + + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if tmap == nil { + return nil + } + return md.badtype("map", mapping) + } + if rv.IsNil() { + rv.Set(reflect.MakeMap(rv.Type())) + } + for k, v := range tmap { + md.decoded[md.context.add(k).String()] = struct{}{} + md.context = append(md.context, k) + + rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) + + err := md.unify(v, indirect(rvval)) + if err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + + rvkey := indirect(reflect.New(rv.Type().Key())) + + switch keyType { + case reflect.Interface: + rvkey.Set(reflect.ValueOf(k)) + case reflect.String: + rvkey.SetString(k) + } + + rv.SetMapIndex(rvkey, rvval) + } + return nil +} + +func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return md.badtype("slice", data) + } + if l := datav.Len(); l != rv.Len() { + return md.e("expected array length %d; got TOML array of length %d", rv.Len(), l) + } + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return md.badtype("slice", data) + } + n := datav.Len() + if rv.IsNil() || rv.Cap() < n { + rv.Set(reflect.MakeSlice(rv.Type(), n, n)) + } + rv.SetLen(n) + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { + l := data.Len() + for i := 0; i < l; i++ { + err := md.unify(data.Index(i).Interface(), indirect(rv.Index(i))) + if err != nil { + return err + } + } + return nil +} + +func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { + _, ok := rv.Interface().(json.Number) + if ok { + if i, ok := data.(int64); ok { + rv.SetString(strconv.FormatInt(i, 10)) + } else if f, ok := data.(float64); ok { + rv.SetString(strconv.FormatFloat(f, 'f', 
-1, 64)) + } else { + return md.badtype("string", data) + } + return nil + } + + if s, ok := data.(string); ok { + rv.SetString(s) + return nil + } + return md.badtype("string", data) +} + +func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { + rvk := rv.Kind() + + if num, ok := data.(float64); ok { + switch rvk { + case reflect.Float32: + if num < -math.MaxFloat32 || num > math.MaxFloat32 { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + fallthrough + case reflect.Float64: + rv.SetFloat(num) + default: + panic("bug") + } + return nil + } + + if num, ok := data.(int64); ok { + if (rvk == reflect.Float32 && (num < -maxSafeFloat32Int || num > maxSafeFloat32Int)) || + (rvk == reflect.Float64 && (num < -maxSafeFloat64Int || num > maxSafeFloat64Int)) { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + rv.SetFloat(float64(num)) + return nil + } + + return md.badtype("float", data) +} + +func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { + _, ok := rv.Interface().(time.Duration) + if ok { + // Parse as string duration, and fall back to regular integer parsing + // (as nanosecond) if this is not a string. + if s, ok := data.(string); ok { + dur, err := time.ParseDuration(s) + if err != nil { + return md.parseErr(errParseDuration{s}) + } + rv.SetInt(int64(dur)) + return nil + } + } + + num, ok := data.(int64) + if !ok { + return md.badtype("integer", data) + } + + rvk := rv.Kind() + switch { + case rvk >= reflect.Int && rvk <= reflect.Int64: + if (rvk == reflect.Int8 && (num < math.MinInt8 || num > math.MaxInt8)) || + (rvk == reflect.Int16 && (num < math.MinInt16 || num > math.MaxInt16)) || + (rvk == reflect.Int32 && (num < math.MinInt32 || num > math.MaxInt32)) { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + rv.SetInt(num) + case rvk >= reflect.Uint && rvk <= reflect.Uint64: + unum := uint64(num) + if rvk == reflect.Uint8 && (num < 0 || unum > math.MaxUint8) || + rvk == reflect.Uint16 && (num < 0 || unum > math.MaxUint16) || + rvk == reflect.Uint32 && (num < 0 || unum > math.MaxUint32) { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + rv.SetUint(unum) + default: + panic("unreachable") + } + return nil +} + +func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { + if b, ok := data.(bool); ok { + rv.SetBool(b) + return nil + } + return md.badtype("boolean", data) +} + +func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { + rv.Set(reflect.ValueOf(data)) + return nil +} + +func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) error { + var s string + switch sdata := data.(type) { + case Marshaler: + text, err := sdata.MarshalTOML() + if err != nil { + return err + } + s = string(text) + case encoding.TextMarshaler: + text, err := sdata.MarshalText() + if err != nil { + return err + } + s = string(text) + case fmt.Stringer: + s = sdata.String() + case string: + s = sdata + case bool: + s = fmt.Sprintf("%v", sdata) + case int64: + s = fmt.Sprintf("%d", sdata) + case float64: + s = fmt.Sprintf("%f", sdata) + default: + return md.badtype("primitive (string-like)", data) + } + if err := v.UnmarshalText([]byte(s)); err != nil { + return err + } + return nil +} + +func (md *MetaData) badtype(dst string, data interface{}) error { + return md.e("incompatible types: TOML value has type %T; destination has type %s", data, dst) +} + +func (md *MetaData) parseErr(err error) error { + k := md.context.String() + return 
ParseError{ + LastKey: k, + Position: md.keyInfo[k].pos, + Line: md.keyInfo[k].pos.Line, + err: err, + input: string(md.data), + } +} + +func (md *MetaData) e(format string, args ...interface{}) error { + f := "toml: " + if len(md.context) > 0 { + f = fmt.Sprintf("toml: (last key %q): ", md.context) + p := md.keyInfo[md.context.String()].pos + if p.Line > 0 { + f = fmt.Sprintf("toml: line %d (last key %q): ", p.Line, md.context) + } + } + return fmt.Errorf(f+format, args...) +} + +// rvalue returns a reflect.Value of `v`. All pointers are resolved. +func rvalue(v interface{}) reflect.Value { + return indirect(reflect.ValueOf(v)) +} + +// indirect returns the value pointed to by a pointer. +// +// Pointers are followed until the value is not a pointer. New values are +// allocated for each nil pointer. +// +// An exception to this rule is if the value satisfies an interface of interest +// to us (like encoding.TextUnmarshaler). +func indirect(v reflect.Value) reflect.Value { + if v.Kind() != reflect.Ptr { + if v.CanSet() { + pv := v.Addr() + pvi := pv.Interface() + if _, ok := pvi.(encoding.TextUnmarshaler); ok { + return pv + } + if _, ok := pvi.(Unmarshaler); ok { + return pv + } + } + return v + } + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + return indirect(reflect.Indirect(v)) +} + +func isUnifiable(rv reflect.Value) bool { + if rv.CanSet() { + return true + } + rvi := rv.Interface() + if _, ok := rvi.(encoding.TextUnmarshaler); ok { + return true + } + if _, ok := rvi.(Unmarshaler); ok { + return true + } + return false +} diff --git a/vendor/github.com/BurntSushi/toml/decode_go116.go b/vendor/github.com/BurntSushi/toml/decode_go116.go new file mode 100644 index 00000000..086d0b68 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode_go116.go @@ -0,0 +1,19 @@ +//go:build go1.16 +// +build go1.16 + +package toml + +import ( + "io/fs" +) + +// DecodeFS reads the contents of a file from [fs.FS] and decodes it with +// [Decode]. +func DecodeFS(fsys fs.FS, path string, v interface{}) (MetaData, error) { + fp, err := fsys.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} diff --git a/vendor/github.com/BurntSushi/toml/deprecated.go b/vendor/github.com/BurntSushi/toml/deprecated.go new file mode 100644 index 00000000..c6af3f23 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/deprecated.go @@ -0,0 +1,21 @@ +package toml + +import ( + "encoding" + "io" +) + +// Deprecated: use encoding.TextMarshaler +type TextMarshaler encoding.TextMarshaler + +// Deprecated: use encoding.TextUnmarshaler +type TextUnmarshaler encoding.TextUnmarshaler + +// Deprecated: use MetaData.PrimitiveDecode. +func PrimitiveDecode(primValue Primitive, v interface{}) error { + md := MetaData{decoded: make(map[string]struct{})} + return md.unify(primValue.undecoded, rvalue(v)) +} + +// Deprecated: use NewDecoder(reader).Decode(&value). +func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { return NewDecoder(r).Decode(v) } diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go new file mode 100644 index 00000000..81a7c0fe --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/doc.go @@ -0,0 +1,11 @@ +// Package toml implements decoding and encoding of TOML files. 
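+//
+// A minimal round-trip, as a sketch:
+//
+//	var conf map[string]interface{}
+//	_, err := toml.Decode(doc, &conf)
+//	if err == nil {
+//		err = toml.NewEncoder(os.Stdout).Encode(conf)
+//	}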
+//
+// This package supports TOML v1.0.0, as specified at https://toml.io
+//
+// There is also support for delaying decoding with the Primitive type, and
+// querying the set of keys in a TOML document with the MetaData type.
+//
+// The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator,
+// and can be used to verify if a TOML document is valid. It can also be used to
+// print the type of each key.
+package toml
diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go
new file mode 100644
index 00000000..930e1d52
--- /dev/null
+++ b/vendor/github.com/BurntSushi/toml/encode.go
@@ -0,0 +1,750 @@
+package toml
+
+import (
+	"bufio"
+	"encoding"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"io"
+	"math"
+	"reflect"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/BurntSushi/toml/internal"
+)
+
+type tomlEncodeError struct{ error }
+
+var (
+	errArrayNilElement = errors.New("toml: cannot encode array with nil element")
+	errNonString       = errors.New("toml: cannot encode a map with non-string key type")
+	errNoKey           = errors.New("toml: top-level values must be Go maps or structs")
+	errAnything        = errors.New("") // used in testing
+)
+
+var dblQuotedReplacer = strings.NewReplacer(
+	"\"", "\\\"",
+	"\\", "\\\\",
+	"\x00", `\u0000`,
+	"\x01", `\u0001`,
+	"\x02", `\u0002`,
+	"\x03", `\u0003`,
+	"\x04", `\u0004`,
+	"\x05", `\u0005`,
+	"\x06", `\u0006`,
+	"\x07", `\u0007`,
+	"\b", `\b`,
+	"\t", `\t`,
+	"\n", `\n`,
+	"\x0b", `\u000b`,
+	"\f", `\f`,
+	"\r", `\r`,
+	"\x0e", `\u000e`,
+	"\x0f", `\u000f`,
+	"\x10", `\u0010`,
+	"\x11", `\u0011`,
+	"\x12", `\u0012`,
+	"\x13", `\u0013`,
+	"\x14", `\u0014`,
+	"\x15", `\u0015`,
+	"\x16", `\u0016`,
+	"\x17", `\u0017`,
+	"\x18", `\u0018`,
+	"\x19", `\u0019`,
+	"\x1a", `\u001a`,
+	"\x1b", `\u001b`,
+	"\x1c", `\u001c`,
+	"\x1d", `\u001d`,
+	"\x1e", `\u001e`,
+	"\x1f", `\u001f`,
+	"\x7f", `\u007f`,
+)
+
+var (
+	marshalToml = reflect.TypeOf((*Marshaler)(nil)).Elem()
+	marshalText = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
+	timeType    = reflect.TypeOf((*time.Time)(nil)).Elem()
+)
+
+// Marshaler is the interface implemented by types that can marshal themselves
+// into valid TOML.
+type Marshaler interface {
+	MarshalTOML() ([]byte, error)
+}
+
+// Encoder encodes a Go value to a TOML document.
+//
+// The mapping between Go values and TOML values should be precisely the same as
+// for [Decode].
+//
+// time.Time is encoded as an RFC 3339 string, and time.Duration as its string
+// representation.
+//
+// The [Marshaler] and [encoding.TextMarshaler] interfaces are supported to
+// encode the value as custom TOML.
+//
+// If you want to write arbitrary binary data then you will need to use
+// something like base64 since TOML does not have any binary types.
+//
+// When encoding TOML hashes (Go maps or structs), keys without any sub-hashes
+// are encoded first.
+//
+// Go maps will be sorted alphabetically by key for deterministic output.
+//
+// The toml struct tag can be used to provide the key name; if omitted the
+// struct field name will be used. If the "omitempty" option is present the
+// following value will be skipped:
+//
+//   - arrays, slices, maps, and string with len of 0
+//   - struct with all zero values
+//   - bool false
+//
+// If omitzero is given all int and float types with a value of 0 will be
+// skipped.
+//
+// Encoding Go values without a corresponding TOML representation will return an
+// error. Examples of this include maps with non-string keys, slices with nil
+// elements, embedded non-struct types, and nested slices containing maps or
+// structs. (e.g. [][]map[string]string is not allowed but []map[string]string
+// is okay, as is []map[string][]string).
+//
+// NOTE: only exported keys are encoded due to the use of reflection. Unexported
+// keys are silently discarded.
+type Encoder struct {
+	// String to use for a single indentation level; default is two spaces.
+	Indent string
+
+	w          *bufio.Writer
+	hasWritten bool // written any output to w yet?
+}
+
+// NewEncoder creates a new Encoder.
+func NewEncoder(w io.Writer) *Encoder {
+	return &Encoder{
+		w:      bufio.NewWriter(w),
+		Indent: "  ",
+	}
+}
+
+// Encode writes a TOML representation of the Go value to the [Encoder]'s writer.
+//
+// An error is returned if the value given cannot be encoded to a valid TOML
+// document.
+func (enc *Encoder) Encode(v interface{}) error {
+	rv := eindirect(reflect.ValueOf(v))
+	if err := enc.safeEncode(Key([]string{}), rv); err != nil {
+		return err
+	}
+	return enc.w.Flush()
+}
+
+func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
+	defer func() {
+		if r := recover(); r != nil {
+			if terr, ok := r.(tomlEncodeError); ok {
+				err = terr.error
+				return
+			}
+			panic(r)
+		}
+	}()
+	enc.encode(key, rv)
+	return nil
+}
+
+func (enc *Encoder) encode(key Key, rv reflect.Value) {
+	// If we can marshal the type to text, then we use that. This prevents the
+	// encoder from handling these types as generic structs (or whatever the
+	// underlying type of a TextMarshaler is).
+	switch {
+	case isMarshaler(rv):
+		enc.writeKeyValue(key, rv, false)
+		return
+	case rv.Type() == primitiveType: // TODO: #76 would make this superfluous once implemented.
+		enc.encode(key, reflect.ValueOf(rv.Interface().(Primitive).undecoded))
+		return
+	}
+
+	k := rv.Kind()
+	switch k {
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
+		reflect.Int64,
+		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
+		reflect.Uint64,
+		reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
+		enc.writeKeyValue(key, rv, false)
+	case reflect.Array, reflect.Slice:
+		if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
+			enc.eArrayOfTables(key, rv)
+		} else {
+			enc.writeKeyValue(key, rv, false)
+		}
+	case reflect.Interface:
+		if rv.IsNil() {
+			return
+		}
+		enc.encode(key, rv.Elem())
+	case reflect.Map:
+		if rv.IsNil() {
+			return
+		}
+		enc.eTable(key, rv)
+	case reflect.Ptr:
+		if rv.IsNil() {
+			return
+		}
+		enc.encode(key, rv.Elem())
+	case reflect.Struct:
+		enc.eTable(key, rv)
+	default:
+		encPanic(fmt.Errorf("unsupported type for key '%s': %s", key, k))
+	}
+}
+
+// eElement encodes any value that can be an array element.
+func (enc *Encoder) eElement(rv reflect.Value) {
+	switch v := rv.Interface().(type) {
+	case time.Time: // Using TextMarshaler adds extra quotes, which we don't want.
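+		// Pick a layout based on the fixed zone recorded for the value (see
+		// internal/tz.go), so local datetimes, dates, and times round-trip
+		// without a spurious UTC offset.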
+ format := time.RFC3339Nano + switch v.Location() { + case internal.LocalDatetime: + format = "2006-01-02T15:04:05.999999999" + case internal.LocalDate: + format = "2006-01-02" + case internal.LocalTime: + format = "15:04:05.999999999" + } + switch v.Location() { + default: + enc.wf(v.Format(format)) + case internal.LocalDatetime, internal.LocalDate, internal.LocalTime: + enc.wf(v.In(time.UTC).Format(format)) + } + return + case Marshaler: + s, err := v.MarshalTOML() + if err != nil { + encPanic(err) + } + if s == nil { + encPanic(errors.New("MarshalTOML returned nil and no error")) + } + enc.w.Write(s) + return + case encoding.TextMarshaler: + s, err := v.MarshalText() + if err != nil { + encPanic(err) + } + if s == nil { + encPanic(errors.New("MarshalText returned nil and no error")) + } + enc.writeQuoted(string(s)) + return + case time.Duration: + enc.writeQuoted(v.String()) + return + case json.Number: + n, _ := rv.Interface().(json.Number) + + if n == "" { /// Useful zero value. + enc.w.WriteByte('0') + return + } else if v, err := n.Int64(); err == nil { + enc.eElement(reflect.ValueOf(v)) + return + } else if v, err := n.Float64(); err == nil { + enc.eElement(reflect.ValueOf(v)) + return + } + encPanic(fmt.Errorf("unable to convert %q to int64 or float64", n)) + } + + switch rv.Kind() { + case reflect.Ptr: + enc.eElement(rv.Elem()) + return + case reflect.String: + enc.writeQuoted(rv.String()) + case reflect.Bool: + enc.wf(strconv.FormatBool(rv.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + enc.wf(strconv.FormatInt(rv.Int(), 10)) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + enc.wf(strconv.FormatUint(rv.Uint(), 10)) + case reflect.Float32: + f := rv.Float() + if math.IsNaN(f) { + enc.wf("nan") + } else if math.IsInf(f, 0) { + enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) + } else { + enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 32))) + } + case reflect.Float64: + f := rv.Float() + if math.IsNaN(f) { + enc.wf("nan") + } else if math.IsInf(f, 0) { + enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) + } else { + enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 64))) + } + case reflect.Array, reflect.Slice: + enc.eArrayOrSliceElement(rv) + case reflect.Struct: + enc.eStruct(nil, rv, true) + case reflect.Map: + enc.eMap(nil, rv, true) + case reflect.Interface: + enc.eElement(rv.Elem()) + default: + encPanic(fmt.Errorf("unexpected type: %T", rv.Interface())) + } +} + +// By the TOML spec, all floats must have a decimal with at least one number on +// either side. 
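+// For example, strconv.FormatFloat(3, 'f', -1, 64) yields "3", which this
+// helper rewrites to "3.0" so the encoded float is valid TOML.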
+func floatAddDecimal(fstr string) string { + if !strings.Contains(fstr, ".") { + return fstr + ".0" + } + return fstr +} + +func (enc *Encoder) writeQuoted(s string) { + enc.wf("\"%s\"", dblQuotedReplacer.Replace(s)) +} + +func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { + length := rv.Len() + enc.wf("[") + for i := 0; i < length; i++ { + elem := eindirect(rv.Index(i)) + enc.eElement(elem) + if i != length-1 { + enc.wf(", ") + } + } + enc.wf("]") +} + +func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { + if len(key) == 0 { + encPanic(errNoKey) + } + for i := 0; i < rv.Len(); i++ { + trv := eindirect(rv.Index(i)) + if isNil(trv) { + continue + } + enc.newline() + enc.wf("%s[[%s]]", enc.indentStr(key), key) + enc.newline() + enc.eMapOrStruct(key, trv, false) + } +} + +func (enc *Encoder) eTable(key Key, rv reflect.Value) { + if len(key) == 1 { + // Output an extra newline between top-level tables. + // (The newline isn't written if nothing else has been written though.) + enc.newline() + } + if len(key) > 0 { + enc.wf("%s[%s]", enc.indentStr(key), key) + enc.newline() + } + enc.eMapOrStruct(key, rv, false) +} + +func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value, inline bool) { + switch rv.Kind() { + case reflect.Map: + enc.eMap(key, rv, inline) + case reflect.Struct: + enc.eStruct(key, rv, inline) + default: + // Should never happen? + panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) + } +} + +func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) { + rt := rv.Type() + if rt.Key().Kind() != reflect.String { + encPanic(errNonString) + } + + // Sort keys so that we have deterministic output. And write keys directly + // underneath this key first, before writing sub-structs or sub-maps. + var mapKeysDirect, mapKeysSub []string + for _, mapKey := range rv.MapKeys() { + k := mapKey.String() + if typeIsTable(tomlTypeOfGo(eindirect(rv.MapIndex(mapKey)))) { + mapKeysSub = append(mapKeysSub, k) + } else { + mapKeysDirect = append(mapKeysDirect, k) + } + } + + var writeMapKeys = func(mapKeys []string, trailC bool) { + sort.Strings(mapKeys) + for i, mapKey := range mapKeys { + val := eindirect(rv.MapIndex(reflect.ValueOf(mapKey))) + if isNil(val) { + continue + } + + if inline { + enc.writeKeyValue(Key{mapKey}, val, true) + if trailC || i != len(mapKeys)-1 { + enc.wf(", ") + } + } else { + enc.encode(key.add(mapKey), val) + } + } + } + + if inline { + enc.wf("{") + } + writeMapKeys(mapKeysDirect, len(mapKeysSub) > 0) + writeMapKeys(mapKeysSub, false) + if inline { + enc.wf("}") + } +} + +const is32Bit = (32 << (^uint(0) >> 63)) == 32 + +func pointerTo(t reflect.Type) reflect.Type { + if t.Kind() == reflect.Ptr { + return pointerTo(t.Elem()) + } + return t +} + +func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { + // Write keys for fields directly under this key first, because if we write + // a field that creates a new table then all keys under it will be in that + // table (not the one we're writing here). + // + // Fields is a [][]int: for fieldsDirect this always has one entry (the + // struct index). For fieldsSub it contains two entries: the parent field + // index from tv, and the field indexes for the fields of the sub. 
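	//
	// For example, with struct{ A int; B struct{ C int } }, A ends up in
	// fieldsDirect as [0] and B in fieldsSub as [1], so "A = ..." is written
	// before the [B] table.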
+ var ( + rt = rv.Type() + fieldsDirect, fieldsSub [][]int + addFields func(rt reflect.Type, rv reflect.Value, start []int) + ) + addFields = func(rt reflect.Type, rv reflect.Value, start []int) { + for i := 0; i < rt.NumField(); i++ { + f := rt.Field(i) + isEmbed := f.Anonymous && pointerTo(f.Type).Kind() == reflect.Struct + if f.PkgPath != "" && !isEmbed { /// Skip unexported fields. + continue + } + opts := getOptions(f.Tag) + if opts.skip { + continue + } + + frv := eindirect(rv.Field(i)) + + // Treat anonymous struct fields with tag names as though they are + // not anonymous, like encoding/json does. + // + // Non-struct anonymous fields use the normal encoding logic. + if isEmbed { + if getOptions(f.Tag).name == "" && frv.Kind() == reflect.Struct { + addFields(frv.Type(), frv, append(start, f.Index...)) + continue + } + } + + if typeIsTable(tomlTypeOfGo(frv)) { + fieldsSub = append(fieldsSub, append(start, f.Index...)) + } else { + // Copy so it works correct on 32bit archs; not clear why this + // is needed. See #314, and https://www.reddit.com/r/golang/comments/pnx8v4 + // This also works fine on 64bit, but 32bit archs are somewhat + // rare and this is a wee bit faster. + if is32Bit { + copyStart := make([]int, len(start)) + copy(copyStart, start) + fieldsDirect = append(fieldsDirect, append(copyStart, f.Index...)) + } else { + fieldsDirect = append(fieldsDirect, append(start, f.Index...)) + } + } + } + } + addFields(rt, rv, nil) + + writeFields := func(fields [][]int) { + for _, fieldIndex := range fields { + fieldType := rt.FieldByIndex(fieldIndex) + fieldVal := eindirect(rv.FieldByIndex(fieldIndex)) + + if isNil(fieldVal) { /// Don't write anything for nil fields. + continue + } + + opts := getOptions(fieldType.Tag) + if opts.skip { + continue + } + keyName := fieldType.Name + if opts.name != "" { + keyName = opts.name + } + + if opts.omitempty && enc.isEmpty(fieldVal) { + continue + } + if opts.omitzero && isZero(fieldVal) { + continue + } + + if inline { + enc.writeKeyValue(Key{keyName}, fieldVal, true) + if fieldIndex[0] != len(fields)-1 { + enc.wf(", ") + } + } else { + enc.encode(key.add(keyName), fieldVal) + } + } + } + + if inline { + enc.wf("{") + } + writeFields(fieldsDirect) + writeFields(fieldsSub) + if inline { + enc.wf("}") + } +} + +// tomlTypeOfGo returns the TOML type name of the Go value's type. +// +// It is used to determine whether the types of array elements are mixed (which +// is forbidden). If the Go value is nil, then it is illegal for it to be an +// array element, and valueIsNil is returned as true. +// +// The type may be `nil`, which means no concrete TOML type could be found. 
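+//
+// For example, a Go bool maps to tomlBool, and a non-empty []map[string]int
+// maps to tomlArrayHash (an array of tables) via isTableArray.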
+func tomlTypeOfGo(rv reflect.Value) tomlType { + if isNil(rv) || !rv.IsValid() { + return nil + } + + if rv.Kind() == reflect.Struct { + if rv.Type() == timeType { + return tomlDatetime + } + if isMarshaler(rv) { + return tomlString + } + return tomlHash + } + + if isMarshaler(rv) { + return tomlString + } + + switch rv.Kind() { + case reflect.Bool: + return tomlBool + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64: + return tomlInteger + case reflect.Float32, reflect.Float64: + return tomlFloat + case reflect.Array, reflect.Slice: + if isTableArray(rv) { + return tomlArrayHash + } + return tomlArray + case reflect.Ptr, reflect.Interface: + return tomlTypeOfGo(rv.Elem()) + case reflect.String: + return tomlString + case reflect.Map: + return tomlHash + default: + encPanic(errors.New("unsupported type: " + rv.Kind().String())) + panic("unreachable") + } +} + +func isMarshaler(rv reflect.Value) bool { + return rv.Type().Implements(marshalText) || rv.Type().Implements(marshalToml) +} + +// isTableArray reports if all entries in the array or slice are a table. +func isTableArray(arr reflect.Value) bool { + if isNil(arr) || !arr.IsValid() || arr.Len() == 0 { + return false + } + + ret := true + for i := 0; i < arr.Len(); i++ { + tt := tomlTypeOfGo(eindirect(arr.Index(i))) + // Don't allow nil. + if tt == nil { + encPanic(errArrayNilElement) + } + + if ret && !typeEqual(tomlHash, tt) { + ret = false + } + } + return ret +} + +type tagOptions struct { + skip bool // "-" + name string + omitempty bool + omitzero bool +} + +func getOptions(tag reflect.StructTag) tagOptions { + t := tag.Get("toml") + if t == "-" { + return tagOptions{skip: true} + } + var opts tagOptions + parts := strings.Split(t, ",") + opts.name = parts[0] + for _, s := range parts[1:] { + switch s { + case "omitempty": + opts.omitempty = true + case "omitzero": + opts.omitzero = true + } + } + return opts +} + +func isZero(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return rv.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return rv.Uint() == 0 + case reflect.Float32, reflect.Float64: + return rv.Float() == 0.0 + } + return false +} + +func (enc *Encoder) isEmpty(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Array, reflect.Slice, reflect.Map, reflect.String: + return rv.Len() == 0 + case reflect.Struct: + if rv.Type().Comparable() { + return reflect.Zero(rv.Type()).Interface() == rv.Interface() + } + // Need to also check if all the fields are empty, otherwise something + // like this with uncomparable types will always return true: + // + // type a struct{ field b } + // type b struct{ s []string } + // s := a{field: b{s: []string{"AAA"}}} + for i := 0; i < rv.NumField(); i++ { + if !enc.isEmpty(rv.Field(i)) { + return false + } + } + return true + case reflect.Bool: + return !rv.Bool() + } + return false +} + +func (enc *Encoder) newline() { + if enc.hasWritten { + enc.wf("\n") + } +} + +// Write a key/value pair: +// +// key = +// +// This is also used for "k = v" in inline tables; so something like this will +// be written in three calls: +// +// ┌───────────────────┐ +// │ ┌───┐ ┌────┐│ +// v v v v vv +// key = {k = 1, k2 = 2} +func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { + if len(key) == 0 { + encPanic(errNoKey) + } + enc.wf("%s%s = ", 
enc.indentStr(key), key.maybeQuoted(len(key)-1))
+	enc.eElement(val)
+	if !inline {
+		enc.newline()
+	}
+}
+
+func (enc *Encoder) wf(format string, v ...interface{}) {
+	_, err := fmt.Fprintf(enc.w, format, v...)
+	if err != nil {
+		encPanic(err)
+	}
+	enc.hasWritten = true
+}
+
+func (enc *Encoder) indentStr(key Key) string {
+	return strings.Repeat(enc.Indent, len(key)-1)
+}
+
+func encPanic(err error) {
+	panic(tomlEncodeError{err})
+}
+
+// Resolve any level of pointers to the actual value (e.g. **string → string).
+func eindirect(v reflect.Value) reflect.Value {
+	if v.Kind() != reflect.Ptr && v.Kind() != reflect.Interface {
+		if isMarshaler(v) {
+			return v
+		}
+		if v.CanAddr() { /// Special case for marshalers; see #358.
+			if pv := v.Addr(); isMarshaler(pv) {
+				return pv
+			}
+		}
+		return v
+	}
+
+	if v.IsNil() {
+		return v
+	}
+
+	return eindirect(v.Elem())
+}
+
+func isNil(rv reflect.Value) bool {
+	switch rv.Kind() {
+	case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+		return rv.IsNil()
+	default:
+		return false
+	}
+}
diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go
new file mode 100644
index 00000000..f4f390e6
--- /dev/null
+++ b/vendor/github.com/BurntSushi/toml/error.go
@@ -0,0 +1,279 @@
+package toml
+
+import (
+	"fmt"
+	"strings"
+)
+
+// ParseError is returned when there is an error parsing the TOML syntax such as
+// invalid syntax, duplicate keys, etc.
+//
+// In addition to the error message itself, you can also print detailed location
+// information with context by using [ErrorWithPosition]:
+//
+//	toml: error: Key 'fruit' was already created and cannot be used as an array.
+//
+//	At line 4, column 2-7:
+//
+//	      2 | fruit = []
+//	      3 |
+//	      4 | [[fruit]] # Not allowed
+//	            ^^^^^
+//
+// [ErrorWithUsage] can be used to print the above with some more detailed usage
+// guidance:
+//
+//	toml: error: newlines not allowed within inline tables
+//
+//	At line 1, column 18:
+//
+//	      1 | x = [{ key = 42 #
+//	                           ^
+//
+//	Error help:
+//
+//	  Inline tables must always be on a single line:
+//
+//	      table = {key = 42, second = 43}
+//
+//	  It is invalid to split them over multiple lines like so:
+//
+//	      # INVALID
+//	      table = {
+//	          key = 42,
+//	          second = 43
+//	      }
+//
+//	  Use regular tables for this:
+//
+//	      [table]
+//	      key = 42
+//	      second = 43
+type ParseError struct {
+	Message  string   // Short technical message.
+	Usage    string   // Longer message with usage guidance; may be blank.
+	Position Position // Position of the error
+	LastKey  string   // Last parsed key, may be blank.
+
+	// Line the error occurred.
+	//
+	// Deprecated: use [Position].
+	Line int
+
+	err   error
+	input string
+}
+
+// Position of an error.
+type Position struct {
+	Line  int // Line number, starting at 1.
+	Start int // Start of error, as byte offset starting at 0.
+	Len   int // Length in bytes.
+}
+
+func (pe ParseError) Error() string {
+	msg := pe.Message
+	if msg == "" { // Error from errorf()
+		msg = pe.err.Error()
+	}
+
+	if pe.LastKey == "" {
+		return fmt.Sprintf("toml: line %d: %s", pe.Position.Line, msg)
+	}
+	return fmt.Sprintf("toml: line %d (last key %q): %s",
+		pe.Position.Line, pe.LastKey, msg)
+}
+
+// ErrorWithPosition returns the error with detailed location context.
+//
+// See the documentation on [ParseError].
+func (pe ParseError) ErrorWithPosition() string {
+	if pe.input == "" { // Should never happen, but just in case.
+ return pe.Error() + } + + var ( + lines = strings.Split(pe.input, "\n") + col = pe.column(lines) + b = new(strings.Builder) + ) + + msg := pe.Message + if msg == "" { + msg = pe.err.Error() + } + + // TODO: don't show control characters as literals? This may not show up + // well everywhere. + + if pe.Position.Len == 1 { + fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d:\n\n", + msg, pe.Position.Line, col+1) + } else { + fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d-%d:\n\n", + msg, pe.Position.Line, col, col+pe.Position.Len) + } + if pe.Position.Line > 2 { + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-2, lines[pe.Position.Line-3]) + } + if pe.Position.Line > 1 { + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-1, lines[pe.Position.Line-2]) + } + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line, lines[pe.Position.Line-1]) + fmt.Fprintf(b, "% 10s%s%s\n", "", strings.Repeat(" ", col), strings.Repeat("^", pe.Position.Len)) + return b.String() +} + +// ErrorWithUsage() returns the error with detailed location context and usage +// guidance. +// +// See the documentation on [ParseError]. +func (pe ParseError) ErrorWithUsage() string { + m := pe.ErrorWithPosition() + if u, ok := pe.err.(interface{ Usage() string }); ok && u.Usage() != "" { + lines := strings.Split(strings.TrimSpace(u.Usage()), "\n") + for i := range lines { + if lines[i] != "" { + lines[i] = " " + lines[i] + } + } + return m + "Error help:\n\n" + strings.Join(lines, "\n") + "\n" + } + return m +} + +func (pe ParseError) column(lines []string) int { + var pos, col int + for i := range lines { + ll := len(lines[i]) + 1 // +1 for the removed newline + if pos+ll >= pe.Position.Start { + col = pe.Position.Start - pos + if col < 0 { // Should never happen, but just in case. + col = 0 + } + break + } + pos += ll + } + + return col +} + +type ( + errLexControl struct{ r rune } + errLexEscape struct{ r rune } + errLexUTF8 struct{ b byte } + errLexInvalidNum struct{ v string } + errLexInvalidDate struct{ v string } + errLexInlineTableNL struct{} + errLexStringNL struct{} + errParseRange struct { + i interface{} // int or float + size string // "int64", "uint16", etc. 
+	}
+	errParseDuration struct{ d string }
+)
+
+func (e errLexControl) Error() string {
+	return fmt.Sprintf("TOML files cannot contain control characters: '0x%02x'", e.r)
+}
+func (e errLexControl) Usage() string { return "" }
+
+func (e errLexEscape) Error() string        { return fmt.Sprintf(`invalid escape in string '\%c'`, e.r) }
+func (e errLexEscape) Usage() string        { return usageEscape }
+func (e errLexUTF8) Error() string          { return fmt.Sprintf("invalid UTF-8 byte: 0x%02x", e.b) }
+func (e errLexUTF8) Usage() string          { return "" }
+func (e errLexInvalidNum) Error() string    { return fmt.Sprintf("invalid number: %q", e.v) }
+func (e errLexInvalidNum) Usage() string    { return "" }
+func (e errLexInvalidDate) Error() string   { return fmt.Sprintf("invalid date: %q", e.v) }
+func (e errLexInvalidDate) Usage() string   { return "" }
+func (e errLexInlineTableNL) Error() string { return "newlines not allowed within inline tables" }
+func (e errLexInlineTableNL) Usage() string { return usageInlineNewline }
+func (e errLexStringNL) Error() string      { return "strings cannot contain newlines" }
+func (e errLexStringNL) Usage() string      { return usageStringNewline }
+func (e errParseRange) Error() string       { return fmt.Sprintf("%v is out of range for %s", e.i, e.size) }
+func (e errParseRange) Usage() string       { return usageIntOverflow }
+func (e errParseDuration) Error() string    { return fmt.Sprintf("invalid duration: %q", e.d) }
+func (e errParseDuration) Usage() string    { return usageDuration }
+
+const usageEscape = `
+A '\' inside a "-delimited string is interpreted as an escape character.
+
+The following escape sequences are supported:
+\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX
+
+To prevent a '\' from being recognized as an escape character, use either:
+
+- a ' or '''-delimited string; escape characters aren't processed in them; or
+- write two backslashes to get a single backslash: '\\'.
+
+If you're trying to add a Windows path (e.g. "C:\Users\martin") then using '/'
+instead of '\' will usually also work: "C:/Users/martin".
+`
+
+const usageInlineNewline = `
+Inline tables must always be on a single line:
+
+    table = {key = 42, second = 43}
+
+It is invalid to split them over multiple lines like so:
+
+    # INVALID
+    table = {
+        key = 42,
+        second = 43
+    }
+
+Use regular tables for this:
+
+    [table]
+    key = 42
+    second = 43
+`
+
+const usageStringNewline = `
+Strings must always be on a single line, and cannot span more than one line:
+
+    # INVALID
+    string = "Hello,
+    world!"
+
+Instead use """ or ''' to split strings over multiple lines:
+
+    string = """Hello,
+    world!"""
+`
+
+const usageIntOverflow = `
+This number is too large; this may be an error in the TOML, but it can also be a
+bug in the program that uses too small of an integer.
+
+The maximum and minimum values are:
+
+    size   │ lowest         │ highest
+    ───────┼────────────────┼──────────
+    int8   │ -128           │ 127
+    int16  │ -32,768        │ 32,767
+    int32  │ -2,147,483,648 │ 2,147,483,647
+    int64  │ -9.2 × 10¹⁷    │ 9.2 × 10¹⁷
+    uint8  │ 0              │ 255
+    uint16 │ 0              │ 65,535
+    uint32 │ 0              │ 4,294,967,295
+    uint64 │ 0              │ 1.8 × 10¹⁸
+
+int refers to int32 on 32-bit systems and int64 on 64-bit systems.
+`
+
+const usageDuration = `
+A duration must be as "number<unit>", without any spaces. Valid units are:
+
+    ns         nanoseconds (billionth of a second)
+    us, µs     microseconds (millionth of a second)
+    ms         milliseconds (thousandth of a second)
+    s          seconds
+    m          minutes
+    h          hours
+
+You can combine multiple units; for example "5m10s" for 5 minutes and 10
+seconds.
+` diff --git a/vendor/github.com/BurntSushi/toml/internal/tz.go b/vendor/github.com/BurntSushi/toml/internal/tz.go new file mode 100644 index 00000000..022f15bc --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/internal/tz.go @@ -0,0 +1,36 @@ +package internal + +import "time" + +// Timezones used for local datetime, date, and time TOML types. +// +// The exact way times and dates without a timezone should be interpreted is not +// well-defined in the TOML specification and left to the implementation. These +// defaults to current local timezone offset of the computer, but this can be +// changed by changing these variables before decoding. +// +// TODO: +// Ideally we'd like to offer people the ability to configure the used timezone +// by setting Decoder.Timezone and Encoder.Timezone; however, this is a bit +// tricky: the reason we use three different variables for this is to support +// round-tripping – without these specific TZ names we wouldn't know which +// format to use. +// +// There isn't a good way to encode this right now though, and passing this sort +// of information also ties in to various related issues such as string format +// encoding, encoding of comments, etc. +// +// So, for the time being, just put this in internal until we can write a good +// comprehensive API for doing all of this. +// +// The reason they're exported is because they're referred from in e.g. +// internal/tag. +// +// Note that this behaviour is valid according to the TOML spec as the exact +// behaviour is left up to implementations. +var ( + localOffset = func() int { _, o := time.Now().Zone(); return o }() + LocalDatetime = time.FixedZone("datetime-local", localOffset) + LocalDate = time.FixedZone("date-local", localOffset) + LocalTime = time.FixedZone("time-local", localOffset) +) diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go new file mode 100644 index 00000000..d4d70871 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -0,0 +1,1233 @@ +package toml + +import ( + "fmt" + "reflect" + "runtime" + "strings" + "unicode" + "unicode/utf8" +) + +type itemType int + +const ( + itemError itemType = iota + itemNIL // used in the parser to indicate no type + itemEOF + itemText + itemString + itemRawString + itemMultilineString + itemRawMultilineString + itemBool + itemInteger + itemFloat + itemDatetime + itemArray // the start of an array + itemArrayEnd + itemTableStart + itemTableEnd + itemArrayTableStart + itemArrayTableEnd + itemKeyStart + itemKeyEnd + itemCommentStart + itemInlineTableStart + itemInlineTableEnd +) + +const eof = 0 + +type stateFn func(lx *lexer) stateFn + +func (p Position) String() string { + return fmt.Sprintf("at line %d; start %d; length %d", p.Line, p.Start, p.Len) +} + +type lexer struct { + input string + start int + pos int + line int + state stateFn + items chan item + + // Allow for backing up up to 4 runes. This is necessary because TOML + // contains 3-rune tokens (""" and '''). + prevWidths [4]int + nprev int // how many of prevWidths are in use + atEOF bool // If we emit an eof, we can still back up, but it is not OK to call next again. + + // A stack of state functions used to maintain context. + // + // The idea is to reuse parts of the state machine in various places. For + // example, values can appear at the top level or within arbitrarily nested + // arrays. The last state on the stack is used after a value has been lexed. + // Similarly for comments. 
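+	//
+	// For example, while lexing the value in 'x = [1, [2]]' the stack holds
+	// lexTopEnd plus one lexArrayValueEnd per currently open array.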
+ stack []stateFn +} + +type item struct { + typ itemType + val string + err error + pos Position +} + +func (lx *lexer) nextItem() item { + for { + select { + case item := <-lx.items: + return item + default: + lx.state = lx.state(lx) + //fmt.Printf(" STATE %-24s current: %-10s stack: %s\n", lx.state, lx.current(), lx.stack) + } + } +} + +func lex(input string) *lexer { + lx := &lexer{ + input: input, + state: lexTop, + items: make(chan item, 10), + stack: make([]stateFn, 0, 10), + line: 1, + } + return lx +} + +func (lx *lexer) push(state stateFn) { + lx.stack = append(lx.stack, state) +} + +func (lx *lexer) pop() stateFn { + if len(lx.stack) == 0 { + return lx.errorf("BUG in lexer: no states to pop") + } + last := lx.stack[len(lx.stack)-1] + lx.stack = lx.stack[0 : len(lx.stack)-1] + return last +} + +func (lx *lexer) current() string { + return lx.input[lx.start:lx.pos] +} + +func (lx lexer) getPos() Position { + p := Position{ + Line: lx.line, + Start: lx.start, + Len: lx.pos - lx.start, + } + if p.Len <= 0 { + p.Len = 1 + } + return p +} + +func (lx *lexer) emit(typ itemType) { + // Needed for multiline strings ending with an incomplete UTF-8 sequence. + if lx.start > lx.pos { + lx.error(errLexUTF8{lx.input[lx.pos]}) + return + } + lx.items <- item{typ: typ, pos: lx.getPos(), val: lx.current()} + lx.start = lx.pos +} + +func (lx *lexer) emitTrim(typ itemType) { + lx.items <- item{typ: typ, pos: lx.getPos(), val: strings.TrimSpace(lx.current())} + lx.start = lx.pos +} + +func (lx *lexer) next() (r rune) { + if lx.atEOF { + panic("BUG in lexer: next called after EOF") + } + if lx.pos >= len(lx.input) { + lx.atEOF = true + return eof + } + + if lx.input[lx.pos] == '\n' { + lx.line++ + } + lx.prevWidths[3] = lx.prevWidths[2] + lx.prevWidths[2] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[0] + if lx.nprev < 4 { + lx.nprev++ + } + + r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) + if r == utf8.RuneError { + lx.error(errLexUTF8{lx.input[lx.pos]}) + return utf8.RuneError + } + + // Note: don't use peek() here, as this calls next(). + if isControl(r) || (r == '\r' && (len(lx.input)-1 == lx.pos || lx.input[lx.pos+1] != '\n')) { + lx.errorControlChar(r) + return utf8.RuneError + } + + lx.prevWidths[0] = w + lx.pos += w + return r +} + +// ignore skips over the pending input before this point. +func (lx *lexer) ignore() { + lx.start = lx.pos +} + +// backup steps back one rune. Can be called 4 times between calls to next. +func (lx *lexer) backup() { + if lx.atEOF { + lx.atEOF = false + return + } + if lx.nprev < 1 { + panic("BUG in lexer: backed up too far") + } + w := lx.prevWidths[0] + lx.prevWidths[0] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[2] + lx.prevWidths[2] = lx.prevWidths[3] + lx.nprev-- + + lx.pos -= w + if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { + lx.line-- + } +} + +// accept consumes the next rune if it's equal to `valid`. +func (lx *lexer) accept(valid rune) bool { + if lx.next() == valid { + return true + } + lx.backup() + return false +} + +// peek returns but does not consume the next rune in the input. +func (lx *lexer) peek() rune { + r := lx.next() + lx.backup() + return r +} + +// skip ignores all input that matches the given predicate. +func (lx *lexer) skip(pred func(rune) bool) { + for { + r := lx.next() + if pred(r) { + continue + } + lx.backup() + lx.ignore() + return + } +} + +// error stops all lexing by emitting an error and returning `nil`. 
+// +// Note that any value that is a character is escaped if it's a special +// character (newlines, tabs, etc.). +func (lx *lexer) error(err error) stateFn { + if lx.atEOF { + return lx.errorPrevLine(err) + } + lx.items <- item{typ: itemError, pos: lx.getPos(), err: err} + return nil +} + +// errorfPrevline is like error(), but sets the position to the last column of +// the previous line. +// +// This is so that unexpected EOF or NL errors don't show on a new blank line. +func (lx *lexer) errorPrevLine(err error) stateFn { + pos := lx.getPos() + pos.Line-- + pos.Len = 1 + pos.Start = lx.pos - 1 + lx.items <- item{typ: itemError, pos: pos, err: err} + return nil +} + +// errorPos is like error(), but allows explicitly setting the position. +func (lx *lexer) errorPos(start, length int, err error) stateFn { + pos := lx.getPos() + pos.Start = start + pos.Len = length + lx.items <- item{typ: itemError, pos: pos, err: err} + return nil +} + +// errorf is like error, and creates a new error. +func (lx *lexer) errorf(format string, values ...interface{}) stateFn { + if lx.atEOF { + pos := lx.getPos() + pos.Line-- + pos.Len = 1 + pos.Start = lx.pos - 1 + lx.items <- item{typ: itemError, pos: pos, err: fmt.Errorf(format, values...)} + return nil + } + lx.items <- item{typ: itemError, pos: lx.getPos(), err: fmt.Errorf(format, values...)} + return nil +} + +func (lx *lexer) errorControlChar(cc rune) stateFn { + return lx.errorPos(lx.pos-1, 1, errLexControl{cc}) +} + +// lexTop consumes elements at the top level of TOML data. +func lexTop(lx *lexer) stateFn { + r := lx.next() + if isWhitespace(r) || isNL(r) { + return lexSkip(lx, lexTop) + } + switch r { + case '#': + lx.push(lexTop) + return lexCommentStart + case '[': + return lexTableStart + case eof: + if lx.pos > lx.start { + return lx.errorf("unexpected EOF") + } + lx.emit(itemEOF) + return nil + } + + // At this point, the only valid item can be a key, so we back up + // and let the key lexer do the rest. + lx.backup() + lx.push(lexTopEnd) + return lexKeyStart +} + +// lexTopEnd is entered whenever a top-level item has been consumed. (A value +// or a table.) It must see only whitespace, and will turn back to lexTop +// upon a newline. If it sees EOF, it will quit the lexer successfully. +func lexTopEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case r == '#': + // a comment will read to a newline for us. + lx.push(lexTop) + return lexCommentStart + case isWhitespace(r): + return lexTopEnd + case isNL(r): + lx.ignore() + return lexTop + case r == eof: + lx.emit(itemEOF) + return nil + } + return lx.errorf( + "expected a top-level item to end with a newline, comment, or EOF, but got %q instead", + r) +} + +// lexTable lexes the beginning of a table. Namely, it makes sure that +// it starts with a character other than '.' and ']'. +// It assumes that '[' has already been consumed. +// It also handles the case that this is an item in an array of tables. +// e.g., '[[name]]'. 
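+//
+// For example, '[a]' is emitted as itemTableStart, itemText ("a"), and
+// itemTableEnd; '[[a]]' uses the array-table variants instead.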
+func lexTableStart(lx *lexer) stateFn { + if lx.peek() == '[' { + lx.next() + lx.emit(itemArrayTableStart) + lx.push(lexArrayTableEnd) + } else { + lx.emit(itemTableStart) + lx.push(lexTableEnd) + } + return lexTableNameStart +} + +func lexTableEnd(lx *lexer) stateFn { + lx.emit(itemTableEnd) + return lexTopEnd +} + +func lexArrayTableEnd(lx *lexer) stateFn { + if r := lx.next(); r != ']' { + return lx.errorf("expected end of table array name delimiter ']', but got %q instead", r) + } + lx.emit(itemArrayTableEnd) + return lexTopEnd +} + +func lexTableNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == ']' || r == eof: + return lx.errorf("unexpected end of table name (table names cannot be empty)") + case r == '.': + return lx.errorf("unexpected table separator (table names cannot be empty)") + case r == '"' || r == '\'': + lx.ignore() + lx.push(lexTableNameEnd) + return lexQuotedName + default: + lx.push(lexTableNameEnd) + return lexBareName + } +} + +// lexTableNameEnd reads the end of a piece of a table name, optionally +// consuming whitespace. +func lexTableNameEnd(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.next(); { + case isWhitespace(r): + return lexTableNameEnd + case r == '.': + lx.ignore() + return lexTableNameStart + case r == ']': + return lx.pop() + default: + return lx.errorf("expected '.' or ']' to end table name, but got %q instead", r) + } +} + +// lexBareName lexes one part of a key or table. +// +// It assumes that at least one valid character for the table has already been +// read. +// +// Lexes only one part, e.g. only 'a' inside 'a.b'. +func lexBareName(lx *lexer) stateFn { + r := lx.next() + if isBareKeyChar(r) { + return lexBareName + } + lx.backup() + lx.emit(itemText) + return lx.pop() +} + +// lexBareName lexes one part of a key or table. +// +// It assumes that at least one valid character for the table has already been +// read. +// +// Lexes only one part, e.g. only '"a"' inside '"a".b'. +func lexQuotedName(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexValue) + case r == '"': + lx.ignore() // ignore the '"' + return lexString + case r == '\'': + lx.ignore() // ignore the "'" + return lexRawString + case r == eof: + return lx.errorf("unexpected EOF; expected value") + default: + return lx.errorf("expected value but found %q instead", r) + } +} + +// lexKeyStart consumes all key parts until a '='. +func lexKeyStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == '=' || r == eof: + return lx.errorf("unexpected '=': key name appears blank") + case r == '.': + return lx.errorf("unexpected '.': keys cannot start with a '.'") + case r == '"' || r == '\'': + lx.ignore() + fallthrough + default: // Bare key + lx.emit(itemKeyStart) + return lexKeyNameStart + } +} + +func lexKeyNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == '=' || r == eof: + return lx.errorf("unexpected '='") + case r == '.': + return lx.errorf("unexpected '.'") + case r == '"' || r == '\'': + lx.ignore() + lx.push(lexKeyEnd) + return lexQuotedName + default: + lx.push(lexKeyEnd) + return lexBareName + } +} + +// lexKeyEnd consumes the end of a key and trims whitespace (up to the key +// separator). 
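+//
+// For example, in 'a.b = 1' it runs after each part: a '.' sends it back to
+// lexKeyNameStart for "b", and the '=' emits itemKeyEnd.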
+func lexKeyEnd(lx *lexer) stateFn {
+	lx.skip(isWhitespace)
+	switch r := lx.next(); {
+	case isWhitespace(r):
+		return lexSkip(lx, lexKeyEnd)
+	case r == eof:
+		return lx.errorf("unexpected EOF; expected key separator '='")
+	case r == '.':
+		lx.ignore()
+		return lexKeyNameStart
+	case r == '=':
+		lx.emit(itemKeyEnd)
+		return lexSkip(lx, lexValue)
+	default:
+		return lx.errorf("expected '.' or '=', but got %q instead", r)
+	}
+}
+
+// lexValue starts the consumption of a value anywhere a value is expected.
+// lexValue will ignore whitespace.
+// After a value is lexed, the last state on the stack is popped and returned.
+func lexValue(lx *lexer) stateFn {
+	// We allow whitespace to precede a value, but NOT newlines.
+	// In array syntax, the array states are responsible for ignoring newlines.
+	r := lx.next()
+	switch {
+	case isWhitespace(r):
+		return lexSkip(lx, lexValue)
+	case isDigit(r):
+		lx.backup() // avoid an extra state and use the same as above
+		return lexNumberOrDateStart
+	}
+	switch r {
+	case '[':
+		lx.ignore()
+		lx.emit(itemArray)
+		return lexArrayValue
+	case '{':
+		lx.ignore()
+		lx.emit(itemInlineTableStart)
+		return lexInlineTableValue
+	case '"':
+		if lx.accept('"') {
+			if lx.accept('"') {
+				lx.ignore() // Ignore """
+				return lexMultilineString
+			}
+			lx.backup()
+		}
+		lx.ignore() // ignore the '"'
+		return lexString
+	case '\'':
+		if lx.accept('\'') {
+			if lx.accept('\'') {
+				lx.ignore() // Ignore '''
+				return lexMultilineRawString
+			}
+			lx.backup()
+		}
+		lx.ignore() // ignore the "'"
+		return lexRawString
+	case '.': // special error case, be kind to users
+		return lx.errorf("floats must start with a digit, not '.'")
+	case 'i', 'n':
+		if (lx.accept('n') && lx.accept('f')) || (lx.accept('a') && lx.accept('n')) {
+			lx.emit(itemFloat)
+			return lx.pop()
+		}
+	case '-', '+':
+		return lexDecimalNumberStart
+	}
+	if unicode.IsLetter(r) {
+		// Be permissive here; lexBool will give a nice error if the
+		// user wrote something like
+		//	x = foo
+		// (i.e. not 'true' or 'false' but is something else word-like.)
+		lx.backup()
+		return lexBool
+	}
+	if r == eof {
+		return lx.errorf("unexpected EOF; expected value")
+	}
+	return lx.errorf("expected value but found %q instead", r)
+}
+
+// lexArrayValue consumes one value in an array. It assumes that '[' or ','
+// have already been consumed. All whitespace and newlines are ignored.
+func lexArrayValue(lx *lexer) stateFn {
+	r := lx.next()
+	switch {
+	case isWhitespace(r) || isNL(r):
+		return lexSkip(lx, lexArrayValue)
+	case r == '#':
+		lx.push(lexArrayValue)
+		return lexCommentStart
+	case r == ',':
+		return lx.errorf("unexpected comma")
+	case r == ']':
+		return lexArrayEnd
+	}
+
+	lx.backup()
+	lx.push(lexArrayValueEnd)
+	return lexValue
+}
+
+// lexArrayValueEnd consumes everything between the end of an array value and
+// the next value (or the end of the array): it ignores whitespace and newlines
+// and expects either a ',' or a ']'.
+func lexArrayValueEnd(lx *lexer) stateFn {
+	switch r := lx.next(); {
+	case isWhitespace(r) || isNL(r):
+		return lexSkip(lx, lexArrayValueEnd)
+	case r == '#':
+		lx.push(lexArrayValueEnd)
+		return lexCommentStart
+	case r == ',':
+		lx.ignore()
+		return lexArrayValue // move on to the next value
+	case r == ']':
+		return lexArrayEnd
+	default:
+		return lx.errorf("expected a comma (',') or array terminator (']'), but got %s", runeOrEOF(r))
+	}
+}
+
+// lexArrayEnd finishes the lexing of an array.
+// It assumes that a ']' has just been consumed.
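+//
+// Illustrative item stream (not from the upstream comments): the value
+// `[1, 2]` lexes as
+//
+//	itemArray, itemInteger("1"), itemInteger("2"), itemArrayEnd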
+func lexArrayEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemArrayEnd) + return lx.pop() +} + +// lexInlineTableValue consumes one key/value pair in an inline table. +// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. +func lexInlineTableValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValue) + case isNL(r): + return lx.errorPrevLine(errLexInlineTableNL{}) + case r == '#': + lx.push(lexInlineTableValue) + return lexCommentStart + case r == ',': + return lx.errorf("unexpected comma") + case r == '}': + return lexInlineTableEnd + } + lx.backup() + lx.push(lexInlineTableValueEnd) + return lexKeyStart +} + +// lexInlineTableValueEnd consumes everything between the end of an inline table +// key/value pair and the next pair (or the end of the table): +// it ignores whitespace and expects either a ',' or a '}'. +func lexInlineTableValueEnd(lx *lexer) stateFn { + switch r := lx.next(); { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValueEnd) + case isNL(r): + return lx.errorPrevLine(errLexInlineTableNL{}) + case r == '#': + lx.push(lexInlineTableValueEnd) + return lexCommentStart + case r == ',': + lx.ignore() + lx.skip(isWhitespace) + if lx.peek() == '}' { + return lx.errorf("trailing comma not allowed in inline tables") + } + return lexInlineTableValue + case r == '}': + return lexInlineTableEnd + default: + return lx.errorf("expected a comma or an inline table terminator '}', but got %s instead", runeOrEOF(r)) + } +} + +func runeOrEOF(r rune) string { + if r == eof { + return "end of file" + } + return "'" + string(r) + "'" +} + +// lexInlineTableEnd finishes the lexing of an inline table. +// It assumes that a '}' has just been consumed. +func lexInlineTableEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemInlineTableEnd) + return lx.pop() +} + +// lexString consumes the inner contents of a string. It assumes that the +// beginning '"' has already been consumed and ignored. +func lexString(lx *lexer) stateFn { + r := lx.next() + switch { + case r == eof: + return lx.errorf(`unexpected EOF; expected '"'`) + case isNL(r): + return lx.errorPrevLine(errLexStringNL{}) + case r == '\\': + lx.push(lexString) + return lexStringEscape + case r == '"': + lx.backup() + lx.emit(itemString) + lx.next() + lx.ignore() + return lx.pop() + } + return lexString +} + +// lexMultilineString consumes the inner contents of a string. It assumes that +// the beginning '"""' has already been consumed and ignored. +func lexMultilineString(lx *lexer) stateFn { + r := lx.next() + switch r { + default: + return lexMultilineString + case eof: + return lx.errorf(`unexpected EOF; expected '"""'`) + case '\\': + return lexMultilineStringEscape + case '"': + /// Found " → try to read two more "". + if lx.accept('"') { + if lx.accept('"') { + /// Peek ahead: the string can contain " and "", including at the + /// end: """str""""" + /// 6 or more at the end, however, is an error. + if lx.peek() == '"' { + /// Check if we already lexed 5 's; if so we have 6 now, and + /// that's just too many man! + /// + /// Second check is for the edge case: + /// + /// two quotes allowed. 
+ /// vv + /// """lol \"""""" + /// ^^ ^^^---- closing three + /// escaped + /// + /// But ugly, but it works + if strings.HasSuffix(lx.current(), `"""""`) && !strings.HasSuffix(lx.current(), `\"""""`) { + return lx.errorf(`unexpected '""""""'`) + } + lx.backup() + lx.backup() + return lexMultilineString + } + + lx.backup() /// backup: don't include the """ in the item. + lx.backup() + lx.backup() + lx.emit(itemMultilineString) + lx.next() /// Read over ''' again and discard it. + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + return lexMultilineString + } +} + +// lexRawString consumes a raw string. Nothing can be escaped in such a string. +// It assumes that the beginning "'" has already been consumed and ignored. +func lexRawString(lx *lexer) stateFn { + r := lx.next() + switch { + default: + return lexRawString + case r == eof: + return lx.errorf(`unexpected EOF; expected "'"`) + case isNL(r): + return lx.errorPrevLine(errLexStringNL{}) + case r == '\'': + lx.backup() + lx.emit(itemRawString) + lx.next() + lx.ignore() + return lx.pop() + } +} + +// lexMultilineRawString consumes a raw string. Nothing can be escaped in such +// a string. It assumes that the beginning ''' has already been consumed and +// ignored. +func lexMultilineRawString(lx *lexer) stateFn { + r := lx.next() + switch r { + default: + return lexMultilineRawString + case eof: + return lx.errorf(`unexpected EOF; expected "'''"`) + case '\'': + /// Found ' → try to read two more ''. + if lx.accept('\'') { + if lx.accept('\'') { + /// Peek ahead: the string can contain ' and '', including at the + /// end: '''str''''' + /// 6 or more at the end, however, is an error. + if lx.peek() == '\'' { + /// Check if we already lexed 5 's; if so we have 6 now, and + /// that's just too many man! + if strings.HasSuffix(lx.current(), "'''''") { + return lx.errorf(`unexpected "''''''"`) + } + lx.backup() + lx.backup() + return lexMultilineRawString + } + + lx.backup() /// backup: don't include the ''' in the item. + lx.backup() + lx.backup() + lx.emit(itemRawMultilineString) + lx.next() /// Read over ''' again and discard it. + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + return lexMultilineRawString + } +} + +// lexMultilineStringEscape consumes an escaped character. It assumes that the +// preceding '\\' has already been consumed. +func lexMultilineStringEscape(lx *lexer) stateFn { + if isNL(lx.next()) { /// \ escaping newline. + return lexMultilineString + } + lx.backup() + lx.push(lexMultilineString) + return lexStringEscape(lx) +} + +func lexStringEscape(lx *lexer) stateFn { + r := lx.next() + switch r { + case 'b': + fallthrough + case 't': + fallthrough + case 'n': + fallthrough + case 'f': + fallthrough + case 'r': + fallthrough + case '"': + fallthrough + case ' ', '\t': + // Inside """ .. """ strings you can use \ to escape newlines, and any + // amount of whitespace can be between the \ and \n. 
+ fallthrough + case '\\': + return lx.pop() + case 'u': + return lexShortUnicodeEscape + case 'U': + return lexLongUnicodeEscape + } + return lx.error(errLexEscape{r}) +} + +func lexShortUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 4; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected four hexadecimal digits after '\u', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + +func lexLongUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 8; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected eight hexadecimal digits after '\U', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + +// lexNumberOrDateStart processes the first character of a value which begins +// with a digit. It exists to catch values starting with '0', so that +// lexBaseNumberOrDate can differentiate base prefixed integers from other +// types. +func lexNumberOrDateStart(lx *lexer) stateFn { + r := lx.next() + switch r { + case '0': + return lexBaseNumberOrDate + } + + if !isDigit(r) { + // The only way to reach this state is if the value starts + // with a digit, so specifically treat anything else as an + // error. + return lx.errorf("expected a digit but got %q", r) + } + + return lexNumberOrDate +} + +// lexNumberOrDate consumes either an integer, float or datetime. +func lexNumberOrDate(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '-', ':': + return lexDatetime + case '_': + return lexDecimalNumber + case '.', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDatetime consumes a Datetime, to a first approximation. +// The parser validates that it matches one of the accepted formats. +func lexDatetime(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDatetime + } + switch r { + case '-', ':', 'T', 't', ' ', '.', 'Z', 'z', '+': + return lexDatetime + } + + lx.backup() + lx.emitTrim(itemDatetime) + return lx.pop() +} + +// lexHexInteger consumes a hexadecimal integer after seeing the '0x' prefix. +func lexHexInteger(lx *lexer) stateFn { + r := lx.next() + if isHexadecimal(r) { + return lexHexInteger + } + switch r { + case '_': + return lexHexInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexOctalInteger consumes an octal integer after seeing the '0o' prefix. +func lexOctalInteger(lx *lexer) stateFn { + r := lx.next() + if isOctal(r) { + return lexOctalInteger + } + switch r { + case '_': + return lexOctalInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexBinaryInteger consumes a binary integer after seeing the '0b' prefix. +func lexBinaryInteger(lx *lexer) stateFn { + r := lx.next() + if isBinary(r) { + return lexBinaryInteger + } + switch r { + case '_': + return lexBinaryInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDecimalNumber consumes a decimal float or integer. +func lexDecimalNumber(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDecimalNumber + } + switch r { + case '.', 'e', 'E': + return lexFloat + case '_': + return lexDecimalNumber + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDecimalNumber consumes the first digit of a number beginning with a sign. +// It assumes the sign has already been consumed. Values which start with a sign +// are only allowed to be decimal integers or floats. 
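+//
+// For example (an illustration, not upstream documentation), `+1_500` and
+// `-0.5` are accepted here, while `+0x10` is rejected because base prefixes
+// cannot be combined with a sign.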
+//
+// The special "nan" and "inf" values are also recognized.
+func lexDecimalNumberStart(lx *lexer) stateFn {
+	r := lx.next()
+
+	// Special error cases to give users better error messages
+	switch r {
+	case 'i':
+		if !lx.accept('n') || !lx.accept('f') {
+			return lx.errorf("invalid float: '%s'", lx.current())
+		}
+		lx.emit(itemFloat)
+		return lx.pop()
+	case 'n':
+		if !lx.accept('a') || !lx.accept('n') {
+			return lx.errorf("invalid float: '%s'", lx.current())
+		}
+		lx.emit(itemFloat)
+		return lx.pop()
+	case '0':
+		p := lx.peek()
+		switch p {
+		case 'b', 'o', 'x':
+			return lx.errorf("cannot use sign with non-decimal numbers: '%s%c'", lx.current(), p)
+		}
+	case '.':
+		return lx.errorf("floats must start with a digit, not '.'")
+	}
+
+	if isDigit(r) {
+		return lexDecimalNumber
+	}
+
+	return lx.errorf("expected a digit but got %q", r)
+}
+
+// lexBaseNumberOrDate differentiates between the possible values which
+// start with '0'. It assumes that before reaching this state, the initial '0'
+// has been consumed.
+func lexBaseNumberOrDate(lx *lexer) stateFn {
+	r := lx.next()
+	// Note: All datetimes start with at least two digits, so we don't
+	// handle date characters (':', '-', etc.) here.
+	if isDigit(r) {
+		return lexNumberOrDate
+	}
+	switch r {
+	case '_':
+		// Can only be decimal, because there can't be an underscore
+		// between the '0' and the base designator, and dates can't
+		// contain underscores.
+		return lexDecimalNumber
+	case '.', 'e', 'E':
+		return lexFloat
+	case 'b':
+		r = lx.peek()
+		if !isBinary(r) {
+			return lx.errorf("not a binary number: '%s%c'", lx.current(), r)
+		}
+		return lexBinaryInteger
+	case 'o':
+		r = lx.peek()
+		if !isOctal(r) {
+			return lx.errorf("not an octal number: '%s%c'", lx.current(), r)
+		}
+		return lexOctalInteger
+	case 'x':
+		r = lx.peek()
+		if !isHexadecimal(r) {
+			return lx.errorf("not a hexadecimal number: '%s%c'", lx.current(), r)
+		}
+		return lexHexInteger
+	}
+
+	lx.backup()
+	lx.emit(itemInteger)
+	return lx.pop()
+}
+
+// lexFloat consumes the elements of a float. It allows any sequence of
+// float-like characters, so floats emitted by the lexer are only a first
+// approximation and must be validated by the parser.
+func lexFloat(lx *lexer) stateFn {
+	r := lx.next()
+	if isDigit(r) {
+		return lexFloat
+	}
+	switch r {
+	case '_', '.', '-', '+', 'e', 'E':
+		return lexFloat
+	}
+
+	lx.backup()
+	lx.emit(itemFloat)
+	return lx.pop()
+}
+
+// lexBool consumes a bool string: 'true' or 'false'.
+func lexBool(lx *lexer) stateFn {
+	var rs []rune
+	for {
+		r := lx.next()
+		if !unicode.IsLetter(r) {
+			lx.backup()
+			break
+		}
+		rs = append(rs, r)
+	}
+	s := string(rs)
+	switch s {
+	case "true", "false":
+		lx.emit(itemBool)
+		return lx.pop()
+	}
+	return lx.errorf("expected value but found %q instead", s)
+}
+
+// lexCommentStart begins the lexing of a comment. It will emit
+// itemCommentStart and consume no characters, passing control to lexComment.
+func lexCommentStart(lx *lexer) stateFn {
+	lx.ignore()
+	lx.emit(itemCommentStart)
+	return lexComment
+}
+
+// lexComment lexes an entire comment. It assumes that '#' has been consumed.
+// It will consume *up to* the first newline character, and pass control
+// back to the last state on the stack.
+func lexComment(lx *lexer) stateFn {
+	switch r := lx.next(); {
+	case isNL(r) || r == eof:
+		lx.backup()
+		lx.emit(itemText)
+		return lx.pop()
+	default:
+		return lexComment
+	}
+}
+
+// lexSkip ignores all slurped input and moves on to the next state.
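+//
+// Typical use in the states above (illustrative): `return lexSkip(lx, lexTop)`
+// drops leading whitespace and continues lexing at the top level.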
+func lexSkip(lx *lexer, nextState stateFn) stateFn { + lx.ignore() + return nextState +} + +func (s stateFn) String() string { + name := runtime.FuncForPC(reflect.ValueOf(s).Pointer()).Name() + if i := strings.LastIndexByte(name, '.'); i > -1 { + name = name[i+1:] + } + if s == nil { + name = "" + } + return name + "()" +} + +func (itype itemType) String() string { + switch itype { + case itemError: + return "Error" + case itemNIL: + return "NIL" + case itemEOF: + return "EOF" + case itemText: + return "Text" + case itemString, itemRawString, itemMultilineString, itemRawMultilineString: + return "String" + case itemBool: + return "Bool" + case itemInteger: + return "Integer" + case itemFloat: + return "Float" + case itemDatetime: + return "DateTime" + case itemTableStart: + return "TableStart" + case itemTableEnd: + return "TableEnd" + case itemKeyStart: + return "KeyStart" + case itemKeyEnd: + return "KeyEnd" + case itemArray: + return "Array" + case itemArrayEnd: + return "ArrayEnd" + case itemCommentStart: + return "CommentStart" + case itemInlineTableStart: + return "InlineTableStart" + case itemInlineTableEnd: + return "InlineTableEnd" + } + panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) +} + +func (item item) String() string { + return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) +} + +func isWhitespace(r rune) bool { return r == '\t' || r == ' ' } +func isNL(r rune) bool { return r == '\n' || r == '\r' } +func isControl(r rune) bool { // Control characters except \t, \r, \n + switch r { + case '\t', '\r', '\n': + return false + default: + return (r >= 0x00 && r <= 0x1f) || r == 0x7f + } +} +func isDigit(r rune) bool { return r >= '0' && r <= '9' } +func isBinary(r rune) bool { return r == '0' || r == '1' } +func isOctal(r rune) bool { return r >= '0' && r <= '7' } +func isHexadecimal(r rune) bool { + return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'f') || (r >= 'A' && r <= 'F') +} +func isBareKeyChar(r rune) bool { + return (r >= 'A' && r <= 'Z') || + (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' || r == '-' +} diff --git a/vendor/github.com/BurntSushi/toml/meta.go b/vendor/github.com/BurntSushi/toml/meta.go new file mode 100644 index 00000000..71847a04 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/meta.go @@ -0,0 +1,121 @@ +package toml + +import ( + "strings" +) + +// MetaData allows access to meta information about TOML data that's not +// accessible otherwise. +// +// It allows checking if a key is defined in the TOML data, whether any keys +// were undecoded, and the TOML type of a key. +type MetaData struct { + context Key // Used only during decoding. + + keyInfo map[string]keyInfo + mapping map[string]interface{} + keys []Key + decoded map[string]struct{} + data []byte // Input file; for errors. +} + +// IsDefined reports if the key exists in the TOML data. +// +// The key should be specified hierarchically, for example to access the TOML +// key "a.b.c" you would use IsDefined("a", "b", "c"). Keys are case sensitive. +// +// Returns false for an empty key. +func (md *MetaData) IsDefined(key ...string) bool { + if len(key) == 0 { + return false + } + + var ( + hash map[string]interface{} + ok bool + hashOrVal interface{} = md.mapping + ) + for _, k := range key { + if hash, ok = hashOrVal.(map[string]interface{}); !ok { + return false + } + if hashOrVal, ok = hash[k]; !ok { + return false + } + } + return true +} + +// Type returns a string representation of the type of the key specified. 
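+//
+// A small usage sketch (illustrative; `cfg` is any decode target):
+//
+//	md, _ := toml.Decode(`port = 80`, &cfg)
+//	md.Type("port") // "Integer"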
+// +// Type will return the empty string if given an empty key or a key that does +// not exist. Keys are case sensitive. +func (md *MetaData) Type(key ...string) string { + if ki, ok := md.keyInfo[Key(key).String()]; ok { + return ki.tomlType.typeString() + } + return "" +} + +// Keys returns a slice of every key in the TOML data, including key groups. +// +// Each key is itself a slice, where the first element is the top of the +// hierarchy and the last is the most specific. The list will have the same +// order as the keys appeared in the TOML data. +// +// All keys returned are non-empty. +func (md *MetaData) Keys() []Key { + return md.keys +} + +// Undecoded returns all keys that have not been decoded in the order in which +// they appear in the original TOML document. +// +// This includes keys that haven't been decoded because of a [Primitive] value. +// Once the Primitive value is decoded, the keys will be considered decoded. +// +// Also note that decoding into an empty interface will result in no decoding, +// and so no keys will be considered decoded. +// +// In this sense, the Undecoded keys correspond to keys in the TOML document +// that do not have a concrete type in your representation. +func (md *MetaData) Undecoded() []Key { + undecoded := make([]Key, 0, len(md.keys)) + for _, key := range md.keys { + if _, ok := md.decoded[key.String()]; !ok { + undecoded = append(undecoded, key) + } + } + return undecoded +} + +// Key represents any TOML key, including key groups. Use [MetaData.Keys] to get +// values of this type. +type Key []string + +func (k Key) String() string { + ss := make([]string, len(k)) + for i := range k { + ss[i] = k.maybeQuoted(i) + } + return strings.Join(ss, ".") +} + +func (k Key) maybeQuoted(i int) string { + if k[i] == "" { + return `""` + } + for _, c := range k[i] { + if !isBareKeyChar(c) { + return `"` + dblQuotedReplacer.Replace(k[i]) + `"` + } + } + return k[i] +} + +func (k Key) add(piece string) Key { + newKey := make(Key, len(k)+1) + copy(newKey, k) + newKey[len(k)] = piece + return newKey +} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go new file mode 100644 index 00000000..d2542d6f --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/parse.go @@ -0,0 +1,781 @@ +package toml + +import ( + "fmt" + "strconv" + "strings" + "time" + "unicode/utf8" + + "github.com/BurntSushi/toml/internal" +) + +type parser struct { + lx *lexer + context Key // Full key for the current hash in scope. + currentKey string // Base key name for everything except hashes. + pos Position // Current position in the TOML file. + + ordered []Key // List of keys in the order that they appear in the TOML data. + + keyInfo map[string]keyInfo // Map keyname → info about the TOML key. + mapping map[string]interface{} // Map keyname → key value. + implicits map[string]struct{} // Record implicit keys (e.g. "key.group.names"). +} + +type keyInfo struct { + pos Position + tomlType tomlType +} + +func parse(data string) (p *parser, err error) { + defer func() { + if r := recover(); r != nil { + if pErr, ok := r.(ParseError); ok { + pErr.input = data + err = pErr + return + } + panic(r) + } + }() + + // Read over BOM; do this here as the lexer calls utf8.DecodeRuneInString() + // which mangles stuff. 
+ if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") { + data = data[2:] + } + + // Examine first few bytes for NULL bytes; this probably means it's a UTF-16 + // file (second byte in surrogate pair being NULL). Again, do this here to + // avoid having to deal with UTF-8/16 stuff in the lexer. + ex := 6 + if len(data) < 6 { + ex = len(data) + } + if i := strings.IndexRune(data[:ex], 0); i > -1 { + return nil, ParseError{ + Message: "files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8", + Position: Position{Line: 1, Start: i, Len: 1}, + Line: 1, + input: data, + } + } + + p = &parser{ + keyInfo: make(map[string]keyInfo), + mapping: make(map[string]interface{}), + lx: lex(data), + ordered: make([]Key, 0), + implicits: make(map[string]struct{}), + } + for { + item := p.next() + if item.typ == itemEOF { + break + } + p.topLevel(item) + } + + return p, nil +} + +func (p *parser) panicErr(it item, err error) { + panic(ParseError{ + err: err, + Position: it.pos, + Line: it.pos.Len, + LastKey: p.current(), + }) +} + +func (p *parser) panicItemf(it item, format string, v ...interface{}) { + panic(ParseError{ + Message: fmt.Sprintf(format, v...), + Position: it.pos, + Line: it.pos.Len, + LastKey: p.current(), + }) +} + +func (p *parser) panicf(format string, v ...interface{}) { + panic(ParseError{ + Message: fmt.Sprintf(format, v...), + Position: p.pos, + Line: p.pos.Line, + LastKey: p.current(), + }) +} + +func (p *parser) next() item { + it := p.lx.nextItem() + //fmt.Printf("ITEM %-18s line %-3d │ %q\n", it.typ, it.pos.Line, it.val) + if it.typ == itemError { + if it.err != nil { + panic(ParseError{ + Position: it.pos, + Line: it.pos.Line, + LastKey: p.current(), + err: it.err, + }) + } + + p.panicItemf(it, "%s", it.val) + } + return it +} + +func (p *parser) nextPos() item { + it := p.next() + p.pos = it.pos + return it +} + +func (p *parser) bug(format string, v ...interface{}) { + panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) +} + +func (p *parser) expect(typ itemType) item { + it := p.next() + p.assertEqual(typ, it.typ) + return it +} + +func (p *parser) assertEqual(expected, got itemType) { + if expected != got { + p.bug("Expected '%s' but got '%s'.", expected, got) + } +} + +func (p *parser) topLevel(item item) { + switch item.typ { + case itemCommentStart: // # .. + p.expect(itemText) + case itemTableStart: // [ .. ] + name := p.nextPos() + + var key Key + for ; name.typ != itemTableEnd && name.typ != itemEOF; name = p.next() { + key = append(key, p.keyString(name)) + } + p.assertEqual(itemTableEnd, name.typ) + + p.addContext(key, false) + p.setType("", tomlHash, item.pos) + p.ordered = append(p.ordered, key) + case itemArrayTableStart: // [[ .. ]] + name := p.nextPos() + + var key Key + for ; name.typ != itemArrayTableEnd && name.typ != itemEOF; name = p.next() { + key = append(key, p.keyString(name)) + } + p.assertEqual(itemArrayTableEnd, name.typ) + + p.addContext(key, true) + p.setType("", tomlArrayHash, item.pos) + p.ordered = append(p.ordered, key) + case itemKeyStart: // key = .. + outerContext := p.context + /// Read all the key parts (e.g. 'a' and 'b' in 'a.b') + k := p.nextPos() + var key Key + for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { + key = append(key, p.keyString(k)) + } + p.assertEqual(itemKeyEnd, k.typ) + + /// The current key is the last part. + p.currentKey = key[len(key)-1] + + /// All the other parts (if any) are the context; need to set each part + /// as implicit. 
+ context := key[:len(key)-1] + for i := range context { + p.addImplicitContext(append(p.context, context[i:i+1]...)) + } + + /// Set value. + vItem := p.next() + val, typ := p.value(vItem, false) + p.set(p.currentKey, val, typ, vItem.pos) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + + /// Remove the context we added (preserving any context from [tbl] lines). + p.context = outerContext + p.currentKey = "" + default: + p.bug("Unexpected type at top level: %s", item.typ) + } +} + +// Gets a string for a key (or part of a key in a table name). +func (p *parser) keyString(it item) string { + switch it.typ { + case itemText: + return it.val + case itemString, itemMultilineString, + itemRawString, itemRawMultilineString: + s, _ := p.value(it, false) + return s.(string) + default: + p.bug("Unexpected key type: %s", it.typ) + } + panic("unreachable") +} + +var datetimeRepl = strings.NewReplacer( + "z", "Z", + "t", "T", + " ", "T") + +// value translates an expected value from the lexer into a Go value wrapped +// as an empty interface. +func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { + switch it.typ { + case itemString: + return p.replaceEscapes(it, it.val), p.typeOfPrimitive(it) + case itemMultilineString: + return p.replaceEscapes(it, stripFirstNewline(p.stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) + case itemRawString: + return it.val, p.typeOfPrimitive(it) + case itemRawMultilineString: + return stripFirstNewline(it.val), p.typeOfPrimitive(it) + case itemInteger: + return p.valueInteger(it) + case itemFloat: + return p.valueFloat(it) + case itemBool: + switch it.val { + case "true": + return true, p.typeOfPrimitive(it) + case "false": + return false, p.typeOfPrimitive(it) + default: + p.bug("Expected boolean value, but got '%s'.", it.val) + } + case itemDatetime: + return p.valueDatetime(it) + case itemArray: + return p.valueArray(it) + case itemInlineTableStart: + return p.valueInlineTable(it, parentIsArray) + default: + p.bug("Unexpected value type: %s", it.typ) + } + panic("unreachable") +} + +func (p *parser) valueInteger(it item) (interface{}, tomlType) { + if !numUnderscoresOK(it.val) { + p.panicItemf(it, "Invalid integer %q: underscores must be surrounded by digits", it.val) + } + if numHasLeadingZero(it.val) { + p.panicItemf(it, "Invalid integer %q: cannot have leading zeroes", it.val) + } + + num, err := strconv.ParseInt(it.val, 0, 64) + if err != nil { + // Distinguish integer values. Normally, it'd be a bug if the lexer + // provides an invalid integer, but it's possible that the number is + // out of range of valid values (which the lexer cannot determine). + // So mark the former as a bug but the latter as a legitimate user + // error. 
+ if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + p.panicErr(it, errParseRange{i: it.val, size: "int64"}) + } else { + p.bug("Expected integer value, but got '%s'.", it.val) + } + } + return num, p.typeOfPrimitive(it) +} + +func (p *parser) valueFloat(it item) (interface{}, tomlType) { + parts := strings.FieldsFunc(it.val, func(r rune) bool { + switch r { + case '.', 'e', 'E': + return true + } + return false + }) + for _, part := range parts { + if !numUnderscoresOK(part) { + p.panicItemf(it, "Invalid float %q: underscores must be surrounded by digits", it.val) + } + } + if len(parts) > 0 && numHasLeadingZero(parts[0]) { + p.panicItemf(it, "Invalid float %q: cannot have leading zeroes", it.val) + } + if !numPeriodsOK(it.val) { + // As a special case, numbers like '123.' or '1.e2', + // which are valid as far as Go/strconv are concerned, + // must be rejected because TOML says that a fractional + // part consists of '.' followed by 1+ digits. + p.panicItemf(it, "Invalid float %q: '.' must be followed by one or more digits", it.val) + } + val := strings.Replace(it.val, "_", "", -1) + if val == "+nan" || val == "-nan" { // Go doesn't support this, but TOML spec does. + val = "nan" + } + num, err := strconv.ParseFloat(val, 64) + if err != nil { + if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + p.panicErr(it, errParseRange{i: it.val, size: "float64"}) + } else { + p.panicItemf(it, "Invalid float value: %q", it.val) + } + } + return num, p.typeOfPrimitive(it) +} + +var dtTypes = []struct { + fmt string + zone *time.Location +}{ + {time.RFC3339Nano, time.Local}, + {"2006-01-02T15:04:05.999999999", internal.LocalDatetime}, + {"2006-01-02", internal.LocalDate}, + {"15:04:05.999999999", internal.LocalTime}, +} + +func (p *parser) valueDatetime(it item) (interface{}, tomlType) { + it.val = datetimeRepl.Replace(it.val) + var ( + t time.Time + ok bool + err error + ) + for _, dt := range dtTypes { + t, err = time.ParseInLocation(dt.fmt, it.val, dt.zone) + if err == nil { + ok = true + break + } + } + if !ok { + p.panicItemf(it, "Invalid TOML Datetime: %q.", it.val) + } + return t, p.typeOfPrimitive(it) +} + +func (p *parser) valueArray(it item) (interface{}, tomlType) { + p.setType(p.currentKey, tomlArray, it.pos) + + var ( + types []tomlType + + // Initialize to a non-nil empty slice. This makes it consistent with + // how S = [] decodes into a non-nil slice inside something like struct + // { S []string }. See #338 + array = []interface{}{} + ) + for it = p.next(); it.typ != itemArrayEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + val, typ := p.value(it, true) + array = append(array, val) + types = append(types, typ) + + // XXX: types isn't used here, we need it to record the accurate type + // information. + // + // Not entirely sure how to best store this; could use "key[0]", + // "key[1]" notation, or maybe store it on the Array type? + } + return array, tomlArray +} + +func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tomlType) { + var ( + hash = make(map[string]interface{}) + outerContext = p.context + outerKey = p.currentKey + ) + + p.context = append(p.context, p.currentKey) + prevContext := p.context + p.currentKey = "" + + p.addImplicit(p.context) + p.addContext(p.context, parentIsArray) + + /// Loop over all table key/value pairs. 
+ for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + /// Read all key parts. + k := p.nextPos() + var key Key + for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { + key = append(key, p.keyString(k)) + } + p.assertEqual(itemKeyEnd, k.typ) + + /// The current key is the last part. + p.currentKey = key[len(key)-1] + + /// All the other parts (if any) are the context; need to set each part + /// as implicit. + context := key[:len(key)-1] + for i := range context { + p.addImplicitContext(append(p.context, context[i:i+1]...)) + } + + /// Set the value. + val, typ := p.value(p.next(), false) + p.set(p.currentKey, val, typ, it.pos) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + hash[p.currentKey] = val + + /// Restore context. + p.context = prevContext + } + p.context = outerContext + p.currentKey = outerKey + return hash, tomlHash +} + +// numHasLeadingZero checks if this number has leading zeroes, allowing for '0', +// +/- signs, and base prefixes. +func numHasLeadingZero(s string) bool { + if len(s) > 1 && s[0] == '0' && !(s[1] == 'b' || s[1] == 'o' || s[1] == 'x') { // Allow 0b, 0o, 0x + return true + } + if len(s) > 2 && (s[0] == '-' || s[0] == '+') && s[1] == '0' { + return true + } + return false +} + +// numUnderscoresOK checks whether each underscore in s is surrounded by +// characters that are not underscores. +func numUnderscoresOK(s string) bool { + switch s { + case "nan", "+nan", "-nan", "inf", "-inf", "+inf": + return true + } + accept := false + for _, r := range s { + if r == '_' { + if !accept { + return false + } + } + + // isHexadecimal is a superset of all the permissable characters + // surrounding an underscore. + accept = isHexadecimal(r) + } + return accept +} + +// numPeriodsOK checks whether every period in s is followed by a digit. +func numPeriodsOK(s string) bool { + period := false + for _, r := range s { + if period && !isDigit(r) { + return false + } + period = r == '.' + } + return !period +} + +// Set the current context of the parser, where the context is either a hash or +// an array of hashes, depending on the value of the `array` parameter. +// +// Establishing the context also makes sure that the key isn't a duplicate, and +// will create implicit hashes automatically. +func (p *parser) addContext(key Key, array bool) { + var ok bool + + // Always start at the top level and drill down for our context. + hashContext := p.mapping + keyContext := make(Key, 0) + + // We only need implicit hashes for key[0:-1] + for _, k := range key[0 : len(key)-1] { + _, ok = hashContext[k] + keyContext = append(keyContext, k) + + // No key? Make an implicit hash and move on. + if !ok { + p.addImplicit(keyContext) + hashContext[k] = make(map[string]interface{}) + } + + // If the hash context is actually an array of tables, then set + // the hash context to the last element in that array. + // + // Otherwise, it better be a table, since this MUST be a key group (by + // virtue of it not being the last element in a key). + switch t := hashContext[k].(type) { + case []map[string]interface{}: + hashContext = t[len(t)-1] + case map[string]interface{}: + hashContext = t + default: + p.panicf("Key '%s' was already created as a hash.", keyContext) + } + } + + p.context = keyContext + if array { + // If this is the first element for this array, then allocate a new + // list of tables for it. 
+ k := key[len(key)-1] + if _, ok := hashContext[k]; !ok { + hashContext[k] = make([]map[string]interface{}, 0, 4) + } + + // Add a new table. But make sure the key hasn't already been used + // for something else. + if hash, ok := hashContext[k].([]map[string]interface{}); ok { + hashContext[k] = append(hash, make(map[string]interface{})) + } else { + p.panicf("Key '%s' was already created and cannot be used as an array.", key) + } + } else { + p.setValue(key[len(key)-1], make(map[string]interface{})) + } + p.context = append(p.context, key[len(key)-1]) +} + +// set calls setValue and setType. +func (p *parser) set(key string, val interface{}, typ tomlType, pos Position) { + p.setValue(key, val) + p.setType(key, typ, pos) + +} + +// setValue sets the given key to the given value in the current context. +// It will make sure that the key hasn't already been defined, account for +// implicit key groups. +func (p *parser) setValue(key string, value interface{}) { + var ( + tmpHash interface{} + ok bool + hash = p.mapping + keyContext Key + ) + for _, k := range p.context { + keyContext = append(keyContext, k) + if tmpHash, ok = hash[k]; !ok { + p.bug("Context for key '%s' has not been established.", keyContext) + } + switch t := tmpHash.(type) { + case []map[string]interface{}: + // The context is a table of hashes. Pick the most recent table + // defined as the current hash. + hash = t[len(t)-1] + case map[string]interface{}: + hash = t + default: + p.panicf("Key '%s' has already been defined.", keyContext) + } + } + keyContext = append(keyContext, key) + + if _, ok := hash[key]; ok { + // Normally redefining keys isn't allowed, but the key could have been + // defined implicitly and it's allowed to be redefined concretely. (See + // the `valid/implicit-and-explicit-after.toml` in toml-test) + // + // But we have to make sure to stop marking it as an implicit. (So that + // another redefinition provokes an error.) + // + // Note that since it has already been defined (as a hash), we don't + // want to overwrite it. So our business is done. + if p.isArray(keyContext) { + p.removeImplicit(keyContext) + hash[key] = value + return + } + if p.isImplicit(keyContext) { + p.removeImplicit(keyContext) + return + } + + // Otherwise, we have a concrete key trying to override a previous + // key, which is *always* wrong. + p.panicf("Key '%s' has already been defined.", keyContext) + } + + hash[key] = value +} + +// setType sets the type of a particular value at a given key. It should be +// called immediately AFTER setValue. +// +// Note that if `key` is empty, then the type given will be applied to the +// current context (which is either a table or an array of tables). +func (p *parser) setType(key string, typ tomlType, pos Position) { + keyContext := make(Key, 0, len(p.context)+1) + keyContext = append(keyContext, p.context...) + if len(key) > 0 { // allow type setting for hashes + keyContext = append(keyContext, key) + } + // Special case to make empty keys ("" = 1) work. + // Without it it will set "" rather than `""`. + // TODO: why is this needed? And why is this only needed here? + if len(keyContext) == 0 { + keyContext = Key{""} + } + p.keyInfo[keyContext.String()] = keyInfo{tomlType: typ, pos: pos} +} + +// Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and +// "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly). 
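+//
+// For example (illustrative): after `a.b.c = 1`, the keys "a" and "a.b" are
+// recorded as implicit, so a later explicit `[a.b]` header does not count as
+// a duplicate definition.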
+func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = struct{}{} } +func (p *parser) removeImplicit(key Key) { delete(p.implicits, key.String()) } +func (p *parser) isImplicit(key Key) bool { _, ok := p.implicits[key.String()]; return ok } +func (p *parser) isArray(key Key) bool { return p.keyInfo[key.String()].tomlType == tomlArray } +func (p *parser) addImplicitContext(key Key) { + p.addImplicit(key) + p.addContext(key, false) +} + +// current returns the full key name of the current context. +func (p *parser) current() string { + if len(p.currentKey) == 0 { + return p.context.String() + } + if len(p.context) == 0 { + return p.currentKey + } + return fmt.Sprintf("%s.%s", p.context, p.currentKey) +} + +func stripFirstNewline(s string) string { + if len(s) > 0 && s[0] == '\n' { + return s[1:] + } + if len(s) > 1 && s[0] == '\r' && s[1] == '\n' { + return s[2:] + } + return s +} + +// Remove newlines inside triple-quoted strings if a line ends with "\". +func (p *parser) stripEscapedNewlines(s string) string { + split := strings.Split(s, "\n") + if len(split) < 1 { + return s + } + + escNL := false // Keep track of the last non-blank line was escaped. + for i, line := range split { + line = strings.TrimRight(line, " \t\r") + + if len(line) == 0 || line[len(line)-1] != '\\' { + split[i] = strings.TrimRight(split[i], "\r") + if !escNL && i != len(split)-1 { + split[i] += "\n" + } + continue + } + + escBS := true + for j := len(line) - 1; j >= 0 && line[j] == '\\'; j-- { + escBS = !escBS + } + if escNL { + line = strings.TrimLeft(line, " \t\r") + } + escNL = !escBS + + if escBS { + split[i] += "\n" + continue + } + + if i == len(split)-1 { + p.panicf("invalid escape: '\\ '") + } + + split[i] = line[:len(line)-1] // Remove \ + if len(split)-1 > i { + split[i+1] = strings.TrimLeft(split[i+1], " \t\r") + } + } + return strings.Join(split, "") +} + +func (p *parser) replaceEscapes(it item, str string) string { + replaced := make([]rune, 0, len(str)) + s := []byte(str) + r := 0 + for r < len(s) { + if s[r] != '\\' { + c, size := utf8.DecodeRune(s[r:]) + r += size + replaced = append(replaced, c) + continue + } + r += 1 + if r >= len(s) { + p.bug("Escape sequence at end of string.") + return "" + } + switch s[r] { + default: + p.bug("Expected valid escape code after \\, but got %q.", s[r]) + case ' ', '\t': + p.panicItemf(it, "invalid escape: '\\%c'", s[r]) + case 'b': + replaced = append(replaced, rune(0x0008)) + r += 1 + case 't': + replaced = append(replaced, rune(0x0009)) + r += 1 + case 'n': + replaced = append(replaced, rune(0x000A)) + r += 1 + case 'f': + replaced = append(replaced, rune(0x000C)) + r += 1 + case 'r': + replaced = append(replaced, rune(0x000D)) + r += 1 + case '"': + replaced = append(replaced, rune(0x0022)) + r += 1 + case '\\': + replaced = append(replaced, rune(0x005C)) + r += 1 + case 'u': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+5). (Because the lexer guarantees this + // for us.) + escaped := p.asciiEscapeToUnicode(it, s[r+1:r+5]) + replaced = append(replaced, escaped) + r += 5 + case 'U': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+9). (Because the lexer guarantees this + // for us.) 
+ escaped := p.asciiEscapeToUnicode(it, s[r+1:r+9]) + replaced = append(replaced, escaped) + r += 9 + } + } + return string(replaced) +} + +func (p *parser) asciiEscapeToUnicode(it item, bs []byte) rune { + s := string(bs) + hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) + if err != nil { + p.bug("Could not parse '%s' as a hexadecimal number, but the lexer claims it's OK: %s", s, err) + } + if !utf8.ValidRune(rune(hex)) { + p.panicItemf(it, "Escaped character '\\u%s' is not valid UTF-8.", s) + } + return rune(hex) +} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go new file mode 100644 index 00000000..254ca82e --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_fields.go @@ -0,0 +1,242 @@ +package toml + +// Struct field handling is adapted from code in encoding/json: +// +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the Go distribution. + +import ( + "reflect" + "sort" + "sync" +) + +// A field represents a single field found in a struct. +type field struct { + name string // the name of the field (`toml` tag included) + tag bool // whether field has a `toml` tag + index []int // represents the depth of an anonymous field + typ reflect.Type // the type of the field +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from toml tag", then +// breaking ties with index sequence. +type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that TOML should recognize for the given +// type. The algorithm is breadth-first search over the set of structs to +// include - the top struct and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + var count map[reflect.Type]int + var nextCount map[reflect.Type]int + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. 
+ for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" && !sf.Anonymous { // unexported + continue + } + opts := getOptions(sf.Tag) + if opts.skip { + continue + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := opts.name != "" + name := opts.name + if name == "" { + name = sf.Name + } + fields = append(fields, field{name, tagged, index, ft}) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + f := field{name: ft.Name(), index: index, typ: ft} + next = append(next, f) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with TOML tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// TOML tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. + length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. + if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. 
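+//
+// The pattern below (read-lock, compute outside the lock, then write-lock)
+// may duplicate work when two goroutines miss the cache at the same time, but
+// typeFields is deterministic, so storing either result is safe.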
+func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} diff --git a/vendor/github.com/BurntSushi/toml/type_toml.go b/vendor/github.com/BurntSushi/toml/type_toml.go new file mode 100644 index 00000000..4e90d773 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_toml.go @@ -0,0 +1,70 @@ +package toml + +// tomlType represents any Go type that corresponds to a TOML type. +// While the first draft of the TOML spec has a simplistic type system that +// probably doesn't need this level of sophistication, we seem to be militating +// toward adding real composite types. +type tomlType interface { + typeString() string +} + +// typeEqual accepts any two types and returns true if they are equal. +func typeEqual(t1, t2 tomlType) bool { + if t1 == nil || t2 == nil { + return false + } + return t1.typeString() == t2.typeString() +} + +func typeIsTable(t tomlType) bool { + return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) +} + +type tomlBaseType string + +func (btype tomlBaseType) typeString() string { + return string(btype) +} + +func (btype tomlBaseType) String() string { + return btype.typeString() +} + +var ( + tomlInteger tomlBaseType = "Integer" + tomlFloat tomlBaseType = "Float" + tomlDatetime tomlBaseType = "Datetime" + tomlString tomlBaseType = "String" + tomlBool tomlBaseType = "Bool" + tomlArray tomlBaseType = "Array" + tomlHash tomlBaseType = "Hash" + tomlArrayHash tomlBaseType = "ArrayHash" +) + +// typeOfPrimitive returns a tomlType of any primitive value in TOML. +// Primitive values are: Integer, Float, Datetime, String and Bool. +// +// Passing a lexer item other than the following will cause a BUG message +// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. +func (p *parser) typeOfPrimitive(lexItem item) tomlType { + switch lexItem.typ { + case itemInteger: + return tomlInteger + case itemFloat: + return tomlFloat + case itemDatetime: + return tomlDatetime + case itemString: + return tomlString + case itemMultilineString: + return tomlString + case itemRawString: + return tomlString + case itemRawMultilineString: + return tomlString + case itemBool: + return tomlBool + } + p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) + panic("unreachable") +} diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/LICENSE b/vendor/github.com/Kunde21/markdownfmt/v3/LICENSE new file mode 100644 index 00000000..972d527d --- /dev/null +++ b/vendor/github.com/Kunde21/markdownfmt/v3/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014 Dmitri Shuralyov +Copyright (c) 2021 Chad Kunde + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/markdown/doc.go b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/doc.go
new file mode 100644
index 00000000..5579f1fd
--- /dev/null
+++ b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/doc.go
@@ -0,0 +1,2 @@
+// Package markdown renders the given goldmark AST to Markdown.
+package markdown
diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/markdown/indent.go b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/indent.go
new file mode 100644
index 00000000..875fac97
--- /dev/null
+++ b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/indent.go
@@ -0,0 +1,61 @@
+package markdown
+
+// indentation tracks indentation data for a markdown writer.
+type indentation struct {
+	// Indentation is composed of multiple sections of indentation.
+	// indents tracks the combined full indentation,
+	// and lengths tracks the length of each appended section.
+	indents []byte
+	lengths []int
+
+	// index at which trailing spaces start in indents.
+	trailSpaceIdx int
+}
+
+// Indent reports the fixed text prefix pushed so far.
+//
+// Invariant: This does not end with whitespace.
+func (id *indentation) Indent() []byte {
+	return id.indents[:id.trailSpaceIdx]
+}
+
+// Whitespace reports the trailing whitespace of the indentation pushed so far.
+func (id *indentation) Whitespace() []byte {
+	return id.indents[id.trailSpaceIdx:]
+}
+
+// Push adds a block of text to the indentation stack.
+//
+// Indent and Whitespace will report this in consecutive calls.
+func (id *indentation) Push(bs []byte) {
+	id.indents = append(id.indents, bs...)
+	id.lengths = append(id.lengths, len(bs))
+	id.trailSpaceIdx = trailingSpaceIdx(id.indents)
+}
+
+// Pop removes the last pushed block of text from the stack.
+func (id *indentation) Pop() {
+	count := len(id.lengths)
+	if count == 0 {
+		panic("bug: indentation.Pop called for empty indentation")
+	}
+	lastLen := id.lengths[count-1]
+
+	id.lengths = id.lengths[:count-1]
+	id.indents = id.indents[:len(id.indents)-lastLen]
+	id.trailSpaceIdx = trailingSpaceIdx(id.indents)
+}
+
+// trailingSpaceIdx returns the index at which trailing space
+// starts in the given byte slice.
+//
+// Returns 0 if the slice is entirely whitespace,
+// and len(bs) if the slice is entirely non-whitespace.
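+//
+// For example (illustrative), trailingSpaceIdx([]byte("> ")) returns 1:
+// the blockquote prefix "> " carries one byte of trailing space.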
+func trailingSpaceIdx(bs []byte) int { + for idx := len(bs); idx > 0; idx-- { + if bs[idx-1] != ' ' { + return idx + } + } + return 0 +} diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer.go b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer.go new file mode 100644 index 00000000..99d66676 --- /dev/null +++ b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer.go @@ -0,0 +1,646 @@ +package markdown + +import ( + "bytes" + "fmt" + "go/format" + "io" + "strconv" + "unicode/utf8" + "unsafe" + + "github.com/yuin/goldmark/ast" + extAST "github.com/yuin/goldmark/extension/ast" + "github.com/yuin/goldmark/renderer" + "github.com/yuin/goldmark/text" +) + +var ( + newLineChar = []byte{'\n'} + spaceChar = []byte{' '} + strikeThroughChars = []byte("~~") + thematicBreakChars = []byte("---") + blockquoteChars = []byte{'>', ' '} + codeBlockChars = []byte("```") + tableHeaderColChar = []byte{'-'} + tableHeaderAlignColChar = []byte{':'} + heading1UnderlineChar = []byte{'='} + heading2UnderlineChar = []byte{'-'} + fourSpacesChars = bytes.Repeat([]byte{' '}, 4) +) + +// Ensure compatibility with Goldmark parser. +var _ renderer.Renderer = &Renderer{} + +// Renderer allows to render markdown AST into markdown bytes in consistent format. +// Render is reusable across Renders, it holds configuration only. +type Renderer struct { + underlineHeadings bool + softWraps bool + emphToken []byte + strongToken []byte // if nil, use emphToken*2 + listIndentStyle ListIndentStyle + + // language name => format function + formatters map[string]func([]byte) []byte +} + +// AddOptions pulls Markdown renderer specific options from the given list, +// and applies them to the renderer. +func (mr *Renderer) AddOptions(opts ...renderer.Option) { + mdopts := make([]Option, 0, len(opts)) + for _, o := range opts { + if mo, ok := o.(Option); ok { + mdopts = append(mdopts, mo) + } + } + mr.AddMarkdownOptions(mdopts...) +} + +// AddMarkdownOptions modifies the Renderer with the given options. +func (mr *Renderer) AddMarkdownOptions(opts ...Option) { + for _, o := range opts { + o.apply(mr) + } +} + +// Option customizes the behavior of the markdown renderer. +type Option interface { + renderer.Option + + apply(r *Renderer) +} + +type optionFunc func(*Renderer) + +func (f optionFunc) SetConfig(*renderer.Config) {} + +func (f optionFunc) apply(r *Renderer) { + f(r) +} + +// WithUnderlineHeadings configures the renderer to use +// Setext-style headers (=== and ---). +func WithUnderlineHeadings() Option { + return optionFunc(func(r *Renderer) { + r.underlineHeadings = true + }) +} + +// WithSoftWraps allows you to wrap lines even on soft line breaks. +func WithSoftWraps() Option { + return optionFunc(func(r *Renderer) { + r.softWraps = true + }) +} + +// WithEmphasisToken specifies the character used to wrap emphasised text. +// Per the CommonMark spec, valid values are '*' and '_'. +// +// Defaults to '*'. +func WithEmphasisToken(c rune) Option { + return optionFunc(func(r *Renderer) { + buf := make([]byte, 4) // enough to encode any utf8 rune + n := utf8.EncodeRune(buf, c) + r.emphToken = buf[:n] + }) +} + +// WithStrongToken specifies the string used to wrap bold text. +// Per the CommonMark spec, valid values are '**' and '__'. +// +// Defaults to repeating the emphasis token twice. +// See [WithEmphasisToken] for how to change that. 
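+//
+// Illustrative use:
+//
+//	r := NewRenderer()
+//	r.AddMarkdownOptions(WithStrongToken("__"))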
+func WithStrongToken(s string) Option {
+ return optionFunc(func(r *Renderer) {
+ r.strongToken = []byte(s)
+ })
+}
+
+// ListIndentStyle specifies how items nested inside lists
+// should be indented.
+type ListIndentStyle int
+
+const (
+ // ListIndentAligned specifies that items inside a list item
+ // should be aligned to the content in the first item.
+ //
+ // - First paragraph.
+ //
+ // Second paragraph aligned with the first.
+ //
+ // This applies to ordered lists too.
+ //
+ // 1. First paragraph.
+ //
+ // Second paragraph aligned with the first.
+ //
+ // ...
+ //
+ // 10. Contents.
+ //
+ // Long lists indent content further.
+ //
+ // This is the default.
+ ListIndentAligned ListIndentStyle = iota
+
+ // ListIndentUniform specifies that items inside a list item
+ // should be aligned uniformly with 4 spaces.
+ //
+ // For example:
+ //
+ // - First paragraph.
+ //
+ // Second paragraph indented 4 spaces.
+ //
+ // For ordered lists:
+ //
+ // 1. First paragraph.
+ //
+ // Second paragraph indented 4 spaces.
+ //
+ // ...
+ //
+ // 10. Contents.
+ //
+ // Always indented 4 spaces.
+ ListIndentUniform
+)
+
+// WithListIndentStyle specifies how contents nested under a list item
+// should be indented.
+//
+// Defaults to [ListIndentAligned].
+func WithListIndentStyle(style ListIndentStyle) Option {
+ return optionFunc(func(r *Renderer) {
+ r.listIndentStyle = style
+ })
+}
+
+// CodeFormatter reformats code samples found in the document,
+// matching them by name.
+type CodeFormatter struct {
+ // Name of the language.
+ Name string
+
+ // Aliases for the language, if any.
+ Aliases []string
+
+ // Function to format the code snippet.
+ // In case of errors, format functions should typically return
+ // the original string unchanged.
+ Format func([]byte) []byte
+}
+
+// GoCodeFormatter is a [CodeFormatter] that reformats Go source code inside
+// fenced code blocks tagged with 'go' or 'Go'.
+//
+// ```go
+// func main() {
+// }
+// ```
+//
+// Supply it to the renderer with [WithCodeFormatters].
+var GoCodeFormatter = CodeFormatter{
+ Name: "go",
+ Aliases: []string{"Go"},
+ Format: formatGo,
+}
+
+func formatGo(src []byte) []byte {
+ gofmt, err := format.Source(src)
+ if err != nil {
+ // We don't surface gofmt errors:
+ // if the code doesn't compile, we simply
+ // leave it unformatted, with no warning.
+ return src
+ }
+ return gofmt
+}
+
+// WithCodeFormatters changes the functions used to reformat code blocks found
+// in the original file.
+//
+// formatters := []markdown.CodeFormatter{
+// markdown.GoCodeFormatter,
+// // ...
+// }
+// r := NewRenderer()
+// r.AddMarkdownOptions(WithCodeFormatters(formatters...))
+//
+// Defaults to empty.
+func WithCodeFormatters(fs ...CodeFormatter) Option {
+ return optionFunc(func(r *Renderer) {
+ formatters := make(map[string]func([]byte) []byte, len(fs))
+ for _, f := range fs {
+ formatters[f.Name] = f.Format
+ for _, alias := range f.Aliases {
+ formatters[alias] = f.Format
+ }
+ }
+ r.formatters = formatters
+ })
+}
+
+// NewRenderer builds a new Markdown renderer with default settings.
+// To use this with goldmark.Markdown, use the goldmark.WithRenderer option.
+//
+// r := markdown.NewRenderer()
+// md := goldmark.New(goldmark.WithRenderer(r))
+// md.Convert(src, w)
+//
+// Alternatively, you can call [Renderer.Render] directly.
+//
+// r := markdown.NewRenderer()
+// r.Render(w, src, node)
+//
+// Use [Renderer.AddMarkdownOptions] to customize the output of the renderer.
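+//
+// For example, to render lists with uniform four-space indentation:
+//
+// r := markdown.NewRenderer()
+// r.AddMarkdownOptions(markdown.WithListIndentStyle(markdown.ListIndentUniform))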
+func NewRenderer() *Renderer {
+ return &Renderer{
+ emphToken: []byte{'*'},
+ // Leave strongToken as nil by default.
+ // At render time, we'll use what was specified,
+ // or repeat emphToken twice to get the strong token.
+ }
+}
+
+// render represents a single markdown rendering operation.
+type render struct {
+ mr *Renderer
+
+ emphToken []byte
+ strongToken []byte
+
+ // TODO(bwplotka): Wrap it with something that catches errors.
+ w *lineIndentWriter
+ source []byte
+}
+
+func (mr *Renderer) newRender(w io.Writer, source []byte) *render {
+ strongToken := mr.strongToken
+ if len(strongToken) == 0 {
+ strongToken = bytes.Repeat(mr.emphToken, 2)
+ }
+
+ return &render{
+ mr: mr,
+ w: wrapWithLineIndentWriter(w),
+ source: source,
+ strongToken: strongToken,
+ emphToken: mr.emphToken,
+ }
+}
+
+// Render renders the given AST node to the given writer,
+// given the original source from which the node was parsed.
+//
+// NOTE: This is the entry point used by Goldmark.
+func (mr *Renderer) Render(w io.Writer, source []byte, node ast.Node) error {
+ // Perform DFS.
+ return ast.Walk(node, mr.newRender(w, source).renderNode)
+}
+
+func (r *render) renderNode(node ast.Node, entering bool) (ast.WalkStatus, error) {
+ if entering && node.PreviousSibling() != nil {
+ switch node.(type) {
+ // Most block types (with a few exceptions) are preceded by two
+ // newlines when they are not the first sibling.
+ case *ast.Paragraph, *ast.Heading, *ast.FencedCodeBlock,
+ *ast.CodeBlock, *ast.ThematicBreak, *extAST.Table,
+ *ast.Blockquote:
+ _, _ = r.w.Write(newLineChar)
+ _, _ = r.w.Write(newLineChar)
+ case *ast.List, *ast.HTMLBlock:
+ _, _ = r.w.Write(newLineChar)
+ if node.HasBlankPreviousLines() {
+ _, _ = r.w.Write(newLineChar)
+ }
+ case *ast.ListItem:
+ // TODO(bwplotka): Handle tight/loose rule explicitly.
+ // See: https://github.github.com/gfm/#loose
+ if node.HasBlankPreviousLines() {
+ _, _ = r.w.Write(newLineChar)
+ }
+ }
+ }
+
+ switch tnode := node.(type) {
+ case *ast.Document:
+ if entering {
+ break
+ }
+
+ _, _ = r.w.Write(newLineChar)
+
+ // Spans, meaning no newlines before or after.
+ case *ast.Text:
+ if entering {
+ text := tnode.Segment.Value(r.source)
+ _ = writeClean(r.w, text)
+ break
+ }
+
+ if tnode.SoftLineBreak() {
+ char := spaceChar
+ if r.mr.softWraps {
+ char = newLineChar
+ }
+ _, _ = r.w.Write(char)
+ }
+
+ if tnode.HardLineBreak() {
+ if tnode.SoftLineBreak() {
+ _, _ = r.w.Write(spaceChar)
+ }
+ _, _ = r.w.Write(newLineChar)
+ }
+ case *ast.String:
+ if entering {
+ _, _ = r.w.Write(tnode.Value)
+ }
+ case *ast.AutoLink:
+ // We treat an autolink as a normal string.
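+ // That is, the label is written out verbatim rather than rebuilt as link syntax.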
+ if entering {
+ _, _ = r.w.Write(tnode.Label(r.source))
+ }
+ case *extAST.TaskCheckBox:
+ if !entering {
+ break
+ }
+ if tnode.IsChecked {
+ _, _ = r.w.Write([]byte("[X] "))
+ break
+ }
+ _, _ = r.w.Write([]byte("[ ] "))
+ case *ast.CodeSpan:
+ if entering {
+ _, _ = r.w.Write([]byte{'`'})
+ break
+ }
+
+ _, _ = r.w.Write([]byte{'`'})
+ case *extAST.Strikethrough:
+ return r.wrapNonEmptyContentWith(strikeThroughChars, entering), nil
+ case *ast.Emphasis:
+ var emWrapper []byte
+ switch tnode.Level {
+ case 1:
+ emWrapper = r.emphToken
+ case 2:
+ emWrapper = r.strongToken
+ default:
+ emWrapper = bytes.Repeat(r.emphToken, tnode.Level)
+ }
+ return r.wrapNonEmptyContentWith(emWrapper, entering), nil
+ case *ast.Link:
+ if entering {
+ r.w.AddIndentOnFirstWrite([]byte("["))
+ break
+ }
+
+ _, _ = fmt.Fprintf(r.w, "](%s", tnode.Destination)
+ if len(tnode.Title) > 0 {
+ _, _ = fmt.Fprintf(r.w, ` "%s"`, tnode.Title)
+ }
+ _, _ = r.w.Write([]byte{')'})
+ case *ast.Image:
+ if entering {
+ r.w.AddIndentOnFirstWrite([]byte("!["))
+ break
+ }
+
+ _, _ = fmt.Fprintf(r.w, "](%s", tnode.Destination)
+ if len(tnode.Title) > 0 {
+ _, _ = fmt.Fprintf(r.w, ` "%s"`, tnode.Title)
+ }
+ _, _ = r.w.Write([]byte{')'})
+ case *ast.RawHTML:
+ if !entering {
+ break
+ }
+
+ for i := 0; i < tnode.Segments.Len(); i++ {
+ segment := tnode.Segments.At(i)
+ _, _ = r.w.Write(segment.Value(r.source))
+ }
+ return ast.WalkSkipChildren, nil
+
+ // Blocks.
+ case *ast.Paragraph, *ast.TextBlock, *ast.List, *extAST.TableCell:
+ // Things that have no content, just child elements, go here.
+ break
+ case *ast.Heading:
+ if !entering {
+ break
+ }
+
+ // Render it straight away. No nested headings are supported and we expect
+ // headings to have limited content, so we limit the walk.
+ if err := r.renderHeading(tnode); err != nil {
+ return ast.WalkStop, fmt.Errorf("rendering heading: %w", err)
+ }
+ return ast.WalkSkipChildren, nil
+ case *ast.HTMLBlock:
+ if !entering {
+ break
+ }
+
+ var segments []text.Segment
+ for i := 0; i < node.Lines().Len(); i++ {
+ segments = append(segments, node.Lines().At(i))
+ }
+
+ if tnode.ClosureLine.Len() != 0 {
+ segments = append(segments, tnode.ClosureLine)
+ }
+ for i, s := range segments {
+ o := s.Value(r.source)
+ if i == len(segments)-1 {
+ o = bytes.TrimSuffix(o, []byte("\n"))
+ }
+ _, _ = r.w.Write(o)
+ }
+ return ast.WalkSkipChildren, nil
+ case *ast.CodeBlock, *ast.FencedCodeBlock:
+ if !entering {
+ break
+ }
+
+ _, _ = r.w.Write(codeBlockChars)
+
+ var lang []byte
+ if fencedNode, isFenced := node.(*ast.FencedCodeBlock); isFenced && fencedNode.Info != nil {
+ lang = fencedNode.Info.Text(r.source)
+ _, _ = r.w.Write(lang)
+ for _, elt := range bytes.Fields(lang) {
+ elt = bytes.TrimSpace(bytes.TrimLeft(elt, ". "))
+ if len(elt) == 0 {
+ continue
+ }
+ lang = elt
+ break
+ }
+ }
+
+ _, _ = r.w.Write(newLineChar)
+ codeBuf := bytes.Buffer{}
+ for i := 0; i < tnode.Lines().Len(); i++ {
+ line := tnode.Lines().At(i)
+ _, _ = codeBuf.Write(line.Value(r.source))
+ }
+
+ if formatCode, ok := r.mr.formatters[noAllocString(lang)]; ok {
+ code := formatCode(codeBuf.Bytes())
+ if !bytes.HasSuffix(code, newLineChar) {
+ // Ensure the code sample ends with a newline.
+ code = append(code, newLineChar...)
+ }
+ _, _ = r.w.Write(code)
+ } else {
+ _, _ = r.w.Write(codeBuf.Bytes())
+ }
+
+ _, _ = r.w.Write(codeBlockChars)
+ return ast.WalkSkipChildren, nil
+ case *ast.ThematicBreak:
+ if !entering {
+ break
+ }
+
+ _, _ = r.w.Write(thematicBreakChars)
+ case *ast.Blockquote:
+ if entering {
+ r.w.PushIndent(blockquoteChars)
+ if node.Parent() != nil && node.Parent().Kind() == ast.KindListItem &&
+ node.PreviousSibling() == nil {
+ _, _ = r.w.Write(blockquoteChars)
+ }
+ } else {
+ r.w.PopIndent()
+ }
+
+ case *ast.ListItem:
+ if entering {
+ liMarker := listItemMarkerChars(tnode)
+ _, _ = r.w.Write(liMarker)
+ if r.mr.listIndentStyle == ListIndentUniform &&
+ // We can use 4 spaces for indentation only if
+ // that would still qualify as part of the list
+ // item text. e.g., given "123. foo",
+ // for content to be part of that list item,
+ // it must be indented 5 spaces.
+ //
+ // 123. foo
+ //
+ // bar
+ len(liMarker) <= len(fourSpacesChars) {
+ r.w.PushIndent(fourSpacesChars)
+ } else {
+ r.w.PushIndent(bytes.Repeat(spaceChar, len(liMarker)))
+ }
+ } else {
+ if tnode.NextSibling() != nil && tnode.NextSibling().Kind() == ast.KindListItem {
+ // Newline after list item.
+ _, _ = r.w.Write(newLineChar)
+ }
+ r.w.PopIndent()
+ }
+
+ case *extAST.Table:
+ if !entering {
+ break
+ }
+
+ // Render it straight away. No nested tables are supported and we expect
+ // tables to have limited content, so we limit the walk.
+ if err := r.renderTable(tnode); err != nil {
+ return ast.WalkStop, fmt.Errorf("rendering table: %w", err)
+ }
+ return ast.WalkSkipChildren, nil
+ case *extAST.TableRow, *extAST.TableHeader:
+ return ast.WalkStop, fmt.Errorf("%v element detected, but table should be rendered in renderTable instead", tnode.Kind())
+ default:
+ return ast.WalkStop, fmt.Errorf("detected unexpected tree type %v", tnode.Kind())
+ }
+ return ast.WalkContinue, nil
+}
+
+func (r *render) wrapNonEmptyContentWith(b []byte, entering bool) ast.WalkStatus {
+ if entering {
+ r.w.AddIndentOnFirstWrite(b)
+ return ast.WalkContinue
+ }
+
+ if r.w.WasIndentOnFirstWriteWritten() {
+ _, _ = r.w.Write(b)
+ return ast.WalkContinue
+ }
+ r.w.DelIndentOnFirstWrite(b)
+ return ast.WalkContinue
+}
+
+func listItemMarkerChars(tnode *ast.ListItem) []byte {
+ parList := tnode.Parent().(*ast.List)
+ if parList.IsOrdered() {
+ cnt := 1
+ if parList.Start != 0 {
+ cnt = parList.Start
+ }
+ s := tnode.PreviousSibling()
+ for s != nil {
+ cnt++
+ s = s.PreviousSibling()
+ }
+ return append(strconv.AppendInt(nil, int64(cnt), 10), parList.Marker, ' ')
+ }
+ return []byte{parList.Marker, spaceChar[0]}
+}
+
+func noAllocString(buf []byte) string {
+ return *(*string)(unsafe.Pointer(&buf))
+}
+
+// writeClean writes the given byte slice to the writer
+// replacing consecutive spaces, newlines, and tabs
+// with single spaces.
+func writeClean(w io.Writer, bs []byte) error {
+ // This works by scanning the byte slice,
+ // and writing sub-slices of bs
+ // as we see and skip blank sections.
+
+ var (
+ // Start of the current sub-slice to be written.
+ startIdx int
+ // Normalized last character we saw:
+ // for whitespace, this is ' ',
+ // for everything else, it's left as-is.
+ p byte
+ )
+
+ for idx, q := range bs {
+ if q == '\n' || q == '\r' || q == '\t' {
+ q = ' '
+ }
+
+ if q == ' ' {
+ if p != ' ' {
+ // Going from non-blank to blank.
+ // Write the current sub-slice and the blank.
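+ // e.g., for input "a\t b", this writes "a" here, then a single space.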
+ if _, err := w.Write(bs[startIdx:idx]); err != nil { + return err + } + if _, err := w.Write(spaceChar); err != nil { + return err + } + } + startIdx = idx + 1 + } else if p == ' ' { + // Going from blank to non-blank. + // Start a new sub-slice. + startIdx = idx + } + p = q + } + + _, err := w.Write(bs[startIdx:]) + return err +} diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_heading.go b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_heading.go new file mode 100644 index 00000000..dd700796 --- /dev/null +++ b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_heading.go @@ -0,0 +1,89 @@ +package markdown + +import ( + "bytes" + "fmt" + "sort" + "strings" + + "github.com/mattn/go-runewidth" + "github.com/yuin/goldmark/ast" +) + +func (r *render) renderHeading(node *ast.Heading) error { + underlineHeading := false + if r.mr.underlineHeadings { + underlineHeading = node.Level <= 2 + } + + if !underlineHeading { + r.w.Write(bytes.Repeat([]byte{'#'}, node.Level)) + r.w.Write(spaceChar) + } + + var headBuf bytes.Buffer + headBuf.Reset() + + for n := node.FirstChild(); n != nil; n = n.NextSibling() { + if err := ast.Walk(n, func(inner ast.Node, entering bool) (ast.WalkStatus, error) { + if entering { + if err := ast.Walk(inner, r.mr.newRender(&headBuf, r.source).renderNode); err != nil { + return ast.WalkStop, err + } + } + return ast.WalkSkipChildren, nil + }); err != nil { + return err + } + } + a := node.Attributes() + sort.SliceStable(a, func(i, j int) bool { + switch { + case bytes.Equal(a[i].Name, []byte("id")): + return true + case bytes.Equal(a[j].Name, []byte("id")): + return false + case bytes.Equal(a[i].Name, []byte("class")): + return true + case bytes.Equal(a[j].Name, []byte("class")): + return false + } + return bytes.Compare(a[i].Name, a[j].Name) == -1 + }) + + hAttr := []string{} + for _, attr := range node.Attributes() { + switch string(attr.Name) { + case "id": + hAttr = append(hAttr, fmt.Sprintf("#%s", attr.Value)) + case "class": + hAttr = append(hAttr, strings.ReplaceAll(fmt.Sprintf(".%s", attr.Value), " ", " .")) + default: + if attr.Value == nil { + hAttr = append(hAttr, string(attr.Name)) + continue + } + hAttr = append(hAttr, fmt.Sprintf("%s=%s", string(attr.Name), attr.Value)) + } + } + if len(hAttr) != 0 { + _, _ = fmt.Fprintf(&headBuf, " {%s}", strings.Join(hAttr, " ")) + } + + _, _ = r.w.Write(headBuf.Bytes()) + + if underlineHeading { + width := runewidth.StringWidth(headBuf.String()) + + _, _ = r.w.Write(newLineChar) + + switch node.Level { + case 1: + r.w.Write(bytes.Repeat(heading1UnderlineChar, width)) + case 2: + r.w.Write(bytes.Repeat(heading2UnderlineChar, width)) + } + } + + return nil +} diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_table.go b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_table.go new file mode 100644 index 00000000..09dba2e2 --- /dev/null +++ b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/renderer_table.go @@ -0,0 +1,140 @@ +package markdown + +import ( + "bytes" + "fmt" + + "github.com/mattn/go-runewidth" + "github.com/yuin/goldmark/ast" + extAST "github.com/yuin/goldmark/extension/ast" +) + +func (r *render) renderTable(node *extAST.Table) error { + var ( + columnAligns []extAST.Alignment + columnWidths []int + colIndex int + cellBuf bytes.Buffer + ) + + // Walk tree initially to count column widths and alignments. 
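+ // Cells are rendered into a scratch buffer to measure them;
+ // a second pass below writes the actual table.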
+ for n := node.FirstChild(); n != nil; n = n.NextSibling() { + if err := ast.Walk(n, func(inner ast.Node, entering bool) (ast.WalkStatus, error) { + switch tnode := inner.(type) { + case *extAST.TableRow, *extAST.TableHeader: + if entering { + colIndex = 0 + } + case *extAST.TableCell: + if entering { + if _, isHeader := tnode.Parent().(*extAST.TableHeader); isHeader { + columnAligns = append(columnAligns, tnode.Alignment) + } + + cellBuf.Reset() + if err := ast.Walk(tnode, r.mr.newRender(&cellBuf, r.source).renderNode); err != nil { + return ast.WalkStop, err + } + width := runewidth.StringWidth(cellBuf.String()) + if len(columnWidths) <= colIndex { + columnWidths = append(columnWidths, width) + } else if width > columnWidths[colIndex] { + columnWidths[colIndex] = width + } + colIndex++ + return ast.WalkSkipChildren, nil + } + default: + return ast.WalkStop, fmt.Errorf("detected unexpected tree type %v", tnode.Kind()) + } + return ast.WalkContinue, nil + }); err != nil { + return err + } + } + + // Write all according to alignments and width. + for n := node.FirstChild(); n != nil; n = n.NextSibling() { + if err := ast.Walk(n, func(inner ast.Node, entering bool) (ast.WalkStatus, error) { + switch tnode := inner.(type) { + case *extAST.TableRow: + if entering { + colIndex = 0 + _, _ = r.w.Write(newLineChar) + break + } + + _, _ = r.w.Write([]byte("|")) + case *extAST.TableHeader: + if entering { + colIndex = 0 + break + } + + _, _ = r.w.Write([]byte("|\n")) + for i, align := range columnAligns { + _, _ = r.w.Write([]byte{'|'}) + width := columnWidths[i] + + left, right := tableHeaderColChar, tableHeaderColChar + switch align { + case extAST.AlignLeft: + left = tableHeaderAlignColChar + case extAST.AlignRight: + right = tableHeaderAlignColChar + case extAST.AlignCenter: + left, right = tableHeaderAlignColChar, tableHeaderAlignColChar + } + _, _ = r.w.Write(left) + _, _ = r.w.Write(bytes.Repeat(tableHeaderColChar, width)) + _, _ = r.w.Write(right) + } + _, _ = r.w.Write([]byte("|")) + case *extAST.TableCell: + if !entering { + break + } + + width := columnWidths[colIndex] + align := columnAligns[colIndex] + + if tnode.Parent().Kind() == extAST.KindTableHeader { + align = extAST.AlignLeft + } + + cellBuf.Reset() + if err := ast.Walk(tnode, r.mr.newRender(&cellBuf, r.source).renderNode); err != nil { + return ast.WalkStop, err + } + + _, _ = r.w.Write([]byte("| ")) + whitespaceWidth := width - runewidth.StringWidth(cellBuf.String()) + switch align { + default: + fallthrough + case extAST.AlignLeft: + _, _ = r.w.Write(cellBuf.Bytes()) + _, _ = r.w.Write(bytes.Repeat([]byte{' '}, 1+whitespaceWidth)) + case extAST.AlignCenter: + first := whitespaceWidth / 2 + _, _ = r.w.Write(bytes.Repeat([]byte{' '}, first)) + _, _ = r.w.Write(cellBuf.Bytes()) + _, _ = r.w.Write(bytes.Repeat([]byte{' '}, whitespaceWidth-first)) + _, _ = r.w.Write([]byte{' '}) + case extAST.AlignRight: + _, _ = r.w.Write(bytes.Repeat([]byte{' '}, whitespaceWidth)) + _, _ = r.w.Write(cellBuf.Bytes()) + _, _ = r.w.Write([]byte{' '}) + } + colIndex++ + return ast.WalkSkipChildren, nil + default: + return ast.WalkStop, fmt.Errorf("detected unexpected tree type %v", tnode.Kind()) + } + return ast.WalkContinue, nil + }); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/Kunde21/markdownfmt/v3/markdown/writer_indent.go b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/writer_indent.go new file mode 100644 index 00000000..a763f18a --- /dev/null +++ 
b/vendor/github.com/Kunde21/markdownfmt/v3/markdown/writer_indent.go
@@ -0,0 +1,106 @@
+package markdown
+
+import (
+ "io"
+)
+
+// lineIndentWriter wraps io.Writer and writes the given indent every time a
+// new line is started.
+type lineIndentWriter struct {
+ io.Writer
+
+ id indentation
+ firstWriteExtraIndent []byte
+
+ previousCharWasNewLine bool
+}
+
+func wrapWithLineIndentWriter(w io.Writer) *lineIndentWriter {
+ return &lineIndentWriter{Writer: w, previousCharWasNewLine: true}
+}
+
+func (l *lineIndentWriter) PushIndent(indent []byte) {
+ l.id.Push(indent)
+}
+
+func (l *lineIndentWriter) PopIndent() {
+ l.id.Pop()
+}
+
+func (l *lineIndentWriter) AddIndentOnFirstWrite(add []byte) {
+ l.firstWriteExtraIndent = append(l.firstWriteExtraIndent, add...)
+}
+
+func (l *lineIndentWriter) DelIndentOnFirstWrite(del []byte) {
+ l.firstWriteExtraIndent = l.firstWriteExtraIndent[:len(l.firstWriteExtraIndent)-len(del)]
+}
+
+func (l *lineIndentWriter) WasIndentOnFirstWriteWritten() bool {
+ return len(l.firstWriteExtraIndent) == 0
+}
+
+func (l *lineIndentWriter) Write(b []byte) (n int, _ error) {
+ if len(b) == 0 {
+ return 0, nil
+ }
+
+ writtenFromB := 0
+ for i, c := range b {
+ if l.previousCharWasNewLine {
+ ns, err := l.Writer.Write(l.id.Indent())
+ n += ns
+ if err != nil {
+ return n, err
+ }
+ }
+
+ if c == newLineChar[0] {
+ if !l.WasIndentOnFirstWriteWritten() {
+ ns, err := l.Writer.Write(l.firstWriteExtraIndent)
+ n += ns
+ if err != nil {
+ return n, err
+ }
+ l.firstWriteExtraIndent = nil
+ }
+
+ ns, err := l.Writer.Write(b[writtenFromB : i+1])
+ n += ns
+ writtenFromB += ns
+ if err != nil {
+ return n, err
+ }
+ l.previousCharWasNewLine = true
+ continue
+ }
+
+ // Not a newline: now that we know the line has content,
+ // write the indent's trailing whitespace if we just started the line.
+ if l.previousCharWasNewLine {
+ ws := l.id.Whitespace()
+ if len(ws) > 0 {
+ ns, err := l.Writer.Write(ws)
+ n += ns
+ if err != nil {
+ return n, err
+ }
+ }
+ }
+ l.previousCharWasNewLine = false
+ }
+
+ if writtenFromB >= len(b) {
+ return n, nil
+ }
+
+ if !l.WasIndentOnFirstWriteWritten() {
+ ns, err := l.Writer.Write(l.firstWriteExtraIndent)
+ n += ns
+ if err != nil {
+ return n, err
+ }
+ l.firstWriteExtraIndent = nil
+ }
+
+ ns, err := l.Writer.Write(b[writtenFromB:])
+ n += ns
+ return n, err
+}
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go
index ff14da31..42062538 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go
@@ -127,13 +127,6 @@ type VerifiableSignature struct {
 Packet *Signature
 }
 
-// SaltedHashSpecifier specifies that the given salt and hash are
-// used by a v6 signature.
-type SaltedHashSpecifier struct {
- Hash crypto.Hash
- Salt []byte
-}
-
 // NewVerifiableSig returns a struct of type VerifiableSignature referencing the input signature.
func NewVerifiableSig(signature *Signature) *VerifiableSignature { return &VerifiableSignature{ diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go index ac897d70..40850659 100644 --- a/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go +++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go @@ -6,7 +6,6 @@ package openpgp // import "github.com/ProtonMail/go-crypto/openpgp" import ( - "bytes" "crypto" _ "crypto/sha256" _ "crypto/sha512" @@ -455,19 +454,13 @@ func (scr *signatureCheckReader) Read(buf []byte) (int, error) { // if any, and a possible signature verification error. // If the signer isn't known, ErrUnknownIssuer is returned. func VerifyDetachedSignature(keyring KeyRing, signed, signature io.Reader, config *packet.Config) (sig *packet.Signature, signer *Entity, err error) { - return verifyDetachedSignature(keyring, signed, signature, nil, nil, false, config) + return verifyDetachedSignature(keyring, signed, signature, nil, false, config) } // VerifyDetachedSignatureAndHash performs the same actions as // VerifyDetachedSignature and checks that the expected hash functions were used. func VerifyDetachedSignatureAndHash(keyring KeyRing, signed, signature io.Reader, expectedHashes []crypto.Hash, config *packet.Config) (sig *packet.Signature, signer *Entity, err error) { - return verifyDetachedSignature(keyring, signed, signature, expectedHashes, nil, true, config) -} - -// VerifyDetachedSignatureAndSaltedHash performs the same actions as -// VerifyDetachedSignature and checks that the expected hash functions and salts were used. -func VerifyDetachedSignatureAndSaltedHash(keyring KeyRing, signed, signature io.Reader, expectedHashes []crypto.Hash, expectedSaltedHashes []*packet.SaltedHashSpecifier, config *packet.Config) (sig *packet.Signature, signer *Entity, err error) { - return verifyDetachedSignature(keyring, signed, signature, expectedHashes, expectedSaltedHashes, true, config) + return verifyDetachedSignature(keyring, signed, signature, expectedHashes, true, config) } // CheckDetachedSignature takes a signed file and a detached signature and @@ -475,25 +468,18 @@ func VerifyDetachedSignatureAndSaltedHash(keyring KeyRing, signed, signature io. // signature verification error. If the signer isn't known, // ErrUnknownIssuer is returned. func CheckDetachedSignature(keyring KeyRing, signed, signature io.Reader, config *packet.Config) (signer *Entity, err error) { - _, signer, err = verifyDetachedSignature(keyring, signed, signature, nil, nil, false, config) - return -} - -// CheckDetachedSignatureAndSaltedHash performs the same actions as -// CheckDetachedSignature and checks that the expected hash functions or salted hash functions were used. -func CheckDetachedSignatureAndSaltedHash(keyring KeyRing, signed, signature io.Reader, expectedHashes []crypto.Hash, expectedSaltedHashes []*packet.SaltedHashSpecifier, config *packet.Config) (signer *Entity, err error) { - _, signer, err = verifyDetachedSignature(keyring, signed, signature, expectedHashes, expectedSaltedHashes, true, config) + _, signer, err = verifyDetachedSignature(keyring, signed, signature, nil, false, config) return } // CheckDetachedSignatureAndHash performs the same actions as // CheckDetachedSignature and checks that the expected hash functions were used. 
func CheckDetachedSignatureAndHash(keyring KeyRing, signed, signature io.Reader, expectedHashes []crypto.Hash, config *packet.Config) (signer *Entity, err error) { - _, signer, err = verifyDetachedSignature(keyring, signed, signature, expectedHashes, nil, true, config) + _, signer, err = verifyDetachedSignature(keyring, signed, signature, expectedHashes, true, config) return } -func verifyDetachedSignature(keyring KeyRing, signed, signature io.Reader, expectedHashes []crypto.Hash, expectedSaltedHashes []*packet.SaltedHashSpecifier, checkHashes bool, config *packet.Config) (sig *packet.Signature, signer *Entity, err error) { +func verifyDetachedSignature(keyring KeyRing, signed, signature io.Reader, expectedHashes []crypto.Hash, checkHashes bool, config *packet.Config) (sig *packet.Signature, signer *Entity, err error) { var issuerKeyId uint64 var hashFunc crypto.Hash var sigType packet.SignatureType @@ -523,22 +509,11 @@ func verifyDetachedSignature(keyring KeyRing, signed, signature io.Reader, expec sigType = sig.SigType if checkHashes { matchFound := false - if sig.Version == 6 { - // check for salted hashes - for _, expectedSaltedHash := range expectedSaltedHashes { - if hashFunc == expectedSaltedHash.Hash && bytes.Equal(sig.Salt(), expectedSaltedHash.Salt) { - matchFound = true - break - } - } - - } else { - // check for hashes - for _, expectedHash := range expectedHashes { - if hashFunc == expectedHash { - matchFound = true - break - } + // check for hashes + for _, expectedHash := range expectedHashes { + if hashFunc == expectedHash { + matchFound = true + break } } if !matchFound { diff --git a/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml b/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml new file mode 100644 index 00000000..db6e504a --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + threshold: 1% + patch: + default: + target: 70% +ignore: + - globoptions.go diff --git a/vendor/github.com/bmatcuk/doublestar/v4/.gitignore b/vendor/github.com/bmatcuk/doublestar/v4/.gitignore new file mode 100644 index 00000000..af212ecc --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/.gitignore @@ -0,0 +1,32 @@ +# vi +*~ +*.swp +*.swo + +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof + +# test directory +test/ diff --git a/vendor/github.com/bmatcuk/doublestar/v4/LICENSE b/vendor/github.com/bmatcuk/doublestar/v4/LICENSE new file mode 100644 index 00000000..309c9d1d --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014 Bob Matcuk + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/bmatcuk/doublestar/v4/README.md b/vendor/github.com/bmatcuk/doublestar/v4/README.md new file mode 100644 index 00000000..70117eff --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/README.md @@ -0,0 +1,402 @@ +# doublestar + +Path pattern matching and globbing supporting `doublestar` (`**`) patterns. + +[![PkgGoDev](https://pkg.go.dev/badge/github.com/bmatcuk/doublestar)](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4) +[![Release](https://img.shields.io/github/release/bmatcuk/doublestar.svg?branch=master)](https://github.com/bmatcuk/doublestar/releases) +[![Build Status](https://github.com/bmatcuk/doublestar/actions/workflows/test.yml/badge.svg)](https://github.com/bmatcuk/doublestar/actions) +[![codecov.io](https://img.shields.io/codecov/c/github/bmatcuk/doublestar.svg?branch=master)](https://codecov.io/github/bmatcuk/doublestar?branch=master) +[![Sponsor](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/bmatcuk) + +## About + +#### [Upgrading?](UPGRADING.md) + +**doublestar** is a [golang] implementation of path pattern matching and +globbing with support for "doublestar" (aka globstar: `**`) patterns. + +doublestar patterns match files and directories recursively. For example, if +you had the following directory structure: + +```bash +grandparent +`-- parent + |-- child1 + `-- child2 +``` + +You could find the children with patterns such as: `**/child*`, +`grandparent/**/child?`, `**/parent/*`, or even just `**` by itself (which will +return all files and directories recursively). + +Bash's globstar is doublestar's inspiration and, as such, works similarly. +Note that the doublestar must appear as a path component by itself. A pattern +such as `/path**` is invalid and will be treated the same as `/path*`, but +`/path*/**` should achieve the desired result. Additionally, `/path/**` will +match all directories and files under the path directory, but `/path/**/` will +only match directories. + +v4 is a complete rewrite with a focus on performance. Additionally, +[doublestar] has been updated to use the new [io/fs] package for filesystem +access. As a result, it is only supported by [golang] v1.16+. + +## Installation + +**doublestar** can be installed via `go get`: + +```bash +go get github.com/bmatcuk/doublestar/v4 +``` + +To use it in your code, you must import it: + +```go +import "github.com/bmatcuk/doublestar/v4" +``` + +## Usage + +### ErrBadPattern + +```go +doublestar.ErrBadPattern +``` + +Returned by various functions to report that the pattern is malformed. At the +moment, this value is equal to `path.ErrBadPattern`, but, for portability, this +equivalence should probably not be relied upon. + +### Match + +```go +func Match(pattern, name string) (bool, error) +``` + +Match returns true if `name` matches the file name `pattern` ([see +"patterns"]). `name` and `pattern` are split on forward slash (`/`) characters +and may be relative or absolute. 
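+
+For instance, a minimal sketch of matching one of the example paths above:
+
+```go
+matched, err := doublestar.Match("grandparent/**/child?", "grandparent/parent/child1")
+// matched == true, err == nil: `**` matches recursively, `?` matches one character.
+```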
+ +Match requires pattern to match all of name, not just a substring. The only +possible returned error is `ErrBadPattern`, when pattern is malformed. + +Note: this is meant as a drop-in replacement for `path.Match()` which always +uses `'/'` as the path separator. If you want to support systems which use a +different path separator (such as Windows), what you want is `PathMatch()`. +Alternatively, you can run `filepath.ToSlash()` on both pattern and name and +then use this function. + +Note: users should _not_ count on the returned error, +`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`. + + +### PathMatch + +```go +func PathMatch(pattern, name string) (bool, error) +``` + +PathMatch returns true if `name` matches the file name `pattern` ([see +"patterns"]). The difference between Match and PathMatch is that PathMatch will +automatically use your system's path separator to split `name` and `pattern`. +On systems where the path separator is `'\'`, escaping will be disabled. + +Note: this is meant as a drop-in replacement for `filepath.Match()`. It assumes +that both `pattern` and `name` are using the system's path separator. If you +can't be sure of that, use `filepath.ToSlash()` on both `pattern` and `name`, +and then use the `Match()` function instead. + +### GlobOption + +Options that may be passed to `Glob`, `GlobWalk`, or `FilepathGlob`. Any number +of options may be passed to these functions, and in any order, as the last +argument(s). + +```go +WithFailOnIOErrors() +``` + +If passed, doublestar will abort and return IO errors when encountered. Note +that if the glob pattern references a path that does not exist (such as +`nonexistent/path/*`), this is _not_ considered an IO error: it is considered a +pattern with no matches. + +```go +WithFailOnPatternNotExist() +``` + +If passed, doublestar will abort and return `doublestar.ErrPatternNotExist` if +the pattern references a path that does not exist before any meta characters +such as `nonexistent/path/*`. Note that alts (ie, `{...}`) are expanded before +this check. In other words, a pattern such as `{a,b}/*` may fail if either `a` +or `b` do not exist but `*/{a,b}` will never fail because the star may match +nothing. + +```go +WithFilesOnly() +``` + +If passed, doublestar will only return "files" from `Glob`, `GlobWalk`, or +`FilepathGlob`. In this context, "files" are anything that is not a directory +or a symlink to a directory. + +Note: if combined with the WithNoFollow option, symlinks to directories _will_ +be included in the result since no attempt is made to follow the symlink. + +```go +WithNoFollow() +``` + +If passed, doublestar will not follow symlinks while traversing the filesystem. +However, due to io/fs's _very_ poor support for querying the filesystem about +symlinks, there's a caveat here: if part of the pattern before any meta +characters contains a reference to a symlink, it will be followed. For example, +a pattern such as `path/to/symlink/*` will be followed assuming it is a valid +symlink to a directory. However, from this same example, a pattern such as +`path/to/**` will not traverse the `symlink`, nor would `path/*/symlink/*` + +Note: if combined with the WithFilesOnly option, symlinks to directories _will_ +be included in the result since no attempt is made to follow the symlink. + +### Glob + +```go +func Glob(fsys fs.FS, pattern string, opts ...GlobOption) ([]string, error) +``` + +Glob returns the names of all files matching pattern or nil if there is no +matching file. 
The syntax of patterns is the same as in `Match()`. The pattern +may describe hierarchical names such as `usr/*/bin/ed`. + +Glob ignores file system errors such as I/O errors reading directories by +default. The only possible returned error is `ErrBadPattern`, reporting that +the pattern is malformed. + +To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be +passed. + +Note: this is meant as a drop-in replacement for `io/fs.Glob()`. Like +`io/fs.Glob()`, this function assumes that your pattern uses `/` as the path +separator even if that's not correct for your OS (like Windows). If you aren't +sure if that's the case, you can use `filepath.ToSlash()` on your pattern +before calling `Glob()`. + +Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/` +will return no results and no errors. This seems to be a [conscious +decision](https://github.com/golang/go/issues/44092#issuecomment-774132549), +even if counter-intuitive. You can use [SplitPattern] to divide a pattern into +a base path (to initialize an `FS` object) and pattern. + +Note: users should _not_ count on the returned error, +`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`. + +### GlobWalk + +```go +type GlobWalkFunc func(path string, d fs.DirEntry) error + +func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc, opts ...GlobOption) error +``` + +GlobWalk calls the callback function `fn` for every file matching pattern. The +syntax of pattern is the same as in Match() and the behavior is the same as +Glob(), with regard to limitations (such as patterns containing `/./`, `/../`, +or starting with `/`). The pattern may describe hierarchical names such as +usr/*/bin/ed. + +GlobWalk may have a small performance benefit over Glob if you do not need a +slice of matches because it can avoid allocating memory for the matches. +Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for each +match, and lets you quit early by returning a non-nil error from your callback +function. Like `io/fs.WalkDir`, if your callback returns `SkipDir`, GlobWalk +will skip the current directory. This means that if the current path _is_ a +directory, GlobWalk will not recurse into it. If the current path is not a +directory, the rest of the parent directory will be skipped. + +GlobWalk ignores file system errors such as I/O errors reading directories by +default. GlobWalk may return `ErrBadPattern`, reporting that the pattern is +malformed. + +To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be +passed. + +Additionally, if the callback function `fn` returns an error, GlobWalk will +exit immediately and return that error. + +Like Glob(), this function assumes that your pattern uses `/` as the path +separator even if that's not correct for your OS (like Windows). If you aren't +sure if that's the case, you can use filepath.ToSlash() on your pattern before +calling GlobWalk(). + +Note: users should _not_ count on the returned error, +`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`. + +### FilepathGlob + +```go +func FilepathGlob(pattern string, opts ...GlobOption) (matches []string, err error) +``` + +FilepathGlob returns the names of all files matching pattern or nil if there is +no matching file. The syntax of pattern is the same as in Match(). The pattern +may describe hierarchical names such as usr/*/bin/ed. + +FilepathGlob ignores file system errors such as I/O errors reading directories +by default. 
The only possible returned error is `ErrBadPattern`, reporting that +the pattern is malformed. + +To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be +passed. + +Note: FilepathGlob is a convenience function that is meant as a drop-in +replacement for `path/filepath.Glob()` for users who don't need the +complication of io/fs. Basically, it: + +* Runs `filepath.Clean()` and `ToSlash()` on the pattern +* Runs `SplitPattern()` to get a base path and a pattern to Glob +* Creates an FS object from the base path and `Glob()s` on the pattern +* Joins the base path with all of the matches from `Glob()` + +Returned paths will use the system's path separator, just like +`filepath.Glob()`. + +Note: the returned error `doublestar.ErrBadPattern` is not equal to +`filepath.ErrBadPattern`. + +### SplitPattern + +```go +func SplitPattern(p string) (base, pattern string) +``` + +SplitPattern is a utility function. Given a pattern, SplitPattern will return +two strings: the first string is everything up to the last slash (`/`) that +appears _before_ any unescaped "meta" characters (ie, `*?[{`). The second +string is everything after that slash. For example, given the pattern: + +``` +../../path/to/meta*/** + ^----------- split here +``` + +SplitPattern returns "../../path/to" and "meta*/**". This is useful for +initializing os.DirFS() to call Glob() because Glob() will silently fail if +your pattern includes `/./` or `/../`. For example: + +```go +base, pattern := SplitPattern("../../path/to/meta*/**") +fsys := os.DirFS(base) +matches, err := Glob(fsys, pattern) +``` + +If SplitPattern cannot find somewhere to split the pattern (for example, +`meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in this +example). + +Of course, it is your responsibility to decide if the returned base path is +"safe" in the context of your application. Perhaps you could use Match() to +validate against a list of approved base directories? + +### ValidatePattern + +```go +func ValidatePattern(s string) bool +``` + +Validate a pattern. Patterns are validated while they run in Match(), +PathMatch(), and Glob(), so, you normally wouldn't need to call this. However, +there are cases where this might be useful: for example, if your program allows +a user to enter a pattern that you'll run at a later time, you might want to +validate it. + +ValidatePattern assumes your pattern uses '/' as the path separator. + +### ValidatePathPattern + +```go +func ValidatePathPattern(s string) bool +``` + +Like ValidatePattern, only uses your OS path separator. In other words, use +ValidatePattern if you would normally use Match() or Glob(). Use +ValidatePathPattern if you would normally use PathMatch(). Keep in mind, Glob() +requires '/' separators, even if your OS uses something else. + +### Patterns + +**doublestar** supports the following special terms in the patterns: + +Special Terms | Meaning +------------- | ------- +`*` | matches any sequence of non-path-separators +`/**/` | matches zero or more directories +`?` | matches any single non-path-separator character +`[class]` | matches any single non-path-separator character against a class of characters ([see "character classes"]) +`{alt1,...}` | matches a sequence of characters if one of the comma-separated alternatives matches + +Any character with a special meaning can be escaped with a backslash (`\`). + +A doublestar (`**`) should appear surrounded by path separators such as `/**/`. 
+A mid-pattern doublestar (`**`) behaves like bash's globstar option: a pattern
+such as `path/to/**.txt` would return the same results as `path/to/*.txt`. The
+pattern you're looking for is `path/to/**/*.txt`.
+
+#### Character Classes
+
+Character classes support the following:
+
+Class | Meaning
+---------- | -------
+`[abc]` | matches any single character within the set
+`[a-z]` | matches any single character in the range
+`[^class]` | matches any single character which does *not* match the class
+`[!class]` | same as `^`: negates the class
+
+## Performance
+
+```
+goos: darwin
+goarch: amd64
+pkg: github.com/bmatcuk/doublestar/v4
+cpu: Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz
+BenchmarkMatch-8 285639 3868 ns/op 0 B/op 0 allocs/op
+BenchmarkGoMatch-8 286945 3726 ns/op 0 B/op 0 allocs/op
+BenchmarkPathMatch-8 320511 3493 ns/op 0 B/op 0 allocs/op
+BenchmarkGoPathMatch-8 304236 3434 ns/op 0 B/op 0 allocs/op
+BenchmarkGlob-8 466 2501123 ns/op 190225 B/op 2849 allocs/op
+BenchmarkGlobWalk-8 476 2536293 ns/op 184017 B/op 2750 allocs/op
+BenchmarkGoGlob-8 463 2574836 ns/op 194249 B/op 2929 allocs/op
+```
+
+These benchmarks (in `doublestar_test.go`) compare Match() to path.Match(),
+PathMatch() to filepath.Match(), and Glob() + GlobWalk() to io/fs.Glob(). They
+only run patterns that the standard go packages can understand as well (so, no
+`{alts}` or `**`) for a fair comparison. Of course, alts and doublestars will
+be less performant than the other pattern meta characters.
+
+Alts are essentially like running multiple patterns, the number of which can
+get large if your pattern has alts nested inside alts. This affects both
+matching (ie, Match()) and globbing (Glob()).
+
+`**` performance in matching is actually pretty similar to a regular `*`, but
+can cause a large number of reads when globbing as it will need to recursively
+traverse your filesystem.
+
+## Sponsors
+I started this project in 2014 in my spare time and have been maintaining it
+ever since. In that time, it has grown into one of the most popular globbing
+libraries in the Go ecosystem. So, if **doublestar** is a useful library in
+your project, consider [sponsoring] my work! I'd really appreciate it!
+
+Thanks for sponsoring me!
+
+## License
+
+[MIT License](LICENSE)
+
+[SplitPattern]: #splitpattern
+[doublestar]: https://github.com/bmatcuk/doublestar
+[golang]: http://golang.org/
+[io/fs]: https://pkg.go.dev/io/fs
+[see "character classes"]: #character-classes
+[see "patterns"]: #patterns
+[sponsoring]: https://github.com/sponsors/bmatcuk
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md b/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md
new file mode 100644
index 00000000..25aace3d
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md
@@ -0,0 +1,63 @@
+# Upgrading from v3 to v4
+
+v4 is a complete rewrite with a focus on performance. Additionally,
+[doublestar] has been updated to use the new [io/fs] package for filesystem
+access. As a result, it is only supported by [golang] v1.16+.
+
+`Match()` and `PathMatch()` mostly did not change, besides big performance
+improvements. Their API is the same. However, note the following corner cases:
+
+* In previous versions of [doublestar], `PathMatch()` could accept patterns
+ that used either platform-specific path separators, or `/`. This was
+ undocumented and didn't match `filepath.Match()`. In v4, both `pattern` and
+ `name` must be using appropriate path separators for the platform.
You can + use `filepath.FromSlash()` to change `/` to platform-specific separators if + you aren't sure. +* In previous versions of [doublestar], a pattern such as `path/to/a/**` would + _not_ match `path/to/a`. In v4, this pattern _will_ match because if `a` was + a directory, `Glob()` would return it. In other words, the following returns + true: `Match("path/to/a/**", "path/to/a")` + +`Glob()` changed from using a [doublestar]-specific filesystem abstraction (the +`OS` interface) to the [io/fs] package. As a result, it now takes a `fs.FS` as +its first argument. This change has a couple ramifications: + +* Like `io/fs.Glob`, `pattern` must use a `/` as path separator, even on + platforms that use something else. You can use `filepath.ToSlash()` on your + patterns if you aren't sure. +* Patterns that contain `/./` or `/../` are invalid. The [io/fs] package + rejects them, returning an IO error. Since `Glob()` ignores IO errors, it'll + end up being silently rejected. You can run `path.Clean()` to ensure they are + removed from the pattern. + +v4 also added a `GlobWalk()` function that is slightly more performant than +`Glob()` if you just need to iterate over the results and don't need a string +slice. You also get `fs.DirEntry` objects for each result, and can quit early +if your callback returns an error. + +# Upgrading from v2 to v3 + +v3 introduced using `!` to negate character classes, in addition to `^`. If any +of your patterns include a character class that starts with an exclamation mark +(ie, `[!...]`), you'll need to update the pattern to escape or move the +exclamation mark. Note that, like the caret (`^`), it only negates the +character class if it is the first character in the character class. + +# Upgrading from v1 to v2 + +The change from v1 to v2 was fairly minor: the return type of the `Open` method +on the `OS` interface was changed from `*os.File` to `File`, a new interface +exported by doublestar. The new `File` interface only defines the functionality +doublestar actually needs (`io.Closer` and `Readdir`), making it easier to use +doublestar with [go-billy], [afero], or something similar. If you were using +this functionality, updating should be as easy as updating `Open's` return +type, since `os.File` already implements `doublestar.File`. + +If you weren't using this functionality, updating should be as easy as changing +your dependencies to point to v2. + +[afero]: https://github.com/spf13/afero +[doublestar]: https://github.com/bmatcuk/doublestar +[go-billy]: https://github.com/src-d/go-billy +[golang]: http://golang.org/ +[io/fs]: https://golang.org/pkg/io/fs/ diff --git a/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go b/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go new file mode 100644 index 00000000..210fd40c --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go @@ -0,0 +1,13 @@ +package doublestar + +import ( + "errors" + "path" +) + +// ErrBadPattern indicates a pattern was malformed. +var ErrBadPattern = path.ErrBadPattern + +// ErrPatternNotExist indicates that the pattern passed to Glob, GlobWalk, or +// FilepathGlob references a path that does not exist. 
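+//
+// It is only returned when the WithFailOnPatternNotExist option is passed.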
+var ErrPatternNotExist = errors.New("pattern does not exist")
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/glob.go b/vendor/github.com/bmatcuk/doublestar/v4/glob.go
new file mode 100644
index 00000000..519601b1
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/glob.go
@@ -0,0 +1,473 @@
+package doublestar
+
+import (
+ "errors"
+ "io/fs"
+ "path"
+)
+
+// Glob returns the names of all files matching pattern or nil if there is no
+// matching file. The syntax of pattern is the same as in Match(). The pattern
+// may describe hierarchical names such as usr/*/bin/ed.
+//
+// Glob ignores file system errors such as I/O errors reading directories by
+// default. The only possible returned error is ErrBadPattern, reporting that
+// the pattern is malformed.
+//
+// To enable aborting on I/O errors, the WithFailOnIOErrors option can be
+// passed.
+//
+// Note: this is meant as a drop-in replacement for io/fs.Glob(). Like
+// io/fs.Glob(), this function assumes that your pattern uses `/` as the path
+// separator even if that's not correct for your OS (like Windows). If you
+// aren't sure if that's the case, you can use filepath.ToSlash() on your
+// pattern before calling Glob().
+//
+// Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/`
+// will return no results and no errors. You can use SplitPattern to divide a
+// pattern into a base path (to initialize an `FS` object) and pattern.
+//
+// Note: users should _not_ count on the returned error,
+// doublestar.ErrBadPattern, being equal to path.ErrBadPattern.
+//
+func Glob(fsys fs.FS, pattern string, opts ...GlobOption) ([]string, error) {
+ if !ValidatePattern(pattern) {
+ return nil, ErrBadPattern
+ }
+
+ g := newGlob(opts...)
+
+ if hasMidDoubleStar(pattern) {
+ // If the pattern has a `**` anywhere but the very end, GlobWalk is more
+ // performant because it can get away with fewer allocations. If the pattern
+ // ends in a `**`, both methods are pretty much the same, but Glob has a
+ // _very_ slight advantage because of lower function call overhead.
+ var matches []string
+ err := g.doGlobWalk(fsys, pattern, true, true, func(p string, d fs.DirEntry) error {
+ matches = append(matches, p)
+ return nil
+ })
+ return matches, err
+ }
+ return g.doGlob(fsys, pattern, nil, true, true)
+}
+
+// Does the actual globbin'
+// - firstSegment is true if we're in the first segment of the pattern, ie,
+// the right-most part where we can match files. If it's false, we're
+// somewhere in the middle (or at the beginning) and can only match
+// directories since there are path segments above us.
+// - beforeMeta is true if we're exploring segments before any meta
+// characters, ie, in a pattern such as `path/to/file*.txt`, the `path/to/`
+// bit does not contain any meta characters.
+func (g *glob) doGlob(fsys fs.FS, pattern string, m []string, firstSegment, beforeMeta bool) (matches []string, err error) {
+ matches = m
+ patternStart := indexMeta(pattern)
+ if patternStart == -1 {
+ // pattern doesn't contain any meta characters - does a file matching the
+ // pattern exist?
+ // The pattern may contain escaped wildcard characters for an exact path match.
+ path := unescapeMeta(pattern)
+ pathInfo, pathExists, pathErr := g.exists(fsys, path, beforeMeta)
+ if pathErr != nil {
+ return nil, pathErr
+ }
+
+ if pathExists && (!firstSegment || !g.filesOnly || !pathInfo.IsDir()) {
+ matches = append(matches, path)
+ }
+
+ return
+ }
+
+ dir := "."
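+ // Split the pattern at the last unescaped slash or closing alt so the
+ // directory prefix can be globbed first.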
+ splitIdx := lastIndexSlashOrAlt(pattern) + if splitIdx != -1 { + if pattern[splitIdx] == '}' { + openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) + if openingIdx == -1 { + // if there's no matching opening index, technically Match() will treat + // an unmatched `}` as nothing special, so... we will, too! + splitIdx = lastIndexSlash(pattern[:splitIdx]) + if splitIdx != -1 { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } else { + // otherwise, we have to handle the alts: + return g.globAlts(fsys, pattern, openingIdx, splitIdx, matches, firstSegment, beforeMeta) + } + } else { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } + + // if `splitIdx` is less than `patternStart`, we know `dir` has no meta + // characters. They would be equal if they are both -1, which means `dir` + // will be ".", and we know that doesn't have meta characters either. + if splitIdx <= patternStart { + return g.globDir(fsys, dir, pattern, matches, firstSegment, beforeMeta) + } + + var dirs []string + dirs, err = g.doGlob(fsys, dir, matches, false, beforeMeta) + if err != nil { + return + } + for _, d := range dirs { + matches, err = g.globDir(fsys, d, pattern, matches, firstSegment, false) + if err != nil { + return + } + } + + return +} + +// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the +// indexes of `{` and `}`, respectively +func (g *glob) globAlts(fsys fs.FS, pattern string, openingIdx, closingIdx int, m []string, firstSegment, beforeMeta bool) (matches []string, err error) { + matches = m + + var dirs []string + startIdx := 0 + afterIdx := closingIdx + 1 + splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) + if splitIdx == -1 || pattern[splitIdx] == '}' { + // no common prefix + dirs = []string{""} + } else { + // our alts have a common prefix that we can process first + dirs, err = g.doGlob(fsys, pattern[:splitIdx], matches, false, beforeMeta) + if err != nil { + return + } + + startIdx = splitIdx + 1 + } + + for _, d := range dirs { + patIdx := openingIdx + 1 + altResultsStartIdx := len(matches) + thisResultStartIdx := altResultsStartIdx + for patIdx < closingIdx { + nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) + if nextIdx == -1 { + nextIdx = closingIdx + } else { + nextIdx += patIdx + } + + alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) + matches, err = g.doGlob(fsys, alt, matches, firstSegment, beforeMeta) + if err != nil { + return + } + + matchesLen := len(matches) + if altResultsStartIdx != thisResultStartIdx && thisResultStartIdx != matchesLen { + // Alts can result in matches that aren't sorted, or, worse, duplicates + // (consider the trivial pattern `path/to/{a,*}`). Since doGlob returns + // sorted results, we can do a sort of in-place merge and remove + // duplicates. 
But, we only need to do this if this isn't the first alt
+ // (ie, `altResultsStartIdx != thisResultStartIdx`) and if the latest
+ // alt actually added some matches (`thisResultStartIdx !=
+ // len(matches)`)
+ matches = sortAndRemoveDups(matches, altResultsStartIdx, thisResultStartIdx, matchesLen)
+
+ // length of matches may have changed
+ thisResultStartIdx = len(matches)
+ } else {
+ thisResultStartIdx = matchesLen
+ }
+
+ patIdx = nextIdx + 1
+ }
+ }
+
+ return
+}
+
+// find files/subdirectories in the given `dir` that match `pattern`
+func (g *glob) globDir(fsys fs.FS, dir, pattern string, matches []string, canMatchFiles, beforeMeta bool) (m []string, e error) {
+ m = matches
+
+ if pattern == "" {
+ if !canMatchFiles || !g.filesOnly {
+ // pattern can be an empty string if the original pattern ended in a
+ // slash, in which case, we should just return dir, but only if it
+ // actually exists and it's a directory (or a symlink to a directory)
+ _, isDir, err := g.isPathDir(fsys, dir, beforeMeta)
+ if err != nil {
+ return nil, err
+ }
+ if isDir {
+ m = append(m, dir)
+ }
+ }
+ return
+ }
+
+ if pattern == "**" {
+ return g.globDoubleStar(fsys, dir, m, canMatchFiles, beforeMeta)
+ }
+
+ dirs, err := fs.ReadDir(fsys, dir)
+ if err != nil {
+ if errors.Is(err, fs.ErrNotExist) {
+ e = g.handlePatternNotExist(beforeMeta)
+ } else {
+ e = g.forwardErrIfFailOnIOErrors(err)
+ }
+ return
+ }
+
+ var matched bool
+ for _, info := range dirs {
+ name := info.Name()
+ matched, e = matchWithSeparator(pattern, name, '/', false)
+ if e != nil {
+ return
+ }
+ if matched {
+ matched = canMatchFiles
+ if !matched || g.filesOnly {
+ matched, e = g.isDir(fsys, dir, name, info)
+ if e != nil {
+ return
+ }
+ if canMatchFiles {
+ // if we're here, it's because g.filesOnly
+ // is set and we don't want directories
+ matched = !matched
+ }
+ }
+ if matched {
+ m = append(m, path.Join(dir, name))
+ }
+ }
+ }
+
+ return
+}
+
+func (g *glob) globDoubleStar(fsys fs.FS, dir string, matches []string, canMatchFiles, beforeMeta bool) ([]string, error) {
+ dirs, err := fs.ReadDir(fsys, dir)
+ if err != nil {
+ if errors.Is(err, fs.ErrNotExist) {
+ return matches, g.handlePatternNotExist(beforeMeta)
+ } else {
+ return matches, g.forwardErrIfFailOnIOErrors(err)
+ }
+ }
+
+ if !g.filesOnly {
+ // `**` can match *this* dir, so add it
+ matches = append(matches, dir)
+ }
+
+ for _, info := range dirs {
+ name := info.Name()
+ isDir, err := g.isDir(fsys, dir, name, info)
+ if err != nil {
+ return nil, err
+ }
+ if isDir {
+ matches, err = g.globDoubleStar(fsys, path.Join(dir, name), matches, canMatchFiles, false)
+ if err != nil {
+ return nil, err
+ }
+ } else if canMatchFiles {
+ matches = append(matches, path.Join(dir, name))
+ }
+ }
+
+ return matches, nil
+}
+
+// Returns true if the pattern has a doublestar in the middle of the pattern.
+// In this case, GlobWalk is faster because it can get away with fewer
+// allocations. However, Glob has a _very_ slight edge if the pattern ends in
+// `**`.
+func hasMidDoubleStar(p string) bool {
+ // subtract 3: 2 because we want to return false if the pattern ends in `**`
+ // (Glob is _very_ slightly faster in that case), and the extra 1 because our
+ // loop checks p[i] and p[i+1].
+ l := len(p) - 3
+ for i := 0; i < l; i++ {
+ if p[i] == '\\' {
+ // escape next byte
+ i++
+ } else if p[i] == '*' && p[i+1] == '*' {
+ return true
+ }
+ }
+ return false
+}
+
+// Returns the index of the first unescaped meta character, or negative 1.
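+// For example, indexMeta("a/\\*b/*c") returns 6; the star at index 3 is escaped.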
+func indexMeta(s string) int { + var c byte + l := len(s) + for i := 0; i < l; i++ { + c = s[i] + if c == '*' || c == '?' || c == '[' || c == '{' { + return i + } else if c == '\\' { + // skip next byte + i++ + } + } + return -1 +} + +// Returns the index of the last unescaped slash or closing alt (`}`) in the +// string, or negative 1. +func lastIndexSlashOrAlt(s string) int { + for i := len(s) - 1; i >= 0; i-- { + if (s[i] == '/' || s[i] == '}') && (i == 0 || s[i-1] != '\\') { + return i + } + } + return -1 +} + +// Returns the index of the last unescaped slash in the string, or negative 1. +func lastIndexSlash(s string) int { + for i := len(s) - 1; i >= 0; i-- { + if s[i] == '/' && (i == 0 || s[i-1] != '\\') { + return i + } + } + return -1 +} + +// Assuming the byte after the end of `s` is a closing `}`, this function will +// find the index of the matching `{`. That is, it'll skip over any nested `{}` +// and account for escaping. +func indexMatchedOpeningAlt(s string) int { + alts := 1 + for i := len(s) - 1; i >= 0; i-- { + if s[i] == '}' && (i == 0 || s[i-1] != '\\') { + alts++ + } else if s[i] == '{' && (i == 0 || s[i-1] != '\\') { + if alts--; alts == 0 { + return i + } + } + } + return -1 +} + +// Returns true if the path exists +func (g *glob) exists(fsys fs.FS, name string, beforeMeta bool) (fs.FileInfo, bool, error) { + // name might end in a slash, but Stat doesn't like that + namelen := len(name) + if namelen > 1 && name[namelen-1] == '/' { + name = name[:namelen-1] + } + + info, err := fs.Stat(fsys, name) + if errors.Is(err, fs.ErrNotExist) { + return nil, false, g.handlePatternNotExist(beforeMeta) + } + return info, err == nil, g.forwardErrIfFailOnIOErrors(err) +} + +// Returns true if the path exists and is a directory or a symlink to a +// directory +func (g *glob) isPathDir(fsys fs.FS, name string, beforeMeta bool) (fs.FileInfo, bool, error) { + info, err := fs.Stat(fsys, name) + if errors.Is(err, fs.ErrNotExist) { + return nil, false, g.handlePatternNotExist(beforeMeta) + } + return info, err == nil && info.IsDir(), g.forwardErrIfFailOnIOErrors(err) +} + +// Returns whether or not the given DirEntry is a directory. If the DirEntry +// represents a symbolic link, the link is followed by running fs.Stat() on +// `path.Join(dir, name)` (if dir is "", name will be used without joining) +func (g *glob) isDir(fsys fs.FS, dir, name string, info fs.DirEntry) (bool, error) { + if !g.noFollow && (info.Type()&fs.ModeSymlink) > 0 { + p := name + if dir != "" { + p = path.Join(dir, name) + } + finfo, err := fs.Stat(fsys, p) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + // this function is only ever called while expanding a glob, so it can + // never return ErrPatternNotExist + return false, nil + } + return false, g.forwardErrIfFailOnIOErrors(err) + } + return finfo.IsDir(), nil + } + return info.IsDir(), nil +} + +// Builds a string from an alt +func buildAlt(prefix, pattern string, startIdx, openingIdx, currentIdx, nextIdx, afterIdx int) string { + // pattern: + // ignored/start{alts,go,here}remaining - len = 36 + // | | | | ^--- afterIdx = 27 + // | | | \--------- nextIdx = 21 + // | | \----------- currentIdx = 19 + // | \----------------- openingIdx = 13 + // \---------------------- startIdx = 8 + // + // result: + // prefix/startgoremaining - len = 7 + 5 + 2 + 9 = 23 + var buf []byte + patLen := len(pattern) + size := (openingIdx - startIdx) + (nextIdx - currentIdx) + (patLen - afterIdx) + if prefix != "" && prefix != "." 
{
+		buf = make([]byte, 0, size+len(prefix)+1)
+		buf = append(buf, prefix...)
+		buf = append(buf, '/')
+	} else {
+		buf = make([]byte, 0, size)
+	}
+	buf = append(buf, pattern[startIdx:openingIdx]...)
+	buf = append(buf, pattern[currentIdx:nextIdx]...)
+	if afterIdx < patLen {
+		buf = append(buf, pattern[afterIdx:]...)
+	}
+	return string(buf)
+}
+
+// Running alts can produce results that are not sorted, and, worse, can cause
+// duplicates (consider the trivial pattern `path/to/{a,*}`). Since we know
+// each run of doGlob is sorted, we can basically do the "merge" step of a
+// merge sort in-place.
+func sortAndRemoveDups(matches []string, idx1, idx2, l int) []string {
+	var tmp string
+	for ; idx1 < idx2; idx1++ {
+		if matches[idx1] < matches[idx2] {
+			// order is correct
+			continue
+		} else if matches[idx1] > matches[idx2] {
+			// need to swap and then re-sort matches above idx2
+			tmp = matches[idx1]
+			matches[idx1] = matches[idx2]
+
+			shft := idx2 + 1
+			for ; shft < l && matches[shft] < tmp; shft++ {
+				matches[shft-1] = matches[shft]
+			}
+			matches[shft-1] = tmp
+		} else {
+			// duplicate - shift matches above idx2 down one and decrement l
+			for shft := idx2 + 1; shft < l; shft++ {
+				matches[shft-1] = matches[shft]
+			}
+			if l--; idx2 == l {
+				// nothing left to do... matches[idx2:] must have been full of dups
+				break
+			}
+		}
+	}
+	return matches[:l]
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go b/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go
new file mode 100644
index 00000000..9483c4bb
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go
@@ -0,0 +1,144 @@
+package doublestar
+
+import "strings"
+
+// glob is an internal type to store options during globbing.
+type glob struct {
+	failOnIOErrors        bool
+	failOnPatternNotExist bool
+	filesOnly             bool
+	noFollow              bool
+}
+
+// GlobOption represents a setting that can be passed to Glob, GlobWalk, and
+// FilepathGlob.
+type GlobOption func(*glob)
+
+// Construct a new glob object with the given options
+func newGlob(opts ...GlobOption) *glob {
+	g := &glob{}
+	for _, opt := range opts {
+		opt(g)
+	}
+	return g
+}
+
+// WithFailOnIOErrors is an option that can be passed to Glob, GlobWalk, or
+// FilepathGlob. If passed, doublestar will abort and return IO errors when
+// encountered. Note that if the glob pattern references a path that does not
+// exist (such as `nonexistent/path/*`), this is _not_ considered an IO error:
+// it is considered a pattern with no matches.
+//
+func WithFailOnIOErrors() GlobOption {
+	return func(g *glob) {
+		g.failOnIOErrors = true
+	}
+}
+
+// WithFailOnPatternNotExist is an option that can be passed to Glob, GlobWalk,
+// or FilepathGlob. If passed, doublestar will abort and return
+// ErrPatternNotExist if the pattern references a path that does not exist
+// before any meta characters, such as `nonexistent/path/*`. Note that alts
+// (ie, `{...}`) are expanded before this check. In other words, a pattern such
+// as `{a,b}/*` may fail if either `a` or `b` does not exist, but `*/{a,b}`
+// will never fail because the star may match nothing.
+//
+func WithFailOnPatternNotExist() GlobOption {
+	return func(g *glob) {
+		g.failOnPatternNotExist = true
+	}
+}
+
+// WithFilesOnly is an option that can be passed to Glob, GlobWalk, or
+// FilepathGlob. If passed, doublestar will only return files that match the
+// pattern, not directories.
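+//
+// For example, a minimal usage sketch (the fsys and pattern here are
+// hypothetical, chosen only for illustration):
+//
+//	fsys := os.DirFS("/some/dir")
+//	files, err := Glob(fsys, "**/*.go", WithFilesOnly())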
+//
+// Note: if combined with the WithNoFollow option, symlinks to directories
+// _will_ be included in the result since no attempt is made to follow the
+// symlink.
+//
+func WithFilesOnly() GlobOption {
+	return func(g *glob) {
+		g.filesOnly = true
+	}
+}
+
+// WithNoFollow is an option that can be passed to Glob, GlobWalk, or
+// FilepathGlob. If passed, doublestar will not follow symlinks while
+// traversing the filesystem. However, due to io/fs's _very_ poor support for
+// querying the filesystem about symlinks, there's a caveat here: if part of
+// the pattern before any meta characters contains a reference to a symlink,
+// that symlink will be followed. For example, the symlink in a pattern such
+// as `path/to/symlink/*` will be followed, assuming it is a valid symlink to
+// a directory. However, from this same example, a pattern such as
+// `path/to/**` will not traverse the `symlink`, nor will `path/*/symlink/*`.
+//
+// Note: if combined with the WithFilesOnly option, symlinks to directories
+// _will_ be included in the result since no attempt is made to follow the
+// symlink.
+//
+func WithNoFollow() GlobOption {
+	return func(g *glob) {
+		g.noFollow = true
+	}
+}
+
+// forwardErrIfFailOnIOErrors is used to wrap the return values of I/O
+// functions. When failOnIOErrors is enabled, it will return err; otherwise, it
+// always returns nil.
+//
+func (g *glob) forwardErrIfFailOnIOErrors(err error) error {
+	if g.failOnIOErrors {
+		return err
+	}
+	return nil
+}
+
+// handlePatternNotExist handles fs.ErrNotExist errors. If
+// WithFailOnPatternNotExist has been enabled and canFail is true, this will
+// return ErrPatternNotExist. Otherwise, it will return nil.
+//
+func (g *glob) handlePatternNotExist(canFail bool) error {
+	if canFail && g.failOnPatternNotExist {
+		return ErrPatternNotExist
+	}
+	return nil
+}
+
+// Format options for debugging/testing purposes
+func (g *glob) GoString() string {
+	var b strings.Builder
+	b.WriteString("opts: ")
+
+	hasOpts := false
+	if g.failOnIOErrors {
+		b.WriteString("WithFailOnIOErrors")
+		hasOpts = true
+	}
+	if g.failOnPatternNotExist {
+		if hasOpts {
+			b.WriteString(", ")
+		}
+		b.WriteString("WithFailOnPatternNotExist")
+		hasOpts = true
+	}
+	if g.filesOnly {
+		if hasOpts {
+			b.WriteString(", ")
+		}
+		b.WriteString("WithFilesOnly")
+		hasOpts = true
+	}
+	if g.noFollow {
+		if hasOpts {
+			b.WriteString(", ")
+		}
+		b.WriteString("WithNoFollow")
+		hasOpts = true
+	}
+
+	if !hasOpts {
+		b.WriteString("nil")
+	}
+	return b.String()
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go b/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go
new file mode 100644
index 00000000..84e764f0
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go
@@ -0,0 +1,414 @@
+package doublestar
+
+import (
+	"errors"
+	"io/fs"
+	"path"
+	"path/filepath"
+	"strings"
+)
+
+// If returned from GlobWalkFunc, will cause GlobWalk to skip the current
+// directory. In other words, if the current path is a directory, GlobWalk will
+// not recurse into it. Otherwise, GlobWalk will skip the rest of the current
+// directory.
+var SkipDir = fs.SkipDir
+
+// Callback function for GlobWalk(). If the function returns an error, GlobWalk
+// will end immediately and return the same error.
+type GlobWalkFunc func(path string, d fs.DirEntry) error
+
+// GlobWalk calls the callback function `fn` for every file matching pattern.
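+// As an illustrative sketch (hypothetical fsys and pattern; error handling
+// elided), typical usage looks something like:
+//
+//	_ = GlobWalk(fsys, "**/*.go", func(p string, d fs.DirEntry) error {
+//		fmt.Println(p)
+//		return nil
+//	})
+//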
+// The syntax of pattern is the same as in Match() and the behavior is the same +// as Glob(), with regard to limitations (such as patterns containing `/./`, +// `/../`, or starting with `/`). The pattern may describe hierarchical names +// such as usr/*/bin/ed. +// +// GlobWalk may have a small performance benefit over Glob if you do not need a +// slice of matches because it can avoid allocating memory for the matches. +// Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for +// each match, and lets you quit early by returning a non-nil error from your +// callback function. Like `io/fs.WalkDir`, if your callback returns `SkipDir`, +// GlobWalk will skip the current directory. This means that if the current +// path _is_ a directory, GlobWalk will not recurse into it. If the current +// path is not a directory, the rest of the parent directory will be skipped. +// +// GlobWalk ignores file system errors such as I/O errors reading directories +// by default. GlobWalk may return ErrBadPattern, reporting that the pattern is +// malformed. +// +// To enable aborting on I/O errors, the WithFailOnIOErrors option can be +// passed. +// +// Additionally, if the callback function `fn` returns an error, GlobWalk will +// exit immediately and return that error. +// +// Like Glob(), this function assumes that your pattern uses `/` as the path +// separator even if that's not correct for your OS (like Windows). If you +// aren't sure if that's the case, you can use filepath.ToSlash() on your +// pattern before calling GlobWalk(). +// +// Note: users should _not_ count on the returned error, +// doublestar.ErrBadPattern, being equal to path.ErrBadPattern. +// +func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc, opts ...GlobOption) error { + if !ValidatePattern(pattern) { + return ErrBadPattern + } + + g := newGlob(opts...) + return g.doGlobWalk(fsys, pattern, true, true, fn) +} + +// Actually execute GlobWalk +// - firstSegment is true if we're in the first segment of the pattern, ie, +// the right-most part where we can match files. If it's false, we're +// somewhere in the middle (or at the beginning) and can only match +// directories since there are path segments above us. +// - beforeMeta is true if we're exploring segments before any meta +// characters, ie, in a pattern such as `path/to/file*.txt`, the `path/to/` +// bit does not contain any meta characters. +func (g *glob) doGlobWalk(fsys fs.FS, pattern string, firstSegment, beforeMeta bool, fn GlobWalkFunc) error { + patternStart := indexMeta(pattern) + if patternStart == -1 { + // pattern doesn't contain any meta characters - does a file matching the + // pattern exist? + // The pattern may contain escaped wildcard characters for an exact path match. + path := unescapeMeta(pattern) + info, pathExists, err := g.exists(fsys, path, beforeMeta) + if pathExists && (!firstSegment || !g.filesOnly || !info.IsDir()) { + err = fn(path, dirEntryFromFileInfo(info)) + if err == SkipDir { + err = nil + } + } + return err + } + + dir := "." + splitIdx := lastIndexSlashOrAlt(pattern) + if splitIdx != -1 { + if pattern[splitIdx] == '}' { + openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) + if openingIdx == -1 { + // if there's no matching opening index, technically Match() will treat + // an unmatched `}` as nothing special, so... we will, too! 
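+				// (ie, fall back to splitting on the last unescaped slash)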
+ splitIdx = lastIndexSlash(pattern[:splitIdx]) + if splitIdx != -1 { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } else { + // otherwise, we have to handle the alts: + return g.globAltsWalk(fsys, pattern, openingIdx, splitIdx, firstSegment, beforeMeta, fn) + } + } else { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } + + // if `splitIdx` is less than `patternStart`, we know `dir` has no meta + // characters. They would be equal if they are both -1, which means `dir` + // will be ".", and we know that doesn't have meta characters either. + if splitIdx <= patternStart { + return g.globDirWalk(fsys, dir, pattern, firstSegment, beforeMeta, fn) + } + + return g.doGlobWalk(fsys, dir, false, beforeMeta, func(p string, d fs.DirEntry) error { + if err := g.globDirWalk(fsys, p, pattern, firstSegment, false, fn); err != nil { + return err + } + return nil + }) +} + +// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the +// indexes of `{` and `}`, respectively +func (g *glob) globAltsWalk(fsys fs.FS, pattern string, openingIdx, closingIdx int, firstSegment, beforeMeta bool, fn GlobWalkFunc) (err error) { + var matches []DirEntryWithFullPath + startIdx := 0 + afterIdx := closingIdx + 1 + splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) + if splitIdx == -1 || pattern[splitIdx] == '}' { + // no common prefix + matches, err = g.doGlobAltsWalk(fsys, "", pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, beforeMeta, matches) + if err != nil { + return + } + } else { + // our alts have a common prefix that we can process first + startIdx = splitIdx + 1 + innerBeforeMeta := beforeMeta && !hasMetaExceptAlts(pattern[:splitIdx]) + err = g.doGlobWalk(fsys, pattern[:splitIdx], false, beforeMeta, func(p string, d fs.DirEntry) (e error) { + matches, e = g.doGlobAltsWalk(fsys, p, pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, innerBeforeMeta, matches) + return e + }) + if err != nil { + return + } + } + + skip := "" + for _, m := range matches { + if skip != "" { + // Because matches are sorted, we know that descendants of the skipped + // item must come immediately after the skipped item. If we find an item + // that does not have a prefix matching the skipped item, we know we're + // done skipping. I'm using strings.HasPrefix here because + // filepath.HasPrefix has been marked deprecated (and just calls + // strings.HasPrefix anyway). The reason it's deprecated is because it + // doesn't handle case-insensitive paths, nor does it guarantee that the + // prefix is actually a parent directory. Neither is an issue here: the + // paths come from the system so their cases will match, and we guarantee + // a parent directory by appending a slash to the prefix. + // + // NOTE: m.Path will always use slashes as path separators. 
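+			// (m.Path always comes from the slash-separated pattern or from
+			// path.Join, so a plain string prefix check is safe here)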
+ if strings.HasPrefix(m.Path, skip) { + continue + } + skip = "" + } + if err = fn(m.Path, m.Entry); err != nil { + if err == SkipDir { + isDir, err := g.isDir(fsys, "", m.Path, m.Entry) + if err != nil { + return err + } + if isDir { + // append a slash to guarantee `skip` will be treated as a parent dir + skip = m.Path + "/" + } else { + // Dir() calls Clean() which calls FromSlash(), so we need to convert + // back to slashes + skip = filepath.ToSlash(filepath.Dir(m.Path)) + "/" + } + err = nil + continue + } + return + } + } + + return +} + +// runs actual matching for alts +func (g *glob) doGlobAltsWalk(fsys fs.FS, d, pattern string, startIdx, openingIdx, closingIdx, afterIdx int, firstSegment, beforeMeta bool, m []DirEntryWithFullPath) (matches []DirEntryWithFullPath, err error) { + matches = m + matchesLen := len(m) + patIdx := openingIdx + 1 + for patIdx < closingIdx { + nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) + if nextIdx == -1 { + nextIdx = closingIdx + } else { + nextIdx += patIdx + } + + alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) + err = g.doGlobWalk(fsys, alt, firstSegment, beforeMeta, func(p string, d fs.DirEntry) error { + // insertion sort, ignoring dups + insertIdx := matchesLen + for insertIdx > 0 && matches[insertIdx-1].Path > p { + insertIdx-- + } + if insertIdx > 0 && matches[insertIdx-1].Path == p { + // dup + return nil + } + + // append to grow the slice, then insert + entry := DirEntryWithFullPath{d, p} + matches = append(matches, entry) + for i := matchesLen; i > insertIdx; i-- { + matches[i] = matches[i-1] + } + matches[insertIdx] = entry + matchesLen++ + + return nil + }) + if err != nil { + return + } + + patIdx = nextIdx + 1 + } + + return +} + +func (g *glob) globDirWalk(fsys fs.FS, dir, pattern string, canMatchFiles, beforeMeta bool, fn GlobWalkFunc) (e error) { + if pattern == "" { + if !canMatchFiles || !g.filesOnly { + // pattern can be an empty string if the original pattern ended in a + // slash, in which case, we should just return dir, but only if it + // actually exists and it's a directory (or a symlink to a directory) + info, isDir, err := g.isPathDir(fsys, dir, beforeMeta) + if err != nil { + return err + } + if isDir { + e = fn(dir, dirEntryFromFileInfo(info)) + if e == SkipDir { + e = nil + } + } + } + return + } + + if pattern == "**" { + // `**` can match *this* dir + info, dirExists, err := g.exists(fsys, dir, beforeMeta) + if err != nil { + return err + } + if !dirExists || !info.IsDir() { + return nil + } + if !canMatchFiles || !g.filesOnly { + if e = fn(dir, dirEntryFromFileInfo(info)); e != nil { + if e == SkipDir { + e = nil + } + return + } + } + return g.globDoubleStarWalk(fsys, dir, canMatchFiles, fn) + } + + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return g.handlePatternNotExist(beforeMeta) + } + return g.forwardErrIfFailOnIOErrors(err) + } + + var matched bool + for _, info := range dirs { + name := info.Name() + matched, e = matchWithSeparator(pattern, name, '/', false) + if e != nil { + return + } + if matched { + matched = canMatchFiles + if !matched || g.filesOnly { + matched, e = g.isDir(fsys, dir, name, info) + if e != nil { + return e + } + if canMatchFiles { + // if we're here, it's because g.filesOnly + // is set and we don't want directories + matched = !matched + } + } + if matched { + if e = fn(path.Join(dir, name), info); e != nil { + if e == SkipDir { + e = nil + } + return + } + } + } + } + + return +} + +// 
recursively walk files/directories in a directory +func (g *glob) globDoubleStarWalk(fsys fs.FS, dir string, canMatchFiles bool, fn GlobWalkFunc) (e error) { + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + // This function is only ever called after we know the top-most directory + // exists, so, if we ever get here, we know we'll never return + // ErrPatternNotExist. + return nil + } + return g.forwardErrIfFailOnIOErrors(err) + } + + for _, info := range dirs { + name := info.Name() + isDir, err := g.isDir(fsys, dir, name, info) + if err != nil { + return err + } + + if isDir { + p := path.Join(dir, name) + if !canMatchFiles || !g.filesOnly { + // `**` can match *this* dir, so add it + if e = fn(p, info); e != nil { + if e == SkipDir { + e = nil + continue + } + return + } + } + if e = g.globDoubleStarWalk(fsys, p, canMatchFiles, fn); e != nil { + return + } + } else if canMatchFiles { + if e = fn(path.Join(dir, name), info); e != nil { + if e == SkipDir { + e = nil + } + return + } + } + } + + return +} + +type DirEntryFromFileInfo struct { + fi fs.FileInfo +} + +func (d *DirEntryFromFileInfo) Name() string { + return d.fi.Name() +} + +func (d *DirEntryFromFileInfo) IsDir() bool { + return d.fi.IsDir() +} + +func (d *DirEntryFromFileInfo) Type() fs.FileMode { + return d.fi.Mode().Type() +} + +func (d *DirEntryFromFileInfo) Info() (fs.FileInfo, error) { + return d.fi, nil +} + +func dirEntryFromFileInfo(fi fs.FileInfo) fs.DirEntry { + return &DirEntryFromFileInfo{fi} +} + +type DirEntryWithFullPath struct { + Entry fs.DirEntry + Path string +} + +func hasMetaExceptAlts(s string) bool { + var c byte + l := len(s) + for i := 0; i < l; i++ { + c = s[i] + if c == '*' || c == '?' || c == '[' { + return true + } else if c == '\\' { + // skip next byte + i++ + } + } + return false +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/match.go b/vendor/github.com/bmatcuk/doublestar/v4/match.go new file mode 100644 index 00000000..4232c79f --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/match.go @@ -0,0 +1,381 @@ +package doublestar + +import ( + "path/filepath" + "unicode/utf8" +) + +// Match reports whether name matches the shell pattern. +// The pattern syntax is: +// +// pattern: +// { term } +// term: +// '*' matches any sequence of non-path-separators +// '/**/' matches zero or more directories +// '?' matches any single non-path-separator character +// '[' [ '^' '!' ] { character-range } ']' +// character class (must be non-empty) +// starting with `^` or `!` negates the class +// '{' { term } [ ',' { term } ... ] '}' +// alternatives +// c matches character c (c != '*', '?', '\\', '[') +// '\\' c matches character c +// +// character-range: +// c matches character c (c != '\\', '-', ']') +// '\\' c matches character c +// lo '-' hi matches character c for lo <= c <= hi +// +// Match returns true if `name` matches the file name `pattern`. `name` and +// `pattern` are split on forward slash (`/`) characters and may be relative or +// absolute. +// +// Match requires pattern to match all of name, not just a substring. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. +// +// A doublestar (`**`) should appear surrounded by path separators such as +// `/**/`. A mid-pattern doublestar (`**`) behaves like bash's globstar +// option: a pattern such as `path/to/**.txt` would return the same results as +// `path/to/*.txt`. The pattern you're looking for is `path/to/**/*.txt`. 
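+//
+// For example (an illustrative sketch, not part of the syntax reference
+// above):
+//
+//	ok, _ := Match("path/to/**/*.txt", "path/to/a/b.txt") // true
+//	ok, _ = Match("path/to/**.txt", "path/to/a/b.txt")    // false: mid-pattern `**` acts like `*`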
+// +// Note: this is meant as a drop-in replacement for path.Match() which +// always uses '/' as the path separator. If you want to support systems +// which use a different path separator (such as Windows), what you want +// is PathMatch(). Alternatively, you can run filepath.ToSlash() on both +// pattern and name and then use this function. +// +// Note: users should _not_ count on the returned error, +// doublestar.ErrBadPattern, being equal to path.ErrBadPattern. +// +func Match(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, '/', true) +} + +// PathMatch returns true if `name` matches the file name `pattern`. The +// difference between Match and PathMatch is that PathMatch will automatically +// use your system's path separator to split `name` and `pattern`. On systems +// where the path separator is `'\'`, escaping will be disabled. +// +// Note: this is meant as a drop-in replacement for filepath.Match(). It +// assumes that both `pattern` and `name` are using the system's path +// separator. If you can't be sure of that, use filepath.ToSlash() on both +// `pattern` and `name`, and then use the Match() function instead. +// +func PathMatch(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, filepath.Separator, true) +} + +func matchWithSeparator(pattern, name string, separator rune, validate bool) (matched bool, err error) { + return doMatchWithSeparator(pattern, name, separator, validate, -1, -1, -1, -1, 0, 0) +} + +func doMatchWithSeparator(pattern, name string, separator rune, validate bool, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, patIdx, nameIdx int) (matched bool, err error) { + patLen := len(pattern) + nameLen := len(name) + startOfSegment := true +MATCH: + for nameIdx < nameLen { + if patIdx < patLen { + switch pattern[patIdx] { + case '*': + if patIdx++; patIdx < patLen && pattern[patIdx] == '*' { + // doublestar - must begin with a path separator, otherwise we'll + // treat it like a single star like bash + patIdx++ + if startOfSegment { + if patIdx >= patLen { + // pattern ends in `/**`: return true + return true, nil + } + + // doublestar must also end with a path separator, otherwise we're + // just going to treat the doublestar as a single star like bash + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + if patRune == separator { + patIdx += patRuneLen + + doublestarPatternBacktrack = patIdx + doublestarNameBacktrack = nameIdx + starPatternBacktrack = -1 + starNameBacktrack = -1 + continue + } + } + } + startOfSegment = false + + starPatternBacktrack = patIdx + starNameBacktrack = nameIdx + continue + + case '?': + startOfSegment = false + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + if nameRune == separator { + // `?` cannot match the separator + break + } + + patIdx++ + nameIdx += nameRuneLen + continue + + case '[': + startOfSegment = false + if patIdx++; patIdx >= patLen { + // class didn't end + return false, ErrBadPattern + } + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + + matched := false + negate := pattern[patIdx] == '!' 
|| pattern[patIdx] == '^' + if negate { + patIdx++ + } + + if patIdx >= patLen || pattern[patIdx] == ']' { + // class didn't end or empty character class + return false, ErrBadPattern + } + + last := utf8.MaxRune + for patIdx < patLen && pattern[patIdx] != ']' { + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + + // match a range + if last < utf8.MaxRune && patRune == '-' && patIdx < patLen && pattern[patIdx] != ']' { + if pattern[patIdx] == '\\' { + // next character is escaped + patIdx++ + } + patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + + if last <= nameRune && nameRune <= patRune { + matched = true + break + } + + // didn't match range - reset `last` + last = utf8.MaxRune + continue + } + + // not a range - check if the next rune is escaped + if patRune == '\\' { + patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + } + + // check if the rune matches + if patRune == nameRune { + matched = true + break + } + + // no matches yet + last = patRune + } + + if matched == negate { + // failed to match - if we reached the end of the pattern, that means + // we never found a closing `]` + if patIdx >= patLen { + return false, ErrBadPattern + } + break + } + + closingIdx := indexUnescapedByte(pattern[patIdx:], ']', true) + if closingIdx == -1 { + // no closing `]` + return false, ErrBadPattern + } + + patIdx += closingIdx + 1 + nameIdx += nameRuneLen + continue + + case '{': + startOfSegment = false + beforeIdx := patIdx + patIdx++ + closingIdx := indexMatchedClosingAlt(pattern[patIdx:], separator != '\\') + if closingIdx == -1 { + // no closing `}` + return false, ErrBadPattern + } + closingIdx += patIdx + + for { + commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') + if commaIdx == -1 { + break + } + commaIdx += patIdx + + result, err := doMatchWithSeparator(pattern[:beforeIdx]+pattern[patIdx:commaIdx]+pattern[closingIdx+1:], name, separator, validate, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, beforeIdx, nameIdx) + if result || err != nil { + return result, err + } + + patIdx = commaIdx + 1 + } + return doMatchWithSeparator(pattern[:beforeIdx]+pattern[patIdx:closingIdx]+pattern[closingIdx+1:], name, separator, validate, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, beforeIdx, nameIdx) + + case '\\': + if separator != '\\' { + // next rune is "escaped" in the pattern - literal match + if patIdx++; patIdx >= patLen { + // pattern ended + return false, ErrBadPattern + } + } + fallthrough + + default: + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + if patRune != nameRune { + if separator != '\\' && patIdx > 0 && pattern[patIdx-1] == '\\' { + // if this rune was meant to be escaped, we need to move patIdx + // back to the backslash before backtracking or validating below + patIdx-- + } + break + } + + patIdx += patRuneLen + nameIdx += nameRuneLen + startOfSegment = patRune == separator + continue + } + } + + if starPatternBacktrack >= 0 { + // `*` backtrack, but only if the `name` rune isn't the separator + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[starNameBacktrack:]) + if nameRune != separator { + starNameBacktrack += nameRuneLen + patIdx = starPatternBacktrack + nameIdx = starNameBacktrack + startOfSegment = false + continue + } + } + + if 
doublestarPatternBacktrack >= 0 { + // `**` backtrack, advance `name` past next separator + nameIdx = doublestarNameBacktrack + for nameIdx < nameLen { + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + nameIdx += nameRuneLen + if nameRune == separator { + doublestarNameBacktrack = nameIdx + patIdx = doublestarPatternBacktrack + startOfSegment = true + continue MATCH + } + } + } + + if validate && patIdx < patLen && !doValidatePattern(pattern[patIdx:], separator) { + return false, ErrBadPattern + } + return false, nil + } + + if nameIdx < nameLen { + // we reached the end of `pattern` before the end of `name` + return false, nil + } + + // we've reached the end of `name`; we've successfully matched if we've also + // reached the end of `pattern`, or if the rest of `pattern` can match a + // zero-length string + return isZeroLengthPattern(pattern[patIdx:], separator) +} + +func isZeroLengthPattern(pattern string, separator rune) (ret bool, err error) { + // `/**`, `**/`, and `/**/` are special cases - a pattern such as `path/to/a/**` or `path/to/a/**/` + // *should* match `path/to/a` because `a` might be a directory + if pattern == "" || + pattern == "*" || + pattern == "**" || + pattern == string(separator)+"**" || + pattern == "**"+string(separator) || + pattern == string(separator)+"**"+string(separator) { + return true, nil + } + + if pattern[0] == '{' { + closingIdx := indexMatchedClosingAlt(pattern[1:], separator != '\\') + if closingIdx == -1 { + // no closing '}' + return false, ErrBadPattern + } + closingIdx += 1 + + patIdx := 1 + for { + commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') + if commaIdx == -1 { + break + } + commaIdx += patIdx + + ret, err = isZeroLengthPattern(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], separator) + if ret || err != nil { + return + } + + patIdx = commaIdx + 1 + } + return isZeroLengthPattern(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], separator) + } + + // no luck - validate the rest of the pattern + if !doValidatePattern(pattern, separator) { + return false, ErrBadPattern + } + return false, nil +} + +// Finds the index of the first unescaped byte `c`, or negative 1. +func indexUnescapedByte(s string, c byte, allowEscaping bool) int { + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == c { + return i + } + } + return -1 +} + +// Assuming the byte before the beginning of `s` is an opening `{`, this +// function will find the index of the matching `}`. That is, it'll skip over +// any nested `{}` and account for escaping +func indexMatchedClosingAlt(s string, allowEscaping bool) int { + alts := 1 + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == '{' { + alts++ + } else if s[i] == '}' { + if alts--; alts == 0 { + return i + } + } + } + return -1 +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/utils.go b/vendor/github.com/bmatcuk/doublestar/v4/utils.go new file mode 100644 index 00000000..0ab1dc98 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/utils.go @@ -0,0 +1,147 @@ +package doublestar + +import ( + "errors" + "os" + "path" + "path/filepath" + "strings" +) + +// SplitPattern is a utility function. Given a pattern, SplitPattern will +// return two strings: the first string is everything up to the last slash +// (`/`) that appears _before_ any unescaped "meta" characters (ie, `*?[{`). 
+// The second string is everything after that slash. For example, given the +// pattern: +// +// ../../path/to/meta*/** +// ^----------- split here +// +// SplitPattern returns "../../path/to" and "meta*/**". This is useful for +// initializing os.DirFS() to call Glob() because Glob() will silently fail if +// your pattern includes `/./` or `/../`. For example: +// +// base, pattern := SplitPattern("../../path/to/meta*/**") +// fsys := os.DirFS(base) +// matches, err := Glob(fsys, pattern) +// +// If SplitPattern cannot find somewhere to split the pattern (for example, +// `meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in +// this example). +// +// Of course, it is your responsibility to decide if the returned base path is +// "safe" in the context of your application. Perhaps you could use Match() to +// validate against a list of approved base directories? +// +func SplitPattern(p string) (base, pattern string) { + base = "." + pattern = p + + splitIdx := -1 + for i := 0; i < len(p); i++ { + c := p[i] + if c == '\\' { + i++ + } else if c == '/' { + splitIdx = i + } else if c == '*' || c == '?' || c == '[' || c == '{' { + break + } + } + + if splitIdx == 0 { + return "/", p[1:] + } else if splitIdx > 0 { + return p[:splitIdx], p[splitIdx+1:] + } + + return +} + +// FilepathGlob returns the names of all files matching pattern or nil if there +// is no matching file. The syntax of pattern is the same as in Match(). The +// pattern may describe hierarchical names such as usr/*/bin/ed. +// +// FilepathGlob ignores file system errors such as I/O errors reading +// directories by default. The only possible returned error is ErrBadPattern, +// reporting that the pattern is malformed. +// +// To enable aborting on I/O errors, the WithFailOnIOErrors option can be +// passed. +// +// Note: FilepathGlob is a convenience function that is meant as a drop-in +// replacement for `path/filepath.Glob()` for users who don't need the +// complication of io/fs. Basically, it: +// - Runs `filepath.Clean()` and `ToSlash()` on the pattern +// - Runs `SplitPattern()` to get a base path and a pattern to Glob +// - Creates an FS object from the base path and `Glob()s` on the pattern +// - Joins the base path with all of the matches from `Glob()` +// +// Returned paths will use the system's path separator, just like +// `filepath.Glob()`. +// +// Note: the returned error doublestar.ErrBadPattern is not equal to +// filepath.ErrBadPattern. +// +func FilepathGlob(pattern string, opts ...GlobOption) (matches []string, err error) { + pattern = filepath.Clean(pattern) + pattern = filepath.ToSlash(pattern) + base, f := SplitPattern(pattern) + if f == "" || f == "." || f == ".." { + // some special cases to match filepath.Glob behavior + if !ValidatePathPattern(pattern) { + return nil, ErrBadPattern + } + + if filepath.Separator != '\\' { + pattern = unescapeMeta(pattern) + } + + if _, err = os.Lstat(pattern); err != nil { + g := newGlob(opts...) 
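+			// Lstat failed; translate the error according to the configured options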
+ if errors.Is(err, os.ErrNotExist) { + return nil, g.handlePatternNotExist(true) + } + return nil, g.forwardErrIfFailOnIOErrors(err) + } + return []string{filepath.FromSlash(pattern)}, nil + } + + fs := os.DirFS(base) + if matches, err = Glob(fs, f, opts...); err != nil { + return nil, err + } + for i := range matches { + // use path.Join because we used ToSlash above to ensure our paths are made + // of forward slashes, no matter what the system uses + matches[i] = filepath.FromSlash(path.Join(base, matches[i])) + } + return +} + +// Finds the next comma, but ignores any commas that appear inside nested `{}`. +// Assumes that each opening bracket has a corresponding closing bracket. +func indexNextAlt(s string, allowEscaping bool) int { + alts := 1 + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == '{' { + alts++ + } else if s[i] == '}' { + alts-- + } else if s[i] == ',' && alts == 1 { + return i + } + } + return -1 +} + +var metaReplacer = strings.NewReplacer("\\*", "*", "\\?", "?", "\\[", "[", "\\]", "]", "\\{", "{", "\\}", "}") + +// Unescapes meta characters (*?[]{}) +func unescapeMeta(pattern string) string { + return metaReplacer.Replace(pattern) +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/validate.go b/vendor/github.com/bmatcuk/doublestar/v4/validate.go new file mode 100644 index 00000000..c689b9eb --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/validate.go @@ -0,0 +1,82 @@ +package doublestar + +import "path/filepath" + +// Validate a pattern. Patterns are validated while they run in Match(), +// PathMatch(), and Glob(), so, you normally wouldn't need to call this. +// However, there are cases where this might be useful: for example, if your +// program allows a user to enter a pattern that you'll run at a later time, +// you might want to validate it. +// +// ValidatePattern assumes your pattern uses '/' as the path separator. +// +func ValidatePattern(s string) bool { + return doValidatePattern(s, '/') +} + +// Like ValidatePattern, only uses your OS path separator. In other words, use +// ValidatePattern if you would normally use Match() or Glob(). Use +// ValidatePathPattern if you would normally use PathMatch(). Keep in mind, +// Glob() requires '/' separators, even if your OS uses something else. +// +func ValidatePathPattern(s string) bool { + return doValidatePattern(s, filepath.Separator) +} + +func doValidatePattern(s string, separator rune) bool { + altDepth := 0 + l := len(s) +VALIDATE: + for i := 0; i < l; i++ { + switch s[i] { + case '\\': + if separator != '\\' { + // skip the next byte - return false if there is no next byte + if i++; i >= l { + return false + } + } + continue + + case '[': + if i++; i >= l { + // class didn't end + return false + } + if s[i] == '^' || s[i] == '!' 
{ + i++ + } + if i >= l || s[i] == ']' { + // class didn't end or empty character class + return false + } + + for ; i < l; i++ { + if separator != '\\' && s[i] == '\\' { + i++ + } else if s[i] == ']' { + // looks good + continue VALIDATE + } + } + + // class didn't end + return false + + case '{': + altDepth++ + continue + + case '}': + if altDepth == 0 { + // alt end without a corresponding start + return false + } + altDepth-- + continue + } + } + + // valid as long as all alts are closed + return altDepth == 0 +} diff --git a/vendor/github.com/mitchellh/cli/LICENSE b/vendor/github.com/hashicorp/cli/LICENSE similarity index 100% rename from vendor/github.com/mitchellh/cli/LICENSE rename to vendor/github.com/hashicorp/cli/LICENSE diff --git a/vendor/github.com/mitchellh/cli/Makefile b/vendor/github.com/hashicorp/cli/Makefile similarity index 100% rename from vendor/github.com/mitchellh/cli/Makefile rename to vendor/github.com/hashicorp/cli/Makefile diff --git a/vendor/github.com/mitchellh/cli/README.md b/vendor/github.com/hashicorp/cli/README.md similarity index 88% rename from vendor/github.com/mitchellh/cli/README.md rename to vendor/github.com/hashicorp/cli/README.md index d75ff863..440f69e6 100644 --- a/vendor/github.com/mitchellh/cli/README.md +++ b/vendor/github.com/hashicorp/cli/README.md @@ -1,8 +1,8 @@ -# Go CLI Library [![GoDoc](https://godoc.org/github.com/mitchellh/cli?status.png)](https://pkg.go.dev/github.com/mitchellh/cli) +# Go CLI Library [![GoDoc](https://godoc.org/github.com/hashicorp/cli?status.png)](https://pkg.go.dev/github.com/hashicorp/cli) cli is a library for implementing command-line interfaces in Go. cli is the library that powers the CLI for -[Packer](https://github.com/mitchellh/packer), +[Packer](https://github.com/hashicorp/packer), [Consul](https://github.com/hashicorp/consul), [Vault](https://github.com/hashicorp/vault), [Terraform](https://github.com/hashicorp/terraform), @@ -44,7 +44,7 @@ import ( "log" "os" - "github.com/mitchellh/cli" + "github.com/hashicorp/cli" ) func main() { diff --git a/vendor/github.com/mitchellh/cli/autocomplete.go b/vendor/github.com/hashicorp/cli/autocomplete.go similarity index 94% rename from vendor/github.com/mitchellh/cli/autocomplete.go rename to vendor/github.com/hashicorp/cli/autocomplete.go index 3bec6258..0671db7c 100644 --- a/vendor/github.com/mitchellh/cli/autocomplete.go +++ b/vendor/github.com/hashicorp/cli/autocomplete.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/mitchellh/cli/cli.go b/vendor/github.com/hashicorp/cli/cli.go similarity index 99% rename from vendor/github.com/mitchellh/cli/cli.go rename to vendor/github.com/hashicorp/cli/cli.go index 95205328..0a479b86 100644 --- a/vendor/github.com/mitchellh/cli/cli.go +++ b/vendor/github.com/hashicorp/cli/cli.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/mitchellh/cli/command.go b/vendor/github.com/hashicorp/cli/command.go similarity index 97% rename from vendor/github.com/mitchellh/cli/command.go rename to vendor/github.com/hashicorp/cli/command.go index bed11faf..717c0701 100644 --- a/vendor/github.com/mitchellh/cli/command.go +++ b/vendor/github.com/hashicorp/cli/command.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/mitchellh/cli/command_mock.go b/vendor/github.com/hashicorp/cli/command_mock.go similarity index 94% rename from vendor/github.com/mitchellh/cli/command_mock.go rename to vendor/github.com/hashicorp/cli/command_mock.go index 7a584b7e..ee80c8f8 100644 --- a/vendor/github.com/mitchellh/cli/command_mock.go +++ b/vendor/github.com/hashicorp/cli/command_mock.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/mitchellh/cli/help.go b/vendor/github.com/hashicorp/cli/help.go similarity index 96% rename from vendor/github.com/mitchellh/cli/help.go rename to vendor/github.com/hashicorp/cli/help.go index f5ca58f5..acbdc44b 100644 --- a/vendor/github.com/mitchellh/cli/help.go +++ b/vendor/github.com/hashicorp/cli/help.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/hashicorp/cli/ui.go b/vendor/github.com/hashicorp/cli/ui.go new file mode 100644 index 00000000..4cb41f66 --- /dev/null +++ b/vendor/github.com/hashicorp/cli/ui.go @@ -0,0 +1,63 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 +//go:build !js +// +build !js + +package cli + +import ( + "bufio" + "errors" + "fmt" + "os" + "os/signal" + "strings" + + "github.com/bgentry/speakeasy" + "github.com/mattn/go-isatty" +) + +func (u *BasicUi) ask(query string, secret bool) (string, error) { + if _, err := fmt.Fprint(u.Writer, query+" "); err != nil { + return "", err + } + + // Register for interrupts so that we can catch it and immediately + // return... + sigCh := make(chan os.Signal, 1) + signal.Notify(sigCh, os.Interrupt) + defer signal.Stop(sigCh) + + // Ask for input in a go-routine so that we can ignore it. + errCh := make(chan error, 1) + lineCh := make(chan string, 1) + go func() { + var line string + var err error + if secret && isatty.IsTerminal(os.Stdin.Fd()) { + line, err = speakeasy.Ask("") + } else { + r := bufio.NewReader(u.Reader) + line, err = r.ReadString('\n') + } + if err != nil { + errCh <- err + return + } + + lineCh <- strings.TrimRight(line, "\r\n") + }() + + select { + case err := <-errCh: + return "", err + case line := <-lineCh: + return line, nil + case <-sigCh: + // Print a newline so that any further output starts properly + // on a new line. + fmt.Fprintln(u.Writer) + + return "", errors.New("interrupted") + } +} diff --git a/vendor/github.com/mitchellh/cli/ui_colored.go b/vendor/github.com/hashicorp/cli/ui_colored.go similarity index 94% rename from vendor/github.com/mitchellh/cli/ui_colored.go rename to vendor/github.com/hashicorp/cli/ui_colored.go index b0ec4484..0c0bc27a 100644 --- a/vendor/github.com/mitchellh/cli/ui_colored.go +++ b/vendor/github.com/hashicorp/cli/ui_colored.go @@ -1,3 +1,8 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 +//go:build !js +// +build !js + package cli import ( diff --git a/vendor/github.com/mitchellh/cli/ui.go b/vendor/github.com/hashicorp/cli/ui_common.go similarity index 72% rename from vendor/github.com/mitchellh/cli/ui.go rename to vendor/github.com/hashicorp/cli/ui_common.go index a2d6f94f..9cd6b1b9 100644 --- a/vendor/github.com/mitchellh/cli/ui.go +++ b/vendor/github.com/hashicorp/cli/ui_common.go @@ -1,16 +1,8 @@ package cli import ( - "bufio" - "errors" "fmt" "io" - "os" - "os/signal" - "strings" - - "github.com/bgentry/speakeasy" - "github.com/mattn/go-isatty" ) // Ui is an interface for interacting with the terminal, or "interface" @@ -59,51 +51,6 @@ func (u *BasicUi) AskSecret(query string) (string, error) { return u.ask(query, true) } -func (u *BasicUi) ask(query string, secret bool) (string, error) { - if _, err := fmt.Fprint(u.Writer, query+" "); err != nil { - return "", err - } - - // Register for interrupts so that we can catch it and immediately - // return... - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, os.Interrupt) - defer signal.Stop(sigCh) - - // Ask for input in a go-routine so that we can ignore it. - errCh := make(chan error, 1) - lineCh := make(chan string, 1) - go func() { - var line string - var err error - if secret && isatty.IsTerminal(os.Stdin.Fd()) { - line, err = speakeasy.Ask("") - } else { - r := bufio.NewReader(u.Reader) - line, err = r.ReadString('\n') - } - if err != nil { - errCh <- err - return - } - - lineCh <- strings.TrimRight(line, "\r\n") - }() - - select { - case err := <-errCh: - return "", err - case line := <-lineCh: - return line, nil - case <-sigCh: - // Print a newline so that any further output starts properly - // on a new line. - fmt.Fprintln(u.Writer) - - return "", errors.New("interrupted") - } -} - func (u *BasicUi) Error(message string) { w := u.Writer if u.ErrorWriter != nil { diff --git a/vendor/github.com/mitchellh/cli/ui_concurrent.go b/vendor/github.com/hashicorp/cli/ui_concurrent.go similarity index 92% rename from vendor/github.com/mitchellh/cli/ui_concurrent.go rename to vendor/github.com/hashicorp/cli/ui_concurrent.go index b4f4dbfa..3262a113 100644 --- a/vendor/github.com/mitchellh/cli/ui_concurrent.go +++ b/vendor/github.com/hashicorp/cli/ui_concurrent.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/hashicorp/cli/ui_js.go b/vendor/github.com/hashicorp/cli/ui_js.go new file mode 100644 index 00000000..ac02693d --- /dev/null +++ b/vendor/github.com/hashicorp/cli/ui_js.go @@ -0,0 +1,10 @@ +package cli + +import ( + "syscall/js" +) + +func (u *BasicUi) ask(query string, secret bool) (string, error) { + line := js.Global().Call("prompt", query).String() + return line, nil +} diff --git a/vendor/github.com/mitchellh/cli/ui_mock.go b/vendor/github.com/hashicorp/cli/ui_mock.go similarity index 96% rename from vendor/github.com/mitchellh/cli/ui_mock.go rename to vendor/github.com/hashicorp/cli/ui_mock.go index 935f28a4..42d90e2c 100644 --- a/vendor/github.com/mitchellh/cli/ui_mock.go +++ b/vendor/github.com/hashicorp/cli/ui_mock.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + package cli import ( diff --git a/vendor/github.com/mitchellh/cli/ui_writer.go b/vendor/github.com/hashicorp/cli/ui_writer.go similarity index 83% rename from vendor/github.com/mitchellh/cli/ui_writer.go rename to vendor/github.com/hashicorp/cli/ui_writer.go index 1e1db3cf..b0b4cdcc 100644 --- a/vendor/github.com/mitchellh/cli/ui_writer.go +++ b/vendor/github.com/hashicorp/cli/ui_writer.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package cli // UiWriter is an io.Writer implementation that can be used with diff --git a/vendor/github.com/hashicorp/go-version/CHANGELOG.md b/vendor/github.com/hashicorp/go-version/CHANGELOG.md index 5f16dd14..6d48174b 100644 --- a/vendor/github.com/hashicorp/go-version/CHANGELOG.md +++ b/vendor/github.com/hashicorp/go-version/CHANGELOG.md @@ -1,3 +1,22 @@ +# 1.7.0 (May 24, 2024) + +ENHANCEMENTS: + +- Remove `reflect` dependency ([#91](https://github.com/hashicorp/go-version/pull/91)) +- Implement the `database/sql.Scanner` and `database/sql/driver.Value` interfaces for `Version` ([#133](https://github.com/hashicorp/go-version/pull/133)) + +INTERNAL: + +- [COMPLIANCE] Add Copyright and License Headers ([#115](https://github.com/hashicorp/go-version/pull/115)) +- [COMPLIANCE] Update MPL-2.0 LICENSE ([#105](https://github.com/hashicorp/go-version/pull/105)) +- Bump actions/cache from 3.0.11 to 3.2.5 ([#116](https://github.com/hashicorp/go-version/pull/116)) +- Bump actions/checkout from 3.2.0 to 3.3.0 ([#111](https://github.com/hashicorp/go-version/pull/111)) +- Bump actions/upload-artifact from 3.1.1 to 3.1.2 ([#112](https://github.com/hashicorp/go-version/pull/112)) +- GHA Migration ([#103](https://github.com/hashicorp/go-version/pull/103)) +- github: Pin external GitHub Actions to hashes ([#107](https://github.com/hashicorp/go-version/pull/107)) +- SEC-090: Automated trusted workflow pinning (2023-04-05) ([#124](https://github.com/hashicorp/go-version/pull/124)) +- update readme ([#104](https://github.com/hashicorp/go-version/pull/104)) + # 1.6.0 (June 28, 2022) FEATURES: diff --git a/vendor/github.com/hashicorp/go-version/LICENSE b/vendor/github.com/hashicorp/go-version/LICENSE index c33dcc7c..1409d6ab 100644 --- a/vendor/github.com/hashicorp/go-version/LICENSE +++ b/vendor/github.com/hashicorp/go-version/LICENSE @@ -1,3 +1,5 @@ +Copyright (c) 2014 HashiCorp, Inc. + Mozilla Public License, version 2.0 1. Definitions diff --git a/vendor/github.com/hashicorp/go-version/README.md b/vendor/github.com/hashicorp/go-version/README.md index 4d250509..4b7806cd 100644 --- a/vendor/github.com/hashicorp/go-version/README.md +++ b/vendor/github.com/hashicorp/go-version/README.md @@ -1,5 +1,5 @@ # Versioning Library for Go -[![Build Status](https://circleci.com/gh/hashicorp/go-version/tree/main.svg?style=svg)](https://circleci.com/gh/hashicorp/go-version/tree/main) +![Build Status](https://github.com/hashicorp/go-version/actions/workflows/go-tests.yml/badge.svg) [![GoDoc](https://godoc.org/github.com/hashicorp/go-version?status.svg)](https://godoc.org/github.com/hashicorp/go-version) go-version is a library for parsing versions and version constraints, diff --git a/vendor/github.com/hashicorp/go-version/constraint.go b/vendor/github.com/hashicorp/go-version/constraint.go index da5d1aca..29bdc4d2 100644 --- a/vendor/github.com/hashicorp/go-version/constraint.go +++ b/vendor/github.com/hashicorp/go-version/constraint.go @@ -1,8 +1,10 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + package version import ( "fmt" - "reflect" "regexp" "sort" "strings" @@ -199,7 +201,7 @@ func prereleaseCheck(v, c *Version) bool { case cPre && vPre: // A constraint with a pre-release can only match a pre-release version // with the same base segments. - return reflect.DeepEqual(c.Segments64(), v.Segments64()) + return v.equalSegments(c) case !cPre && vPre: // A constraint without a pre-release can only match a version without a diff --git a/vendor/github.com/hashicorp/go-version/version.go b/vendor/github.com/hashicorp/go-version/version.go index e87df699..7c683c28 100644 --- a/vendor/github.com/hashicorp/go-version/version.go +++ b/vendor/github.com/hashicorp/go-version/version.go @@ -1,9 +1,12 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package version import ( "bytes" + "database/sql/driver" "fmt" - "reflect" "regexp" "strconv" "strings" @@ -117,11 +120,8 @@ func (v *Version) Compare(other *Version) int { return 0 } - segmentsSelf := v.Segments64() - segmentsOther := other.Segments64() - // If the segments are the same, we must compare on prerelease info - if reflect.DeepEqual(segmentsSelf, segmentsOther) { + if v.equalSegments(other) { preSelf := v.Prerelease() preOther := other.Prerelease() if preSelf == "" && preOther == "" { @@ -137,6 +137,8 @@ func (v *Version) Compare(other *Version) int { return comparePrereleases(preSelf, preOther) } + segmentsSelf := v.Segments64() + segmentsOther := other.Segments64() // Get the highest specificity (hS), or if they're equal, just use segmentSelf length lenSelf := len(segmentsSelf) lenOther := len(segmentsOther) @@ -160,7 +162,7 @@ func (v *Version) Compare(other *Version) int { // this means Other had the lower specificity // Check to see if the remaining segments in Self are all zeros - if !allZero(segmentsSelf[i:]) { - //if not, it means that Self has to be greater than Other + // if not, it means that Self has to be greater than Other return 1 } break @@ -180,6 +182,21 @@ func (v *Version) Compare(other *Version) int { return 0 } +func (v *Version) equalSegments(other *Version) bool { + segmentsSelf := v.Segments64() + segmentsOther := other.Segments64() + + if len(segmentsSelf) != len(segmentsOther) { + return false + } + for i, v := range segmentsSelf { + if v != segmentsOther[i] { + return false + } + } + return true +} + func allZero(segs []int64) bool { for _, s := range segs { if s != 0 { @@ -405,3 +422,20 @@ func (v *Version) UnmarshalText(b []byte) error { func (v *Version) MarshalText() ([]byte, error) { return []byte(v.String()), nil } + +// Scan implements the sql.Scanner interface. +func (v *Version) Scan(src interface{}) error { + switch src := src.(type) { + case string: + return v.UnmarshalText([]byte(src)) + case nil: + return nil + default: + return fmt.Errorf("cannot scan %T as Version", src) + } +} + +// Value implements the driver.Valuer interface. +func (v *Version) Value() (driver.Value, error) { + return v.String(), nil +} diff --git a/vendor/github.com/hashicorp/go-version/version_collection.go b/vendor/github.com/hashicorp/go-version/version_collection.go index cc888d43..83547fe1 100644 --- a/vendor/github.com/hashicorp/go-version/version_collection.go +++ b/vendor/github.com/hashicorp/go-version/version_collection.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + package version // Collection is a type that implements the sort.Interface interface diff --git a/vendor/github.com/hashicorp/hc-install/README.md b/vendor/github.com/hashicorp/hc-install/README.md index 6e78b5a6..0d55191b 100644 --- a/vendor/github.com/hashicorp/hc-install/README.md +++ b/vendor/github.com/hashicorp/hc-install/README.md @@ -14,55 +14,55 @@ the library in ad-hoc or CI shell scripting outside of Go. `hc-install` does **not**: - - Determine suitable installation path based on target system. e.g. in `/usr/bin` or `/usr/local/bin` on Unix based system. - - Deal with execution of installed binaries (via service files or otherwise). - - Upgrade existing binaries on your system. - - Add nor link downloaded binaries to your `$PATH`. +- Determine suitable installation path based on target system. e.g. in `/usr/bin` or `/usr/local/bin` on Unix based system. +- Deal with execution of installed binaries (via service files or otherwise). +- Upgrade existing binaries on your system. +- Add nor link downloaded binaries to your `$PATH`. ## API The `Installer` offers a few high-level methods: - - `Ensure(context.Context, []src.Source)` to find, install, or build a product version - - `Install(context.Context, []src.Installable)` to install a product version +- `Ensure(context.Context, []src.Source)` to find, install, or build a product version +- `Install(context.Context, []src.Installable)` to install a product version ### Sources The `Installer` methods accept number of different `Source` types. Each comes with different trade-offs described below. - - `fs.{AnyVersion,ExactVersion,Version}` - Finds a binary in `$PATH` (or additional paths) - - **Pros:** - - This is most convenient when you already have the product installed on your system +- `fs.{AnyVersion,ExactVersion,Version}` - Finds a binary in `$PATH` (or additional paths) + - **Pros:** + - This is most convenient when you already have the product installed on your system which you already manage. - - **Cons:** - - Only relies on a single version, expects _you_ to manage the installation - - _Not recommended_ for any environment where product installation is not controlled or managed by you (e.g. default GitHub Actions image managed by GitHub) - - `releases.{LatestVersion,ExactVersion}` - Downloads, verifies & installs any known product from `releases.hashicorp.com` - - **Pros:** - - Fast and reliable way of obtaining any pre-built version of any product - - Allows installation of enterprise versions - - **Cons:** - - Installation may consume some bandwidth, disk space and a little time - - Potentially less stable builds (see `checkpoint` below) - - `checkpoint.LatestVersion` - Downloads, verifies & installs any known product available in HashiCorp Checkpoint - - **Pros:** - - Checkpoint typically contains only product versions considered stable - - **Cons:** - - Installation may consume some bandwidth, disk space and a little time - - Currently doesn't allow installation of old versions or enterprise versions (see `releases` above) - - `build.GitRevision` - Clones raw source code and builds the product from it - - **Pros:** - - Useful for catching bugs and incompatibilities as early as possible (prior to product release). 
-    - **Cons:**
-      - Building from scratch can consume significant amount of time & resources (CPU, memory, bandwith, disk space)
-      - There are no guarantees that build instructions will always be up-to-date
-      - There's increased likelihood of build containing bugs prior to release
-      - Any CI builds relying on this are likely to be fragile
+  - **Cons:**
+    - Only relies on a single version, expects _you_ to manage the installation
+    - _Not recommended_ for any environment where product installation is not controlled or managed by you (e.g. default GitHub Actions image managed by GitHub)
+- `releases.{LatestVersion,ExactVersion}` - Downloads, verifies & installs any known product from `releases.hashicorp.com`
+  - **Pros:**
+    - Fast and reliable way of obtaining any pre-built version of any product
+    - Allows installation of enterprise versions
+  - **Cons:**
+    - Installation may consume some bandwidth, disk space and a little time
+    - Potentially less stable builds (see `checkpoint` below)
+- `checkpoint.LatestVersion` - Downloads, verifies & installs any known product available in HashiCorp Checkpoint
+  - **Pros:**
+    - Checkpoint typically contains only product versions considered stable
+  - **Cons:**
+    - Installation may consume some bandwidth, disk space and a little time
+    - Currently doesn't allow installation of old versions or enterprise versions (see `releases` above)
+- `build.GitRevision` - Clones raw source code and builds the product from it
+  - **Pros:**
+    - Useful for catching bugs and incompatibilities as early as possible (prior to product release).
+  - **Cons:**
+    - Building from scratch can consume significant amount of time & resources (CPU, memory, bandwidth, disk space)
+    - There are no guarantees that build instructions will always be up-to-date
+    - There's increased likelihood of build containing bugs prior to release
+    - Any CI builds relying on this are likely to be fragile
 
 ## Example Usage
 
-See examples at https://pkg.go.dev/github.com/hashicorp/hc-install#example-Installer.
+See examples at <https://pkg.go.dev/github.com/hashicorp/hc-install#example-Installer>.
 
 ## CLI
 
@@ -70,9 +70,9 @@ In addition to the Go library, which is the intended primary use case of `hc-ins
 
 The CLI comes with some trade-offs:
 
-  - more limited interface compared to the flexible Go API (installs specific versions of products via `releases.ExactVersion`)
-  - minimal environment pre-requisites (no need to compile Go code)
-  - see ["hc-install is not a package manager"](https://github.com/hashicorp/hc-install#hc-install-is-not-a-package-manager)
+- more limited interface compared to the flexible Go API (installs specific versions of products via `releases.ExactVersion`)
+- minimal environment pre-requisites (no need to compile Go code)
+- see ["hc-install is not a package manager"](https://github.com/hashicorp/hc-install#hc-install-is-not-a-package-manager)
 
 ### Installation
 
@@ -82,7 +82,7 @@ Given that one of the key roles of the CLI/library is integrity checking, you sh
 
 [Homebrew](https://brew.sh)
 
-```
+```sh
 brew install hashicorp/tap/hc-install
 ```
 
@@ -102,19 +102,23 @@ You can follow the instructions in the [Official Packaging Guide](https://www.ha
 
 ### Usage
 
-```
+```text
 Usage: hc-install install [options] -version <version> <product>
 
 This command installs a HashiCorp product.
 Options:
   -version  [REQUIRED] Version of product to install.
-  -path     Path to directory where the product will be installed. Defaults
-            to current working directory.
+  -path     Path to directory where the product will be installed.
+            Defaults to current working directory.
+ -log-file Path to file where logs will be written. /dev/stdout + or /dev/stderr can be used to log to STDOUT/STDERR. ``` + ```sh hc-install install -version 1.3.7 terraform ``` -``` + +```sh hc-install: will install terraform@1.3.7 installed terraform@1.3.7 to /current/working/dir/terraform ``` diff --git a/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go b/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go index 2cd5379f..7a8aa3d9 100644 --- a/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go +++ b/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go @@ -6,7 +6,7 @@ package checkpoint import ( "context" "fmt" - "io/ioutil" + "io" "log" "os" "path/filepath" @@ -24,7 +24,7 @@ import ( var ( defaultTimeout = 30 * time.Second - discardLogger = log.New(ioutil.Discard, "", 0) + discardLogger = log.New(io.Discard, "", 0) ) // LatestVersion installs the latest version known to Checkpoint @@ -101,7 +101,7 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if dstDir == "" { var err error dirName := fmt.Sprintf("%s_*", lv.Product.Name) - dstDir, err = ioutil.TempDir("", dirName) + dstDir, err = os.MkdirTemp("", dirName) if err != nil { return "", err } @@ -126,9 +126,9 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if lv.ArmoredPublicKey != "" { d.ArmoredPublicKey = lv.ArmoredPublicKey } - zipFilePath, err := d.DownloadAndUnpack(ctx, pv, dstDir, "") - if zipFilePath != "" { - lv.pathsToRemove = append(lv.pathsToRemove, zipFilePath) + up, err := d.DownloadAndUnpack(ctx, pv, dstDir, "") + if up != nil { + lv.pathsToRemove = append(lv.pathsToRemove, up.PathsToRemove...) } if err != nil { return "", err diff --git a/vendor/github.com/hashicorp/hc-install/fs/fs.go b/vendor/github.com/hashicorp/hc-install/fs/fs.go index 216df2c2..ac6f5cf9 100644 --- a/vendor/github.com/hashicorp/hc-install/fs/fs.go +++ b/vendor/github.com/hashicorp/hc-install/fs/fs.go @@ -4,14 +4,14 @@ package fs import ( - "io/ioutil" + "io" "log" "time" ) var ( defaultTimeout = 10 * time.Second - discardLogger = log.New(ioutil.Discard, "", 0) + discardLogger = log.New(io.Discard, "", 0) ) type fileCheckFunc func(path string) error diff --git a/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go b/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go index eebd98b8..5aed8444 100644 --- a/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go +++ b/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go @@ -16,9 +16,7 @@ import ( func lookupDirs(extraDirs []string) []string { pathVar := os.Getenv("PATH") dirs := filepath.SplitList(pathVar) - for _, ep := range extraDirs { - dirs = append(dirs, ep) - } + dirs = append(dirs, extraDirs...) 
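
The README hunks above describe the `Ensure` flow only in prose. For reference, a minimal self-contained sketch of that flow, assuming the hc-install v0.7.0 Go API vendored in this patch; the fallback ordering (reuse a binary from `$PATH`, otherwise download the latest release) is illustrative, not prescribed:

```go
package main

import (
	"context"
	"fmt"
	"log"

	install "github.com/hashicorp/hc-install"
	"github.com/hashicorp/hc-install/fs"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
	"github.com/hashicorp/hc-install/src"
)

func main() {
	ctx := context.Background()
	installer := install.NewInstaller()

	// Ensure tries each source in order: an existing terraform binary
	// on $PATH first, then a download from releases.hashicorp.com.
	execPath, err := installer.Ensure(ctx, []src.Source{
		&fs.AnyVersion{Product: &product.Terraform},
		&releases.LatestVersion{Product: product.Terraform},
	})
	if err != nil {
		log.Fatal(err)
	}
	defer installer.Remove(ctx)

	fmt.Println("terraform available at", execPath)
}
```
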
return dirs } diff --git a/vendor/github.com/hashicorp/hc-install/installer.go b/vendor/github.com/hashicorp/hc-install/installer.go index 6c704eed..01c1fdee 100644 --- a/vendor/github.com/hashicorp/hc-install/installer.go +++ b/vendor/github.com/hashicorp/hc-install/installer.go @@ -6,7 +6,7 @@ package install import ( "context" "fmt" - "io/ioutil" + "io" "log" "github.com/hashicorp/go-multierror" @@ -23,7 +23,7 @@ type Installer struct { type RemoveFunc func(ctx context.Context) error func NewInstaller() *Installer { - discardLogger := log.New(ioutil.Discard, "", 0) + discardLogger := log.New(io.Discard, "", 0) return &Installer{ logger: discardLogger, } diff --git a/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go b/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go index 504bf45a..6eef755b 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go +++ b/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go @@ -7,7 +7,7 @@ import ( "bytes" "context" "fmt" - "io/ioutil" + "io" "log" "os" "os/exec" @@ -17,7 +17,7 @@ import ( "golang.org/x/mod/modfile" ) -var discardLogger = log.New(ioutil.Discard, "", 0) +var discardLogger = log.New(io.Discard, "", 0) // GoBuild represents a Go builder (to run "go build") type GoBuild struct { @@ -161,7 +161,7 @@ type CleanupFunc func(context.Context) func guessRequiredGoVersion(repoDir string) (*version.Version, bool) { goEnvFile := filepath.Join(repoDir, ".go-version") if fi, err := os.Stat(goEnvFile); err == nil && !fi.IsDir() { - b, err := ioutil.ReadFile(goEnvFile) + b, err := os.ReadFile(goEnvFile) if err != nil { return nil, false } @@ -174,7 +174,7 @@ func guessRequiredGoVersion(repoDir string) (*version.Version, bool) { goModFile := filepath.Join(repoDir, "go.mod") if fi, err := os.Stat(goModFile); err == nil && !fi.IsDir() { - b, err := ioutil.ReadFile(goModFile) + b, err := os.ReadFile(goModFile) if err != nil { return nil, false } diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go index 843de8cd..59dd1a1f 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go @@ -55,7 +55,7 @@ func (cd *ChecksumDownloader) DownloadAndVerifyChecksums(ctx context.Context) (C client := httpclient.NewHTTPClient() sigURL := fmt.Sprintf("%s/%s/%s/%s", cd.BaseURL, url.PathEscape(cd.ProductVersion.Name), - url.PathEscape(cd.ProductVersion.RawVersion), + url.PathEscape(cd.ProductVersion.Version.String()), url.PathEscape(sigFilename)) cd.Logger.Printf("downloading signature from %s", sigURL) @@ -76,7 +76,7 @@ func (cd *ChecksumDownloader) DownloadAndVerifyChecksums(ctx context.Context) (C shasumsURL := fmt.Sprintf("%s/%s/%s/%s", cd.BaseURL, url.PathEscape(cd.ProductVersion.Name), - url.PathEscape(cd.ProductVersion.RawVersion), + url.PathEscape(cd.ProductVersion.Version.String()), url.PathEscape(cd.ProductVersion.SHASUMS)) cd.Logger.Printf("downloading checksums from %s", shasumsURL) diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go index 146c1cf0..a1139b58 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go @@ -10,7 
+10,6 @@ import ( "crypto/sha256" "fmt" "io" - "io/ioutil" "log" "net/http" "net/url" @@ -29,14 +28,18 @@ type Downloader struct { BaseURL string } -func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, binDir string, licenseDir string) (zipFilePath string, err error) { +type UnpackedProduct struct { + PathsToRemove []string +} + +func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, binDir string, licenseDir string) (up *UnpackedProduct, err error) { if len(pv.Builds) == 0 { - return "", fmt.Errorf("no builds found for %s %s", pv.Name, pv.Version) + return nil, fmt.Errorf("no builds found for %s %s", pv.Name, pv.Version) } pb, ok := pv.Builds.FilterBuild(runtime.GOOS, runtime.GOARCH, "zip") if !ok { - return "", fmt.Errorf("no ZIP archive found for %s %s %s/%s", + return nil, fmt.Errorf("no ZIP archive found for %s %s %s/%s", pv.Name, pv.Version, runtime.GOOS, runtime.GOARCH) } @@ -50,12 +53,12 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, } verifiedChecksums, err := v.DownloadAndVerifyChecksums(ctx) if err != nil { - return "", err + return nil, err } var ok bool verifiedChecksum, ok = verifiedChecksums[pb.Filename] if !ok { - return "", fmt.Errorf("no checksum found for %q", pb.Filename) + return nil, fmt.Errorf("no checksum found for %q", pb.Filename) } } @@ -63,16 +66,17 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, archiveURL := pb.URL if d.BaseURL != "" { - // ensure that absolute download links from mocked responses - // are still pointing to the mock server if one is set + // If custom URL is set, use that instead of the one from the JSON. + // Also ensures that absolute download links from mocked responses + // are still pointing to the mock server if one is set. 
baseURL, err := url.Parse(d.BaseURL) if err != nil { - return "", err + return nil, err } u, err := url.Parse(archiveURL) if err != nil { - return "", err + return nil, err } u.Scheme = baseURL.Scheme u.Host = baseURL.Host @@ -83,15 +87,15 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, req, err := http.NewRequestWithContext(ctx, http.MethodGet, archiveURL, nil) if err != nil { - return "", fmt.Errorf("failed to create request for %q: %w", archiveURL, err) + return nil, fmt.Errorf("failed to create request for %q: %w", archiveURL, err) } resp, err := client.Do(req) if err != nil { - return "", err + return nil, err } if resp.StatusCode != 200 { - return "", fmt.Errorf("failed to download ZIP archive from %q: %s", archiveURL, resp.Status) + return nil, fmt.Errorf("failed to download ZIP archive from %q: %s", archiveURL, resp.Status) } defer resp.Body.Close() @@ -100,19 +104,22 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, contentType := resp.Header.Get("content-type") if !contentTypeIsZip(contentType) { - return "", fmt.Errorf("unexpected content-type: %s (expected any of %q)", + return nil, fmt.Errorf("unexpected content-type: %s (expected any of %q)", contentType, zipMimeTypes) } expectedSize := resp.ContentLength - pkgFile, err := ioutil.TempFile("", pb.Filename) + pkgFile, err := os.CreateTemp("", pb.Filename) if err != nil { - return "", err + return nil, err } defer pkgFile.Close() pkgFilePath, err := filepath.Abs(pkgFile.Name()) + up = &UnpackedProduct{} + up.PathsToRemove = append(up.PathsToRemove, pkgFilePath) + d.Logger.Printf("copying %q (%d bytes) to %s", pb.Filename, expectedSize, pkgFile.Name()) var bytesCopied int64 @@ -123,12 +130,12 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, bytesCopied, err = io.Copy(h, r) if err != nil { - return "", err + return nil, err } calculatedSum := h.Sum(nil) if !bytes.Equal(calculatedSum, verifiedChecksum) { - return pkgFilePath, fmt.Errorf( + return up, fmt.Errorf( "checksum mismatch (expected: %x, got: %x)", verifiedChecksum, calculatedSum, ) @@ -136,14 +143,14 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, } else { bytesCopied, err = io.Copy(pkgFile, pkgReader) if err != nil { - return pkgFilePath, err + return up, err } } d.Logger.Printf("copied %d bytes to %s", bytesCopied, pkgFile.Name()) if expectedSize != 0 && bytesCopied != int64(expectedSize) { - return pkgFilePath, fmt.Errorf( + return up, fmt.Errorf( "unexpected size (downloaded: %d, expected: %d)", bytesCopied, expectedSize, ) @@ -151,7 +158,7 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, r, err := zip.OpenReader(pkgFile.Name()) if err != nil { - return pkgFilePath, err + return up, err } defer r.Close() @@ -163,7 +170,7 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, } srcFile, err := f.Open() if err != nil { - return pkgFilePath, err + return up, err } // Determine the appropriate destination file path @@ -174,20 +181,25 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, d.Logger.Printf("unpacking %s to %s", f.Name, dstDir) dstPath := filepath.Join(dstDir, f.Name) + + if isLicenseFile(f.Name) { + up.PathsToRemove = append(up.PathsToRemove, dstPath) + } + dstFile, err := os.Create(dstPath) if err != nil { - return pkgFilePath, err + return up, err } _, err = io.Copy(dstFile, srcFile) if err != nil { - return pkgFilePath, err + return 
up, err } srcFile.Close() dstFile.Close() } - return pkgFilePath, nil + return up, nil } // The production release site uses consistent single mime type @@ -207,11 +219,13 @@ func contentTypeIsZip(contentType string) bool { return false } -// Enterprise products have a few additional license files -// that need to be extracted to a separate directory +// Product archives may have a few license files +// which may be extracted to a separate directory +// and may need to be tracked for later cleanup. var licenseFiles = []string{ "EULA.txt", "TermsOfEvaluation.txt", + "LICENSE.txt", } func isLicenseFile(filename string) bool { diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go index 99b811a6..94152b13 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go @@ -9,8 +9,7 @@ import "github.com/hashicorp/go-version" // "consul 0.5.1". A ProductVersion may have one or more builds. type ProductVersion struct { Name string `json:"name"` - RawVersion string `json:"version"` - Version *version.Version `json:"-"` + Version *version.Version `json:"version"` SHASUMS string `json:"shasums,omitempty"` SHASUMSSig string `json:"shasums_signature,omitempty"` SHASUMSSigs []string `json:"shasums_signatures,omitempty"` diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go index 755019f2..4c0bab00 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go @@ -7,7 +7,7 @@ import ( "context" "encoding/json" "fmt" - "io/ioutil" + "io" "log" "net/http" "net/url" @@ -55,7 +55,7 @@ type Releases struct { func NewReleases() *Releases { return &Releases{ - logger: log.New(ioutil.Discard, "", 0), + logger: log.New(io.Discard, "", 0), BaseURL: defaultBaseURL, } } @@ -95,7 +95,7 @@ func (r *Releases) ListProductVersions(ctx context.Context, productName string) r.logger.Printf("received %s", resp.Status) - body, err := ioutil.ReadAll(resp.Body) + body, err := io.ReadAll(resp.Body) if err != nil { return nil, err } @@ -153,7 +153,7 @@ func (r *Releases) GetProductVersion(ctx context.Context, product string, versio r.logger.Printf("received %s", resp.Status) - body, err := ioutil.ReadAll(resp.Body) + body, err := io.ReadAll(resp.Body) if err != nil { return nil, err } diff --git a/vendor/github.com/hashicorp/hc-install/releases/exact_version.go b/vendor/github.com/hashicorp/hc-install/releases/exact_version.go index e42f4d23..179f0b4b 100644 --- a/vendor/github.com/hashicorp/hc-install/releases/exact_version.go +++ b/vendor/github.com/hashicorp/hc-install/releases/exact_version.go @@ -6,7 +6,6 @@ package releases import ( "context" "fmt" - "io/ioutil" "log" "os" "path/filepath" @@ -37,7 +36,10 @@ type ExactVersion struct { // instead of built-in pubkey to verify signature of downloaded checksums ArmoredPublicKey string - apiBaseURL string + // ApiBaseURL is an optional field that specifies a custom URL to download the product from. + // If ApiBaseURL is set, the product will be downloaded from this base URL instead of the default site. + // Note: The directory structure of the custom URL must match the HashiCorp releases site (including the index.json files). 
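
The hunk above (and the matching one in `latest_version.go` below) exports the previously private `apiBaseURL` as `ApiBaseURL`, so callers can redirect downloads to their own mirror. A short sketch of opting in; the mirror URL is hypothetical, and as the field's comment notes, the mirror must replicate the releases.hashicorp.com directory layout, `index.json` files included:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/hashicorp/go-version"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
)

func main() {
	ev := &releases.ExactVersion{
		Product: product.Terraform,
		Version: version.Must(version.NewVersion("1.6.0")),
		// Hypothetical internal mirror; it must mirror the
		// releases.hashicorp.com structure, index.json included.
		ApiBaseURL: "https://releases.mirror.example.com",
	}

	execPath, err := ev.Install(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("installed to", execPath)
}
```
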
+ ApiBaseURL string logger *log.Logger pathsToRemove []string } @@ -93,7 +95,7 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) { if dstDir == "" { var err error dirName := fmt.Sprintf("%s_*", ev.Product.Name) - dstDir, err = ioutil.TempDir("", dirName) + dstDir, err = os.MkdirTemp("", dirName) if err != nil { return "", err } @@ -103,8 +105,8 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) { ev.log().Printf("will install into dir at %s", dstDir) rels := rjson.NewReleases() - if ev.apiBaseURL != "" { - rels.BaseURL = ev.apiBaseURL + if ev.ApiBaseURL != "" { + rels.BaseURL = ev.ApiBaseURL } rels.SetLogger(ev.log()) installVersion := ev.Version @@ -125,17 +127,17 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) { if ev.ArmoredPublicKey != "" { d.ArmoredPublicKey = ev.ArmoredPublicKey } - if ev.apiBaseURL != "" { - d.BaseURL = ev.apiBaseURL + if ev.ApiBaseURL != "" { + d.BaseURL = ev.ApiBaseURL } licenseDir := "" if ev.Enterprise != nil { licenseDir = ev.Enterprise.LicenseDir } - zipFilePath, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir) - if zipFilePath != "" { - ev.pathsToRemove = append(ev.pathsToRemove, zipFilePath) + up, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir) + if up != nil { + ev.pathsToRemove = append(ev.pathsToRemove, up.PathsToRemove...) } if err != nil { return "", err diff --git a/vendor/github.com/hashicorp/hc-install/releases/latest_version.go b/vendor/github.com/hashicorp/hc-install/releases/latest_version.go index 9893b223..c4888f4a 100644 --- a/vendor/github.com/hashicorp/hc-install/releases/latest_version.go +++ b/vendor/github.com/hashicorp/hc-install/releases/latest_version.go @@ -6,7 +6,6 @@ package releases import ( "context" "fmt" - "io/ioutil" "log" "os" "path/filepath" @@ -37,7 +36,10 @@ type LatestVersion struct { // instead of built-in pubkey to verify signature of downloaded checksums ArmoredPublicKey string - apiBaseURL string + // ApiBaseURL is an optional field that specifies a custom URL to download the product from. + // If ApiBaseURL is set, the product will be downloaded from this base URL instead of the default site. + // Note: The directory structure of the custom URL must match the HashiCorp releases site (including the index.json files). 
+ ApiBaseURL string logger *log.Logger pathsToRemove []string } @@ -89,7 +91,7 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if dstDir == "" { var err error dirName := fmt.Sprintf("%s_*", lv.Product.Name) - dstDir, err = ioutil.TempDir("", dirName) + dstDir, err = os.MkdirTemp("", dirName) if err != nil { return "", err } @@ -99,8 +101,8 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { lv.log().Printf("will install into dir at %s", dstDir) rels := rjson.NewReleases() - if lv.apiBaseURL != "" { - rels.BaseURL = lv.apiBaseURL + if lv.ApiBaseURL != "" { + rels.BaseURL = lv.ApiBaseURL } rels.SetLogger(lv.log()) versions, err := rels.ListProductVersions(ctx, lv.Product.Name) @@ -126,16 +128,16 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if lv.ArmoredPublicKey != "" { d.ArmoredPublicKey = lv.ArmoredPublicKey } - if lv.apiBaseURL != "" { - d.BaseURL = lv.apiBaseURL + if lv.ApiBaseURL != "" { + d.BaseURL = lv.ApiBaseURL } licenseDir := "" if lv.Enterprise != nil { licenseDir = lv.Enterprise.LicenseDir } - zipFilePath, err := d.DownloadAndUnpack(ctx, versionToInstall, dstDir, licenseDir) - if zipFilePath != "" { - lv.pathsToRemove = append(lv.pathsToRemove, zipFilePath) + up, err := d.DownloadAndUnpack(ctx, versionToInstall, dstDir, licenseDir) + if up != nil { + lv.pathsToRemove = append(lv.pathsToRemove, up.PathsToRemove...) } if err != nil { return "", err diff --git a/vendor/github.com/hashicorp/hc-install/releases/releases.go b/vendor/github.com/hashicorp/hc-install/releases/releases.go index 7bef49ba..a24db6c6 100644 --- a/vendor/github.com/hashicorp/hc-install/releases/releases.go +++ b/vendor/github.com/hashicorp/hc-install/releases/releases.go @@ -4,7 +4,7 @@ package releases import ( - "io/ioutil" + "io" "log" "time" ) @@ -12,5 +12,5 @@ import ( var ( defaultInstallTimeout = 30 * time.Second defaultListTimeout = 10 * time.Second - discardLogger = log.New(ioutil.Discard, "", 0) + discardLogger = log.New(io.Discard, "", 0) ) diff --git a/vendor/github.com/hashicorp/hc-install/version/VERSION b/vendor/github.com/hashicorp/hc-install/version/VERSION index 844f6a91..faef31a4 100644 --- a/vendor/github.com/hashicorp/hc-install/version/VERSION +++ b/vendor/github.com/hashicorp/hc-install/version/VERSION @@ -1 +1 @@ -0.6.3 +0.7.0 diff --git a/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go b/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go index 90b66889..235d5612 100644 --- a/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go +++ b/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go @@ -3,7 +3,7 @@ package version -const version = "0.20.0" +const version = "0.21.0" // ModuleVersion returns the current version of the github.com/hashicorp/terraform-exec Go module. // This is a function to allow for future possible enhancement using debug.BuildInfo. diff --git a/vendor/github.com/hashicorp/terraform-exec/tfexec/apply.go b/vendor/github.com/hashicorp/terraform-exec/tfexec/apply.go index 2c5a6d07..7a6ea923 100644 --- a/vendor/github.com/hashicorp/terraform-exec/tfexec/apply.go +++ b/vendor/github.com/hashicorp/terraform-exec/tfexec/apply.go @@ -12,10 +12,11 @@ import ( ) type applyConfig struct { - backup string - destroy bool - dirOrPlan string - lock bool + allowDeferral bool + backup string + destroy bool + dirOrPlan string + lock bool // LockTimeout must be a string with time unit, e.g. 
'10s' lockTimeout string @@ -105,6 +106,10 @@ func (opt *DestroyFlagOption) configureApply(conf *applyConfig) { conf.destroy = opt.destroy } +func (opt *AllowDeferralOption) configureApply(conf *applyConfig) { + conf.allowDeferral = opt.allowDeferral +} + // Apply represents the terraform apply subcommand. func (tf *Terraform) Apply(ctx context.Context, opts ...ApplyOption) error { cmd, err := tf.applyCmd(ctx, opts...) @@ -232,6 +237,22 @@ func (tf *Terraform) buildApplyArgs(ctx context.Context, c applyConfig) ([]strin } } + if c.allowDeferral { + // Ensure the version is later than 1.9.0 + err := tf.compatible(ctx, tf1_9_0, nil) + if err != nil { + return nil, fmt.Errorf("-allow-deferral is an experimental option introduced in Terraform 1.9.0: %w", err) + } + + // Ensure the version has experiments enabled (alpha or dev builds) + err = tf.experimentsEnabled(ctx) + if err != nil { + return nil, fmt.Errorf("-allow-deferral is only available in experimental Terraform builds: %w", err) + } + + args = append(args, "-allow-deferral") + } + return args, nil } diff --git a/vendor/github.com/hashicorp/terraform-exec/tfexec/options.go b/vendor/github.com/hashicorp/terraform-exec/tfexec/options.go index d783027a..339bf39e 100644 --- a/vendor/github.com/hashicorp/terraform-exec/tfexec/options.go +++ b/vendor/github.com/hashicorp/terraform-exec/tfexec/options.go @@ -7,6 +7,18 @@ import ( "encoding/json" ) +// AllowDeferralOption represents the -allow-deferral flag. This flag is only enabled in +// experimental builds of Terraform. (alpha or built via source with experiments enabled) +type AllowDeferralOption struct { + allowDeferral bool +} + +// AllowDeferral represents the -allow-deferral flag. This flag is only enabled in +// experimental builds of Terraform. (alpha or built via source with experiments enabled) +func AllowDeferral(allowDeferral bool) *AllowDeferralOption { + return &AllowDeferralOption{allowDeferral} +} + // AllowMissingConfigOption represents the -allow-missing-config flag. type AllowMissingConfigOption struct { allowMissingConfig bool diff --git a/vendor/github.com/hashicorp/terraform-exec/tfexec/plan.go b/vendor/github.com/hashicorp/terraform-exec/tfexec/plan.go index 946ce8d0..c2ec1f9e 100644 --- a/vendor/github.com/hashicorp/terraform-exec/tfexec/plan.go +++ b/vendor/github.com/hashicorp/terraform-exec/tfexec/plan.go @@ -12,20 +12,21 @@ import ( ) type planConfig struct { - destroy bool - dir string - lock bool - lockTimeout string - out string - parallelism int - reattachInfo ReattachInfo - refresh bool - refreshOnly bool - replaceAddrs []string - state string - targets []string - vars []string - varFiles []string + allowDeferral bool + destroy bool + dir string + lock bool + lockTimeout string + out string + parallelism int + reattachInfo ReattachInfo + refresh bool + refreshOnly bool + replaceAddrs []string + state string + targets []string + vars []string + varFiles []string } var defaultPlanOptions = planConfig{ @@ -97,6 +98,10 @@ func (opt *DestroyFlagOption) configurePlan(conf *planConfig) { conf.destroy = opt.destroy } +func (opt *AllowDeferralOption) configurePlan(conf *planConfig) { + conf.allowDeferral = opt.allowDeferral +} + // Plan executes `terraform plan` with the specified options and waits for it // to complete. 
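
The `-allow-deferral` plumbing added in this patch is gated twice: a version check requires Terraform 1.9.0 or later, and `experimentsEnabled` additionally requires an alpha or dev prerelease build. A sketch of how a caller would opt in; both paths are hypothetical, and the plan fails with the wrapped errors above unless the binary is experimental:

```go
package main

import (
	"context"
	"log"

	"github.com/hashicorp/terraform-exec/tfexec"
)

func main() {
	// Hypothetical paths; the binary must be a Terraform 1.9+ alpha or
	// dev build, or the compatibility checks reject -allow-deferral.
	tf, err := tfexec.NewTerraform("/path/to/workdir", "/path/to/terraform")
	if err != nil {
		log.Fatal(err)
	}

	// AllowDeferral(true) appends -allow-deferral to the generated args.
	hasChanges, err := tf.Plan(context.Background(), tfexec.AllowDeferral(true))
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("plan has changes: %t", hasChanges)
}
```
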
// @@ -243,6 +248,21 @@ func (tf *Terraform) buildPlanArgs(ctx context.Context, c planConfig) ([]string, args = append(args, "-var", v) } } + if c.allowDeferral { + // Ensure the version is later than 1.9.0 + err := tf.compatible(ctx, tf1_9_0, nil) + if err != nil { + return nil, fmt.Errorf("-allow-deferral is an experimental option introduced in Terraform 1.9.0: %w", err) + } + + // Ensure the version has experiments enabled (alpha or dev builds) + err = tf.experimentsEnabled(ctx) + if err != nil { + return nil, fmt.Errorf("-allow-deferral is only available in experimental Terraform builds: %w", err) + } + + args = append(args, "-allow-deferral") + } return args, nil } diff --git a/vendor/github.com/hashicorp/terraform-exec/tfexec/version.go b/vendor/github.com/hashicorp/terraform-exec/tfexec/version.go index 4ba4f6ea..87addd1e 100644 --- a/vendor/github.com/hashicorp/terraform-exec/tfexec/version.go +++ b/vendor/github.com/hashicorp/terraform-exec/tfexec/version.go @@ -33,6 +33,7 @@ var ( tf1_1_0 = version.Must(version.NewVersion("1.1.0")) tf1_4_0 = version.Must(version.NewVersion("1.4.0")) tf1_6_0 = version.Must(version.NewVersion("1.6.0")) + tf1_9_0 = version.Must(version.NewVersion("1.9.0")) ) // Version returns structured output from the terraform version command including both the Terraform CLI version @@ -180,6 +181,22 @@ func (tf *Terraform) compatible(ctx context.Context, minInclusive *version.Versi return nil } +// experimentsEnabled asserts the cached terraform version has experiments enabled in the executable, +// and returns a well known error if not. Experiments are enabled in alpha and (potentially) dev builds of Terraform. +func (tf *Terraform) experimentsEnabled(ctx context.Context) error { + tfv, _, err := tf.Version(ctx, false) + if err != nil { + return err + } + + preRelease := tfv.Prerelease() + if preRelease == "dev" || strings.Contains(preRelease, "alpha") { + return nil + } + + return fmt.Errorf("experiments are not enabled in version %s, as it's not an alpha or dev build", errorVersionString(tfv)) +} + func stripPrereleaseAndMeta(v *version.Version) *version.Version { if v == nil { return nil diff --git a/vendor/github.com/hashicorp/terraform-json/plan.go b/vendor/github.com/hashicorp/terraform-json/plan.go index 38ea778e..d8618985 100644 --- a/vendor/github.com/hashicorp/terraform-json/plan.go +++ b/vendor/github.com/hashicorp/terraform-json/plan.go @@ -60,6 +60,17 @@ type Plan struct { // plan. ResourceChanges []*ResourceChange `json:"resource_changes,omitempty"` + // DeferredChanges contains the change operations for resources that are deferred + // for this plan. + DeferredChanges []*DeferredResourceChange `json:"deferred_changes,omitempty"` + + // Complete indicates that all resources have successfully planned changes. + // This will be false if there are DeferredChanges or if the -target flag is used. + // + // Complete was introduced in Terraform 1.8 and will be nil for all previous + // Terraform versions. + Complete *bool `json:"complete,omitempty"` + // The change operations for outputs within this plan. OutputChanges map[string]*Change `json:"output_changes,omitempty"` @@ -269,3 +280,13 @@ type PlanVariable struct { // The value for this variable at plan time. Value interface{} `json:"value,omitempty"` } + +// DeferredResourceChange is a description of a resource change that has been +// deferred for some reason. +type DeferredResourceChange struct { + // Reason is the reason why this resource change was deferred. 
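
The new `Complete` and `DeferredChanges` fields surface deferred-actions data from plan JSON. A sketch of consuming them after unmarshalling `terraform show -json` output; the `plan.json` path is hypothetical:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"

	tfjson "github.com/hashicorp/terraform-json"
)

func main() {
	// Hypothetical path to `terraform show -json` output.
	b, err := os.ReadFile("plan.json")
	if err != nil {
		log.Fatal(err)
	}

	var plan tfjson.Plan
	if err := json.Unmarshal(b, &plan); err != nil {
		log.Fatal(err)
	}

	// Complete is a *bool: nil means the Terraform version predates 1.8
	// and did not report completeness at all.
	if plan.Complete == nil {
		fmt.Println("plan completeness not reported by this Terraform version")
	} else if !*plan.Complete {
		fmt.Printf("incomplete plan with %d deferred changes\n", len(plan.DeferredChanges))
		for _, dc := range plan.DeferredChanges {
			if dc.ResourceChange != nil {
				fmt.Printf("  deferred (%s): %s\n", dc.Reason, dc.ResourceChange.Address)
			}
		}
	}
}
```
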
+ Reason string `json:"reason,omitempty"` + + // Change contains any information we have about the deferred change. + ResourceChange *ResourceChange `json:"resource_change,omitempty"` +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/build/version.go b/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/build/version.go new file mode 100644 index 00000000..01ded843 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/build/version.go @@ -0,0 +1,18 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package build + +var ( + // These vars will be set by goreleaser. + version string = `dev` + commit string = `` +) + +func GetVersion() string { + version := "tfplugindocs" + " Version " + version + if commit != "" { + version += " from commit " + commit + } + return version +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/main.go b/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/main.go index df6e336a..8e3c25da 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/main.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/main.go @@ -6,24 +6,9 @@ package main import ( "os" - "github.com/mattn/go-colorable" - "github.com/hashicorp/terraform-plugin-docs/internal/cmd" ) func main() { - name := "tfplugindocs" - version := name + " Version " + version - if commit != "" { - version += " from commit " + commit - } - - os.Exit(cmd.Run( - name, - version, - os.Args[1:], - os.Stdin, - colorable.NewColorableStdout(), - colorable.NewColorableStderr(), - )) + os.Exit(cmd.Main()) } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/version.go b/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/version.go deleted file mode 100644 index 68dc6cee..00000000 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/version.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package main - -var ( - // These vars will be set by goreleaser. - version string = `dev` - commit string = `` -) diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go new file mode 100644 index 00000000..3c3183b7 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go @@ -0,0 +1,174 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "log" + "path/filepath" +) + +const ( + CdktfIndexDirectory = `cdktf` + + LegacyIndexDirectory = `website/docs` + LegacyDataSourcesDirectory = `d` + LegacyGuidesDirectory = `guides` + LegacyResourcesDirectory = `r` + LegacyFunctionsDirectory = `functions` + + RegistryIndexDirectory = `docs` + RegistryDataSourcesDirectory = `data-sources` + RegistryGuidesDirectory = `guides` + RegistryResourcesDirectory = `resources` + RegistryFunctionsDirectory = `functions` + + // Terraform Registry Storage Limits + // https://www.terraform.io/docs/registry/providers/docs.html#storage-limits + RegistryMaximumNumberOfFiles = 2000 + RegistryMaximumSizeOfFile = 500000 // 500KB + +) + +var ValidLegacyDirectories = []string{ + LegacyIndexDirectory, + LegacyIndexDirectory + "/" + LegacyDataSourcesDirectory, + LegacyIndexDirectory + "/" + LegacyGuidesDirectory, + LegacyIndexDirectory + "/" + LegacyResourcesDirectory, + LegacyIndexDirectory + "/" + LegacyFunctionsDirectory, +} + +var ValidRegistryDirectories = []string{ + RegistryIndexDirectory, + RegistryIndexDirectory + "/" + RegistryDataSourcesDirectory, + RegistryIndexDirectory + "/" + RegistryGuidesDirectory, + RegistryIndexDirectory + "/" + RegistryResourcesDirectory, + RegistryIndexDirectory + "/" + RegistryFunctionsDirectory, +} + +var ValidCdktfLanguages = []string{ + "csharp", + "go", + "java", + "python", + "typescript", +} + +var ValidLegacySubdirectories = []string{ + LegacyIndexDirectory, + LegacyDataSourcesDirectory, + LegacyGuidesDirectory, + LegacyResourcesDirectory, +} + +var ValidRegistrySubdirectories = []string{ + RegistryIndexDirectory, + RegistryDataSourcesDirectory, + RegistryGuidesDirectory, + RegistryResourcesDirectory, +} + +func InvalidDirectoriesCheck(dirPath string) error { + if IsValidRegistryDirectory(dirPath) { + return nil + } + + if IsValidLegacyDirectory(dirPath) { + return nil + } + + if IsValidCdktfDirectory(dirPath) { + return nil + } + + return fmt.Errorf("invalid Terraform Provider documentation directory found: %s", dirPath) + +} + +func MixedDirectoriesCheck(docFiles []string) error { + var legacyDirectoryFound bool + var registryDirectoryFound bool + err := fmt.Errorf("mixed Terraform Provider documentation directory layouts found, must use only legacy or registry layout") + + for _, file := range docFiles { + directory := filepath.Dir(file) + log.Printf("[DEBUG] Found directory: %s", directory) + + // Allow docs/ with other files + if IsValidRegistryDirectory(directory) && directory != RegistryIndexDirectory { + registryDirectoryFound = true + + if legacyDirectoryFound { + log.Printf("[DEBUG] Found mixed directories") + return err + } + } + + if IsValidLegacyDirectory(directory) { + legacyDirectoryFound = true + + if registryDirectoryFound { + log.Printf("[DEBUG] Found mixed directories") + return err + } + } + } + + return nil +} + +func IsValidLegacyDirectory(directory string) bool { + for _, validLegacyDirectory := range ValidLegacyDirectories { + if directory == filepath.FromSlash(validLegacyDirectory) { + return true + } + } + + return false +} + +func IsValidRegistryDirectory(directory string) bool { + for _, validRegistryDirectory := range ValidRegistryDirectories { + if directory == filepath.FromSlash(validRegistryDirectory) { + return true + } + } + + return false +} + +func IsValidCdktfDirectory(directory string) bool { + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s", LegacyIndexDirectory, CdktfIndexDirectory)) { + return 
true + } + + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s", RegistryIndexDirectory, CdktfIndexDirectory)) { + return true + } + + for _, validCdktfLanguage := range ValidCdktfLanguages { + + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s", LegacyIndexDirectory, CdktfIndexDirectory, validCdktfLanguage)) { + return true + } + + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s", RegistryIndexDirectory, CdktfIndexDirectory, validCdktfLanguage)) { + return true + } + + for _, validLegacySubdirectory := range ValidLegacySubdirectories { + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s/%s", LegacyIndexDirectory, CdktfIndexDirectory, validCdktfLanguage, validLegacySubdirectory)) { + return true + } + } + + for _, validRegistrySubdirectory := range ValidRegistrySubdirectories { + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s/%s", RegistryIndexDirectory, CdktfIndexDirectory, validCdktfLanguage, validRegistrySubdirectory)) { + return true + } + } + } + + return false +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go new file mode 100644 index 00000000..cb079b3a --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go @@ -0,0 +1,39 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "log" + "os" + "path/filepath" +) + +type FileOptions struct { + BasePath string +} + +func (opts *FileOptions) FullPath(path string) string { + if opts.BasePath != "" { + return filepath.Join(opts.BasePath, path) + } + + return path +} + +// FileSizeCheck verifies that documentation file is below the Terraform Registry storage limit. +func FileSizeCheck(fullpath string) error { + fi, err := os.Stat(fullpath) + + if err != nil { + return err + } + + log.Printf("[DEBUG] File %s size: %d (limit: %d)", fullpath, fi.Size(), RegistryMaximumSizeOfFile) + if fi.Size() >= int64(RegistryMaximumSizeOfFile) { + return fmt.Errorf("exceeded maximum (%d) size of documentation file for Terraform Registry: %d", RegistryMaximumSizeOfFile, fi.Size()) + } + + return nil +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go new file mode 100644 index 00000000..dd5f37b6 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go @@ -0,0 +1,64 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "path/filepath" + "strings" +) + +const ( + FileExtensionHtmlMarkdown = `.html.markdown` + FileExtensionHtmlMd = `.html.md` + FileExtensionMarkdown = `.markdown` + FileExtensionMd = `.md` +) + +var ValidLegacyFileExtensions = []string{ + FileExtensionHtmlMarkdown, + FileExtensionHtmlMd, + FileExtensionMarkdown, + FileExtensionMd, +} + +var ValidRegistryFileExtensions = []string{ + FileExtensionMd, +} + +// FileExtensionCheck checks if the file extension of the given path is valid. 
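
`TrimFileExtension` below cuts the base name at the *first* period, so multi-part legacy extensions such as `.html.markdown` disappear in a single pass. A standalone illustration of that behavior; the function is copied locally as `trimFileExtension` only because the `check` package is internal and cannot be imported:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// trimFileExtension mirrors check.TrimFileExtension: it trims the base
// name at the first period, removing multi-part extensions in one pass.
func trimFileExtension(path string) string {
	filename := filepath.Base(path)
	if filename == "." {
		return ""
	}
	if dotIndex := strings.IndexByte(filename, '.'); dotIndex > 0 {
		return filename[:dotIndex]
	}
	return filename
}

func main() {
	fmt.Println(trimFileExtension("website/docs/r/example.html.markdown")) // "example"
	fmt.Println(trimFileExtension("docs/resources/example.md"))            // "example"
	fmt.Println(trimFileExtension("README"))                               // "README"
}
```
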
+func FileExtensionCheck(path string, validExtensions []string) error { + if !FilePathEndsWithExtensionFrom(path, validExtensions) { + return fmt.Errorf("file does not end with a valid extension, valid extensions: %v", ValidLegacyFileExtensions) + } + + return nil +} + +func FilePathEndsWithExtensionFrom(path string, validExtensions []string) bool { + for _, validExtension := range validExtensions { + if strings.HasSuffix(path, validExtension) { + return true + } + } + + return false +} + +// TrimFileExtension removes file extensions including those with multiple periods. +func TrimFileExtension(path string) string { + filename := filepath.Base(path) + + if filename == "." { + return "" + } + + dotIndex := strings.IndexByte(filename, '.') + + if dotIndex > 0 { + return filename[:dotIndex] + } + + return filename +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go new file mode 100644 index 00000000..d65989fd --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go @@ -0,0 +1,284 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "errors" + "fmt" + "log" + "os" + "sort" + + tfjson "github.com/hashicorp/terraform-json" +) + +type FileMismatchOptions struct { + *FileOptions + + IgnoreFileMismatch []string + + IgnoreFileMissing []string + + ProviderShortName string + + DatasourceEntries []os.DirEntry + + ResourceEntries []os.DirEntry + + FunctionEntries []os.DirEntry + + Schema *tfjson.ProviderSchema +} + +type FileMismatchCheck struct { + Options *FileMismatchOptions +} + +func NewFileMismatchCheck(opts *FileMismatchOptions) *FileMismatchCheck { + check := &FileMismatchCheck{ + Options: opts, + } + + if check.Options == nil { + check.Options = &FileMismatchOptions{} + } + + if check.Options.FileOptions == nil { + check.Options.FileOptions = &FileOptions{} + } + + return check +} + +func (check *FileMismatchCheck) Run() error { + var result error + + if check.Options.Schema == nil { + log.Printf("[DEBUG] Skipping file mismatch checks due to missing provider schema") + return nil + } + + if check.Options.ResourceEntries != nil { + err := check.ResourceFileMismatchCheck(check.Options.ResourceEntries, "resource", check.Options.Schema.ResourceSchemas) + result = errors.Join(result, err) + } + + if check.Options.DatasourceEntries != nil { + err := check.ResourceFileMismatchCheck(check.Options.DatasourceEntries, "datasource", check.Options.Schema.DataSourceSchemas) + result = errors.Join(result, err) + } + + if check.Options.FunctionEntries != nil { + err := check.FunctionFileMismatchCheck(check.Options.FunctionEntries, check.Options.Schema.Functions) + result = errors.Join(result, err) + } + + return result +} + +// ResourceFileMismatchCheck checks for mismatched files, either missing or extraneous, against the resource/datasouce schema +func (check *FileMismatchCheck) ResourceFileMismatchCheck(files []os.DirEntry, resourceType string, schemas map[string]*tfjson.Schema) error { + if len(files) == 0 { + log.Printf("[DEBUG] Skipping %s file mismatch checks due to missing file list", resourceType) + return nil + } + + if len(schemas) == 0 { + log.Printf("[DEBUG] Skipping %s file mismatch checks due to missing schemas", resourceType) + return nil + } + + var extraFiles []string + var missingFiles []string + + for _, file := range files { + log.Printf("[DEBUG] Found file %s", 
file.Name()) + if fileHasResource(schemas, check.Options.ProviderShortName, file.Name()) { + continue + } + + if check.IgnoreFileMismatch(file.Name()) { + continue + } + + log.Printf("[DEBUG] Found extraneous file %s", file.Name()) + extraFiles = append(extraFiles, file.Name()) + } + + for _, resourceName := range resourceNames(schemas) { + log.Printf("[DEBUG] Found %s %s", resourceType, resourceName) + if resourceHasFile(files, check.Options.ProviderShortName, resourceName) { + continue + } + + if check.IgnoreFileMissing(resourceName) { + continue + } + + log.Printf("[DEBUG] Missing file for %s %s", resourceType, resourceName) + missingFiles = append(missingFiles, resourceName) + } + + var result error + + for _, extraFile := range extraFiles { + err := fmt.Errorf("matching %s for documentation file (%s) not found, file is extraneous or incorrectly named", resourceType, extraFile) + result = errors.Join(result, err) + } + + for _, missingFile := range missingFiles { + err := fmt.Errorf("missing documentation file for %s: %s", resourceType, missingFile) + result = errors.Join(result, err) + } + + return result + +} + +// FunctionFileMismatchCheck checks for mismatched files, either missing or extraneous, against the function signature +func (check *FileMismatchCheck) FunctionFileMismatchCheck(files []os.DirEntry, functions map[string]*tfjson.FunctionSignature) error { + if len(files) == 0 { + log.Printf("[DEBUG] Skipping function file mismatch checks due to missing file list") + return nil + } + + if len(functions) == 0 { + log.Printf("[DEBUG] Skipping function file mismatch checks due to missing schemas") + return nil + } + + var extraFiles []string + var missingFiles []string + + for _, file := range files { + if fileHasFunction(functions, file.Name()) { + continue + } + + if check.IgnoreFileMismatch(file.Name()) { + continue + } + + extraFiles = append(extraFiles, file.Name()) + } + + for _, functionName := range functionNames(functions) { + if functionHasFile(files, functionName) { + continue + } + + if check.IgnoreFileMissing(functionName) { + continue + } + + missingFiles = append(missingFiles, functionName) + } + + var result error + + for _, extraFile := range extraFiles { + err := fmt.Errorf("matching function for documentation file (%s) not found, file is extraneous or incorrectly named", extraFile) + result = errors.Join(result, err) + } + + for _, missingFile := range missingFiles { + err := fmt.Errorf("missing documentation file for function: %s", missingFile) + result = errors.Join(result, err) + } + + return result + +} + +func (check *FileMismatchCheck) IgnoreFileMismatch(file string) bool { + for _, ignoreResourceName := range check.Options.IgnoreFileMismatch { + if ignoreResourceName == fileResourceName(check.Options.ProviderShortName, file) { + return true + } + } + + return false +} + +func (check *FileMismatchCheck) IgnoreFileMissing(resourceName string) bool { + for _, ignoreResourceName := range check.Options.IgnoreFileMissing { + if ignoreResourceName == resourceName { + return true + } + } + + return false +} + +func fileHasResource(schemaResources map[string]*tfjson.Schema, providerName, file string) bool { + if _, ok := schemaResources[fileResourceName(providerName, file)]; ok { + return true + } + + return false +} + +func fileHasFunction(functions map[string]*tfjson.FunctionSignature, file string) bool { + if _, ok := functions[TrimFileExtension(file)]; ok { + return true + } + + return false +} + +func fileResourceName(providerName, fileName string) string { + 
resourceSuffix := TrimFileExtension(fileName) + + return fmt.Sprintf("%s_%s", providerName, resourceSuffix) +} + +func resourceHasFile(files []os.DirEntry, providerName, resourceName string) bool { + var found bool + + for _, file := range files { + if fileResourceName(providerName, file.Name()) == resourceName { + found = true + break + } + } + + return found +} + +func functionHasFile(files []os.DirEntry, functionName string) bool { + var found bool + + for _, file := range files { + if TrimFileExtension(file.Name()) == functionName { + found = true + break + } + } + + return found +} + +func resourceNames(resources map[string]*tfjson.Schema) []string { + names := make([]string, 0, len(resources)) + + for name := range resources { + names = append(names, name) + } + + sort.Strings(names) + + return names +} + +func functionNames(functions map[string]*tfjson.FunctionSignature) []string { + names := make([]string, 0, len(functions)) + + for name := range functions { + names = append(names, name) + } + + sort.Strings(names) + + return names +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go new file mode 100644 index 00000000..65ac43aa --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go @@ -0,0 +1,104 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "bytes" + "fmt" + + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/parser" + "go.abhg.dev/goldmark/frontmatter" +) + +type FrontMatterCheck struct { + Options *FrontMatterOptions +} + +// FrontMatterData represents the YAML frontmatter of Terraform Provider documentation. +type FrontMatterData struct { + Description *string `yaml:"description,omitempty"` + Layout *string `yaml:"layout,omitempty"` + PageTitle *string `yaml:"page_title,omitempty"` + SidebarCurrent *string `yaml:"sidebar_current,omitempty"` + Subcategory *string `yaml:"subcategory,omitempty"` +} + +// FrontMatterOptions represents configuration options for FrontMatter. 
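
`FrontMatterCheck.Run` below leans on goldmark's frontmatter extension to pull the YAML header out of a document before decoding and validating it. A self-contained sketch of that extraction pattern; the sample document and its frontmatter values are made up for illustration:

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/parser"
	"go.abhg.dev/goldmark/frontmatter"
)

func main() {
	// Illustrative provider doc with YAML frontmatter.
	src := []byte(`---
page_title: "example_thing Resource - terraform-provider-example"
subcategory: ""
description: |-
  Manages an example thing.
---

# example_thing
`)

	md := goldmark.New(goldmark.WithExtensions(&frontmatter.Extender{}))
	ctx := parser.NewContext()

	var buf bytes.Buffer
	if err := md.Convert(src, &buf, parser.WithContext(ctx)); err != nil {
		log.Fatal(err)
	}

	data := frontmatter.Get(ctx) // nil when the document has no frontmatter
	if data == nil {
		log.Fatal("no frontmatter found")
	}

	var meta struct {
		PageTitle   *string `yaml:"page_title,omitempty"`
		Layout      *string `yaml:"layout,omitempty"`
		Description *string `yaml:"description,omitempty"`
	}
	if err := data.Decode(&meta); err != nil {
		log.Fatal(err)
	}

	// Registry docs must not set layout; legacy website docs require it.
	fmt.Printf("page_title set: %t, layout set: %t\n", meta.PageTitle != nil, meta.Layout != nil)
}
```
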
+type FrontMatterOptions struct { + NoLayout bool + NoPageTitle bool + NoSidebarCurrent bool + NoSubcategory bool + RequireDescription bool + RequireLayout bool + RequirePageTitle bool +} + +func NewFrontMatterCheck(opts *FrontMatterOptions) *FrontMatterCheck { + check := &FrontMatterCheck{ + Options: opts, + } + + if check.Options == nil { + check.Options = &FrontMatterOptions{} + } + + return check +} + +func (check *FrontMatterCheck) Run(src []byte) error { + frontMatter := FrontMatterData{} + + md := goldmark.New( + goldmark.WithExtensions(&frontmatter.Extender{}), + ) + + ctx := parser.NewContext() + var buff bytes.Buffer + + err := md.Convert(src, &buff, parser.WithContext(ctx)) + if err != nil { + return err + } + d := frontmatter.Get(ctx) + if d == nil { + return fmt.Errorf("no frontmatter found") + } + + err = d.Decode(&frontMatter) + if err != nil { + return fmt.Errorf("error parsing YAML frontmatter: %w", err) + } + + if check.Options.NoLayout && frontMatter.Layout != nil { + return fmt.Errorf("YAML frontmatter should not contain layout") + } + + if check.Options.NoPageTitle && frontMatter.PageTitle != nil { + return fmt.Errorf("YAML frontmatter should not contain page_title") + } + + if check.Options.NoSidebarCurrent && frontMatter.SidebarCurrent != nil { + return fmt.Errorf("YAML frontmatter should not contain sidebar_current") + } + + if check.Options.NoSubcategory && frontMatter.Subcategory != nil { + return fmt.Errorf("YAML frontmatter should not contain subcategory") + } + + if check.Options.RequireDescription && frontMatter.Description == nil { + return fmt.Errorf("YAML frontmatter missing required description") + } + + if check.Options.RequireLayout && frontMatter.Layout == nil { + return fmt.Errorf("YAML frontmatter missing required layout") + } + + if check.Options.RequirePageTitle && frontMatter.PageTitle == nil { + return fmt.Errorf("YAML frontmatter missing required page_title") + } + + return nil +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go new file mode 100644 index 00000000..5358b669 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go @@ -0,0 +1,67 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "log" + "os" +) + +type ProviderFileOptions struct { + *FileOptions + + FrontMatter *FrontMatterOptions + ValidExtensions []string +} + +type ProviderFileCheck struct { + Options *ProviderFileOptions +} + +func NewProviderFileCheck(opts *ProviderFileOptions) *ProviderFileCheck { + check := &ProviderFileCheck{ + Options: opts, + } + + if check.Options == nil { + check.Options = &ProviderFileOptions{} + } + + if check.Options.FileOptions == nil { + check.Options.FileOptions = &FileOptions{} + } + + if check.Options.FrontMatter == nil { + check.Options.FrontMatter = &FrontMatterOptions{} + } + + return check +} + +func (check *ProviderFileCheck) Run(path string) error { + fullpath := check.Options.FullPath(path) + + log.Printf("[DEBUG] Checking file: %s", fullpath) + + if err := FileExtensionCheck(path, check.Options.ValidExtensions); err != nil { + return fmt.Errorf("%s: error checking file extension: %w", path, err) + } + + if err := FileSizeCheck(fullpath); err != nil { + return fmt.Errorf("%s: error checking file size: %w", path, err) + } + + content, err := os.ReadFile(fullpath) + + if err != nil { + return fmt.Errorf("%s: error reading file: %w", path, err) + } + + if err := NewFrontMatterCheck(check.Options.FrontMatter).Run(content); err != nil { + return fmt.Errorf("%s: error checking file frontmatter: %w", path, err) + } + + return nil +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go index 29b25ed5..77dbc96f 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go @@ -20,6 +20,7 @@ type generateCmd struct { flagRenderedProviderName string flagProviderDir string + flagProvidersSchema string flagRenderedWebsiteDir string flagExamplesDir string flagWebsiteTmpDir string @@ -71,14 +72,15 @@ func (cmd *generateCmd) Help() string { func (cmd *generateCmd) Flags() *flag.FlagSet { fs := flag.NewFlagSet("generate", flag.ExitOnError) - fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations") + fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations; defaults to the --provider-dir short name (after removing `terraform-provider-` prefix)") fs.StringVar(&cmd.flagProviderDir, "provider-dir", "", "relative or absolute path to the root provider code directory when running the command outside the root provider code directory") + fs.StringVar(&cmd.flagProvidersSchema, "providers-schema", "", "path to the providers schema JSON file, which contains the output of the terraform providers schema -json command. Setting this flag will skip building the provider and calling Terraform CLI") fs.StringVar(&cmd.flagRenderedProviderName, "rendered-provider-name", "", "provider name, as generated in documentation (ex. 
page titles, ...)") fs.StringVar(&cmd.flagRenderedWebsiteDir, "rendered-website-dir", "docs", "output directory based on provider-dir") fs.StringVar(&cmd.flagExamplesDir, "examples-dir", "examples", "examples directory based on provider-dir") fs.StringVar(&cmd.flagWebsiteTmpDir, "website-temp-dir", "", "temporary directory (used during generation)") fs.StringVar(&cmd.flagWebsiteSourceDir, "website-source-dir", "templates", "templates directory based on provider-dir") - fs.StringVar(&cmd.tfVersion, "tf-version", "", "terraform binary version to download") + fs.StringVar(&cmd.tfVersion, "tf-version", "", "terraform binary version to download. If not provided, will look for a terraform binary in the local environment. If not found in the environment, will download the latest version of Terraform") fs.BoolVar(&cmd.flagIgnoreDeprecated, "ignore-deprecated", false, "don't generate documentation for deprecated resources and data-sources") return fs } @@ -99,6 +101,7 @@ func (cmd *generateCmd) runInternal() error { cmd.ui, cmd.flagProviderDir, cmd.flagProviderName, + cmd.flagProvidersSchema, cmd.flagRenderedProviderName, cmd.flagRenderedWebsiteDir, cmd.flagExamplesDir, diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go new file mode 100644 index 00000000..14e39ec1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go @@ -0,0 +1,100 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package cmd + +import ( + "flag" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-docs/internal/provider" +) + +type migrateCmd struct { + commonCmd + + flagProviderDir string + flagTemplatesDir string + flagExamplesDir string + flagProviderName string +} + +func (cmd *migrateCmd) Synopsis() string { + return "migrates website files from either the legacy rendered website directory (`website/docs/r`) or the docs rendered website directory (`docs/resources`) to the tfplugindocs supported structure (`templates/`)." 
+}
+
+func (cmd *migrateCmd) Help() string {
+	strBuilder := &strings.Builder{}
+
+	longestName := 0
+	longestUsage := 0
+	cmd.Flags().VisitAll(func(f *flag.Flag) {
+		if len(f.Name) > longestName {
+			longestName = len(f.Name)
+		}
+		if len(f.Usage) > longestUsage {
+			longestUsage = len(f.Usage)
+		}
+	})
+
+	strBuilder.WriteString("\nUsage: tfplugindocs migrate [<args>]\n\n")
+	cmd.Flags().VisitAll(func(f *flag.Flag) {
+		if f.DefValue != "" {
+			strBuilder.WriteString(fmt.Sprintf(" --%s %s%s%s (default: %q)\n",
+				f.Name,
+				strings.Repeat(" ", longestName-len(f.Name)+2),
+				f.Usage,
+				strings.Repeat(" ", longestUsage-len(f.Usage)+2),
+				f.DefValue,
+			))
+		} else {
+			strBuilder.WriteString(fmt.Sprintf(" --%s %s%s%s\n",
+				f.Name,
+				strings.Repeat(" ", longestName-len(f.Name)+2),
+				f.Usage,
+				strings.Repeat(" ", longestUsage-len(f.Usage)+2),
+			))
+		}
+	})
+	strBuilder.WriteString("\n")
+
+	return strBuilder.String()
+}
+
+func (cmd *migrateCmd) Flags() *flag.FlagSet {
+	fs := flag.NewFlagSet("migrate", flag.ExitOnError)
+
+	fs.StringVar(&cmd.flagProviderDir, "provider-dir", "", "relative or absolute path to the root provider code directory; this will default to the current working directory if not set")
+	fs.StringVar(&cmd.flagTemplatesDir, "templates-dir", "templates", "new website templates directory based on provider-dir; files will be migrated to this directory")
+	fs.StringVar(&cmd.flagExamplesDir, "examples-dir", "examples", "examples directory based on provider-dir; extracted code examples will be migrated to this directory")
+	fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations; defaults to the --provider-dir short name (after removing `terraform-provider-` prefix)")
+
+	return fs
+}
+
+func (cmd *migrateCmd) Run(args []string) int {
+	fs := cmd.Flags()
+	err := fs.Parse(args)
+	if err != nil {
+		cmd.ui.Error(fmt.Sprintf("unable to parse flags: %s", err))
+		return 1
+	}
+
+	return cmd.run(cmd.runInternal)
+}
+
+func (cmd *migrateCmd) runInternal() error {
+	err := provider.Migrate(
+		cmd.ui,
+		cmd.flagProviderDir,
+		cmd.flagTemplatesDir,
+		cmd.flagExamplesDir,
+		cmd.flagProviderName,
+	)
+	if err != nil {
+		return fmt.Errorf("unable to migrate website: %w", err)
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/run.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/run.go
index 82f47cde..471a76b8 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/run.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/run.go
@@ -8,7 +8,10 @@ import (
 	"io"
 	"os"
 
-	"github.com/mitchellh/cli"
+	"github.com/hashicorp/cli"
+	"github.com/mattn/go-colorable"
+
+	"github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs/build"
 )
 
 type commonCmd struct {
@@ -54,10 +57,19 @@ func initCommands(ui cli.Ui) map[string]cli.CommandFactory {
 		}, nil
 	}
 
+	migrateFactory := func() (cli.Command, error) {
+		return &migrateCmd{
+			commonCmd: commonCmd{
+				ui: ui,
+			},
+		}, nil
+	}
+
 	return map[string]cli.CommandFactory{
 		"": defaultFactory,
 		"generate": generateFactory,
 		"validate": validateFactory,
+		"migrate": migrateFactory,
 		//"serve": serveFactory,
 	}
 }
@@ -100,3 +112,16 @@ func Run(name, version string, args []string, stdin io.Reader, stdout, stderr io
 	}
 	return exitCode
 }
+
+// Main has the required function signature for use with testscript
+func Main() int {
+
+	return Run(
+		"tfplugindocs",
+		build.GetVersion(),
+		os.Args[1:],
+		os.Stdin,
+		
colorable.NewColorableStdout(), + colorable.NewColorableStderr(), + ) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go index c4a406cf..55107f3c 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go @@ -4,6 +4,7 @@ package cmd import ( + "errors" "flag" "fmt" "strings" @@ -13,10 +14,15 @@ import ( type validateCmd struct { commonCmd + + flagProviderName string + flagProviderDir string + flagProvidersSchema string + tfVersion string } func (cmd *validateCmd) Synopsis() string { - return "validates a plugin website for the current directory" + return "validates a plugin website" } func (cmd *validateCmd) Help() string { @@ -59,6 +65,10 @@ func (cmd *validateCmd) Help() string { func (cmd *validateCmd) Flags() *flag.FlagSet { fs := flag.NewFlagSet("validate", flag.ExitOnError) + fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations; defaults to the --provider-dir short name (after removing `terraform-provider-` prefix)") + fs.StringVar(&cmd.flagProviderDir, "provider-dir", "", "relative or absolute path to the root provider code directory; this will default to the current working directory if not set") + fs.StringVar(&cmd.flagProvidersSchema, "providers-schema", "", "path to the providers schema JSON file, which contains the output of the terraform providers schema -json command. Setting this flag will skip building the provider and calling Terraform CLI") + fs.StringVar(&cmd.tfVersion, "tf-version", "", "terraform binary version to download. If not provided, will look for a terraform binary in the local environment. If not found in the environment, will download the latest version of Terraform") return fs } @@ -74,9 +84,14 @@ func (cmd *validateCmd) Run(args []string) int { } func (cmd *validateCmd) runInternal() error { - err := provider.Validate(cmd.ui) + err := provider.Validate(cmd.ui, + cmd.flagProviderDir, + cmd.flagProviderName, + cmd.flagProvidersSchema, + cmd.tfVersion, + ) if err != nil { - return fmt.Errorf("unable to validate website: %w", err) + return errors.Join(errors.New("validation errors found: "), err) } return nil diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go new file mode 100644 index 00000000..7b4951b7 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go @@ -0,0 +1,96 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package functionmd + +import ( + "bytes" + "fmt" + "strings" + + tfjson "github.com/hashicorp/terraform-json" + + "github.com/hashicorp/terraform-plugin-docs/internal/schemamd" +) + +// RenderArguments returns a Markdown formatted string of the function arguments. +func RenderArguments(signature *tfjson.FunctionSignature) (string, error) { + argBuffer := bytes.NewBuffer(nil) + for i, p := range signature.Parameters { + name := p.Name + desc := strings.TrimSpace(p.Description) + + typeBuffer := bytes.NewBuffer(nil) + err := schemamd.WriteType(typeBuffer, p.Type) + if err != nil { + return "", err + } + + if p.IsNullable { + argBuffer.WriteString(fmt.Sprintf("1. `%s` (%s, Nullable) %s", name, typeBuffer.String(), desc)) + } else { + argBuffer.WriteString(fmt.Sprintf("1. 
`%s` (%s) %s", name, typeBuffer.String(), desc))
+		}
+
+		if i != len(signature.Parameters)-1 {
+			argBuffer.WriteString("\n")
+		}
+
+	}
+	return argBuffer.String(), nil
+
+}
+
+// RenderSignature returns a Markdown formatted string of the function signature.
+func RenderSignature(funcName string, signature *tfjson.FunctionSignature) (string, error) {
+
+	returnType := signature.ReturnType.FriendlyName()
+
+	paramBuffer := bytes.NewBuffer(nil)
+	for i, p := range signature.Parameters {
+		if i != 0 {
+			paramBuffer.WriteString(", ")
+		}
+
+		paramBuffer.WriteString(fmt.Sprintf("%s %s", p.Name, p.Type.FriendlyName()))
+	}
+
+	if signature.VariadicParameter != nil {
+		if signature.Parameters != nil {
+			paramBuffer.WriteString(", ")
+		}
+
+		paramBuffer.WriteString(fmt.Sprintf("%s %s...", signature.VariadicParameter.Name,
+			signature.VariadicParameter.Type.FriendlyName()))
+
+	}
+
+	return fmt.Sprintf("```text\n"+
+		"%s(%s) %s\n"+
+		"```",
+		funcName, paramBuffer.String(), returnType), nil
+}
+
+// RenderVariadicArg returns a Markdown formatted string of the variadic argument if it exists,
+// otherwise an empty string.
+func RenderVariadicArg(signature *tfjson.FunctionSignature) (string, error) {
+	if signature.VariadicParameter == nil {
+		return "", nil
+	}
+
+	name := signature.VariadicParameter.Name
+	desc := strings.TrimSpace(signature.VariadicParameter.Description)
+
+	typeBuffer := bytes.NewBuffer(nil)
+	err := schemamd.WriteType(typeBuffer, signature.VariadicParameter.Type)
+	if err != nil {
+		return "", err
+	}
+
+	if signature.VariadicParameter.IsNullable {
+		return fmt.Sprintf("1. `%s` (Variadic, %s, Nullable) %s", name, typeBuffer.String(), desc), nil
+	} else {
+		return fmt.Sprintf("1. `%s` (Variadic, %s) %s", name, typeBuffer.String(), desc), nil
+	}
+
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go
index ded53b65..60a0ede9 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go
@@ -3,13 +3,25 @@ package mdplain
-import "github.com/russross/blackfriday"
+import (
+	"bytes"
 
-// Clean runs a VERY naive cleanup of markdown text to make it more palatable as plain text.
-func PlainMarkdown(md string) (string, error) {
-	pt := &Text{}
-
-	html := blackfriday.MarkdownOptions([]byte(md), pt, blackfriday.Options{})
+	"github.com/yuin/goldmark"
+	"github.com/yuin/goldmark/extension"
+)
 
-	return string(html), nil
+// PlainMarkdown runs a VERY naive cleanup of markdown text to make it more palatable as plain text.
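[Editor's aside, not part of the upstream patch.] The rewritten `PlainMarkdown` just below builds a goldmark pipeline and swaps in this package's custom plain-text renderer via `goldmark.WithRenderer(NewTextRenderer())`. For contrast, a runnable sketch of the same pipeline with goldmark's stock HTML renderer, to show where the renderer option hooks in:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	// Identical construction to PlainMarkdown, minus WithRenderer: the
	// Linkify extension turns bare URLs into links before rendering.
	md := goldmark.New(goldmark.WithExtensions(extension.Linkify))

	var buf bytes.Buffer
	if err := md.Convert([]byte("See **the docs** at https://example.com."), &buf); err != nil {
		panic(err)
	}
	// The default renderer emits HTML here; the patch's TextRender
	// walks the same AST but writes plain text instead.
	fmt.Print(buf.String())
}
```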
+func PlainMarkdown(markdown string) (string, error) { + var buf bytes.Buffer + extensions := []goldmark.Extender{ + extension.Linkify, + } + md := goldmark.New( + goldmark.WithExtensions(extensions...), + goldmark.WithRenderer(NewTextRenderer()), + ) + if err := md.Convert([]byte(markdown), &buf); err != nil { + return "", err + } + return buf.String(), nil } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go index 660cdae5..93bd5ec7 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go @@ -5,175 +5,101 @@ package mdplain import ( "bytes" + "io" - "github.com/russross/blackfriday" + "github.com/yuin/goldmark/ast" + extAST "github.com/yuin/goldmark/extension/ast" + "github.com/yuin/goldmark/renderer" ) -type Text struct{} - -func TextRenderer() blackfriday.Renderer { - return &Text{} -} - -func (options *Text) GetFlags() int { - return 0 -} - -func (options *Text) TitleBlock(out *bytes.Buffer, text []byte) { - text = bytes.TrimPrefix(text, []byte("% ")) - text = bytes.Replace(text, []byte("\n% "), []byte("\n"), -1) - out.Write(text) - out.WriteString("\n") -} - -func (options *Text) Header(out *bytes.Buffer, text func() bool, level int, id string) { - marker := out.Len() - doubleSpace(out) - - if !text() { - out.Truncate(marker) - return +type TextRender struct{} + +func NewTextRenderer() *TextRender { + return &TextRender{} +} + +func (r *TextRender) Render(w io.Writer, source []byte, n ast.Node) error { + out := bytes.NewBuffer([]byte{}) + err := ast.Walk(n, func(node ast.Node, entering bool) (ast.WalkStatus, error) { + if !entering || node.Type() == ast.TypeDocument { + return ast.WalkContinue, nil + } + + switch node := node.(type) { + case *ast.Blockquote, *ast.Heading: + doubleSpace(out) + out.Write(node.Text(source)) + return ast.WalkSkipChildren, nil + case *ast.ThematicBreak: + doubleSpace(out) + return ast.WalkSkipChildren, nil + case *ast.CodeBlock: + doubleSpace(out) + for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + out.Write(line.Value(source)) + } + return ast.WalkSkipChildren, nil + case *ast.FencedCodeBlock: + doubleSpace(out) + doubleSpace(out) + for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + _, _ = out.Write(line.Value(source)) + } + return ast.WalkSkipChildren, nil + case *ast.List: + doubleSpace(out) + return ast.WalkContinue, nil + case *ast.Paragraph: + doubleSpace(out) + if node.Text(source)[0] == '|' { // Write tables as-is. 
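+				// Editorial note, not upstream code: a leading '|' is a cheap
+				// heuristic for GFM table rows, which are copied through verbatim
+				// below. node.Text(source) could conceivably be empty here (for
+				// example, an image-only paragraph), so the [0] index would
+				// benefit from a defensive length check.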
+ for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + out.Write(line.Value(source)) + } + return ast.WalkSkipChildren, nil + } + return ast.WalkContinue, nil + case *extAST.Strikethrough: + out.Write(node.Text(source)) + return ast.WalkContinue, nil + case *ast.AutoLink: + out.Write(node.URL(source)) + return ast.WalkSkipChildren, nil + case *ast.CodeSpan: + out.Write(node.Text(source)) + return ast.WalkSkipChildren, nil + case *ast.Link: + _, err := out.Write(node.Text(source)) + if !isRelativeLink(node.Destination) { + out.WriteString(" ") + out.Write(node.Destination) + } + return ast.WalkSkipChildren, err + case *ast.Text: + out.Write(node.Text(source)) + if node.SoftLineBreak() { + doubleSpace(out) + } + return ast.WalkContinue, nil + case *ast.Image: + return ast.WalkSkipChildren, nil + + } + return ast.WalkContinue, nil + }) + if err != nil { + return err } -} - -func (options *Text) BlockHtml(out *bytes.Buffer, text []byte) { - doubleSpace(out) - out.Write(text) - out.WriteByte('\n') -} - -func (options *Text) HRule(out *bytes.Buffer) { - doubleSpace(out) -} - -func (options *Text) BlockCode(out *bytes.Buffer, text []byte, lang string) { - options.BlockCodeNormal(out, text, lang) -} - -func (options *Text) BlockCodeNormal(out *bytes.Buffer, text []byte, lang string) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) BlockQuote(out *bytes.Buffer, text []byte) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) Table(out *bytes.Buffer, header []byte, body []byte, columnData []int) { - doubleSpace(out) - out.Write(header) - out.Write(body) -} - -func (options *Text) TableRow(out *bytes.Buffer, text []byte) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) TableHeaderCell(out *bytes.Buffer, text []byte, align int) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) TableCell(out *bytes.Buffer, text []byte, align int) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) Footnotes(out *bytes.Buffer, text func() bool) { - options.HRule(out) - options.List(out, text, 0) -} - -func (options *Text) FootnoteItem(out *bytes.Buffer, name, text []byte, flags int) { - out.Write(text) -} - -func (options *Text) List(out *bytes.Buffer, text func() bool, flags int) { - marker := out.Len() - doubleSpace(out) - - if !text() { - out.Truncate(marker) - return + _, err = w.Write(out.Bytes()) + if err != nil { + return err } + return nil } -func (options *Text) ListItem(out *bytes.Buffer, text []byte, flags int) { - out.Write(text) -} - -func (options *Text) Paragraph(out *bytes.Buffer, text func() bool) { - marker := out.Len() - doubleSpace(out) - - if !text() { - out.Truncate(marker) - return - } -} - -func (options *Text) AutoLink(out *bytes.Buffer, link []byte, kind int) { - out.Write(link) -} - -func (options *Text) CodeSpan(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) DoubleEmphasis(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) Emphasis(out *bytes.Buffer, text []byte) { - if len(text) == 0 { - return - } - out.Write(text) -} - -func (options *Text) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {} - -func (options *Text) LineBreak(out *bytes.Buffer) {} - -func (options *Text) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) { - out.Write(content) - if !isRelativeLink(link) { - out.WriteString(" ") - out.Write(link) - } -} - -func (options *Text) RawHtmlTag(out *bytes.Buffer, text []byte) {} - -func (options 
*Text) TripleEmphasis(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) StrikeThrough(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) FootnoteRef(out *bytes.Buffer, ref []byte, id int) {} - -func (options *Text) Entity(out *bytes.Buffer, entity []byte) { - out.Write(entity) -} - -func (options *Text) NormalText(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) Smartypants(out *bytes.Buffer, text []byte) {} - -func (options *Text) DocumentHeader(out *bytes.Buffer) {} - -func (options *Text) DocumentFooter(out *bytes.Buffer) {} - -func (options *Text) TocHeader(text []byte, level int) {} - -func (options *Text) TocFinalize() {} +func (r *TextRender) AddOptions(...renderer.Option) {} func doubleSpace(out *bytes.Buffer) { if out.Len() > 0 { diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go index b4c3293f..d0c3c965 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go @@ -8,11 +8,11 @@ import ( "fmt" "os" "os/exec" - "path" "path/filepath" "runtime" "strings" + "github.com/hashicorp/cli" "github.com/hashicorp/go-version" install "github.com/hashicorp/hc-install" "github.com/hashicorp/hc-install/checkpoint" @@ -22,54 +22,55 @@ import ( "github.com/hashicorp/hc-install/src" "github.com/hashicorp/terraform-exec/tfexec" tfjson "github.com/hashicorp/terraform-json" - "github.com/mitchellh/cli" "golang.org/x/exp/slices" ) var ( - examplesResourceFileTemplate = resourceFileTemplate("resources/{{.Name}}/resource.tf") - examplesResourceImportTemplate = resourceFileTemplate("resources/{{.Name}}/import.sh") - examplesDataSourceFileTemplate = resourceFileTemplate("data-sources/{{ .Name }}/data-source.tf") - examplesProviderFileTemplate = providerFileTemplate("provider/provider.tf") - - websiteResourceFileTemplate = resourceFileTemplate("resources/{{ .ShortName }}.md.tmpl") - websiteResourceFallbackFileTemplate = resourceFileTemplate("resources.md.tmpl") - websiteResourceFileStatic = []resourceFileTemplate{ - resourceFileTemplate("resources/{{ .ShortName }}.md"), - // TODO: warn for all of these, as they won't render? massage them to the proper output file name? - resourceFileTemplate("resources/{{ .ShortName }}.markdown"), - resourceFileTemplate("resources/{{ .ShortName }}.html.markdown"), - resourceFileTemplate("resources/{{ .ShortName }}.html.md"), - resourceFileTemplate("r/{{ .ShortName }}.markdown"), - resourceFileTemplate("r/{{ .ShortName }}.md"), - resourceFileTemplate("r/{{ .ShortName }}.html.markdown"), - resourceFileTemplate("r/{{ .ShortName }}.html.md"), - } - websiteDataSourceFileTemplate = resourceFileTemplate("data-sources/{{ .ShortName }}.md.tmpl") - websiteDataSourceFallbackFileTemplate = resourceFileTemplate("data-sources.md.tmpl") - websiteDataSourceFileStatic = []resourceFileTemplate{ - resourceFileTemplate("data-sources/{{ .ShortName }}.md"), - // TODO: warn for all of these, as they won't render? massage them to the proper output file name? 
- resourceFileTemplate("data-sources/{{ .ShortName }}.markdown"), - resourceFileTemplate("data-sources/{{ .ShortName }}.html.markdown"), - resourceFileTemplate("data-sources/{{ .ShortName }}.html.md"), - resourceFileTemplate("d/{{ .ShortName }}.markdown"), - resourceFileTemplate("d/{{ .ShortName }}.md"), - resourceFileTemplate("d/{{ .ShortName }}.html.markdown"), - resourceFileTemplate("d/{{ .ShortName }}.html.md"), - } - websiteProviderFileTemplate = providerFileTemplate("index.md.tmpl") - websiteProviderFileStatic = []providerFileTemplate{ - providerFileTemplate("index.markdown"), - providerFileTemplate("index.md"), - providerFileTemplate("index.html.markdown"), - providerFileTemplate("index.html.md"), + websiteResourceFile = "resources/%s.md.tmpl" + websiteResourceFallbackFile = "resources.md.tmpl" + websiteResourceFileStaticCandidates = []string{ + "resources/%s.md", + "resources/%s.markdown", + "resources/%s.html.markdown", + "resources/%s.html.md", + "r/%s.markdown", + "r/%s.md", + "r/%s.html.markdown", + "r/%s.html.md", + } + websiteDataSourceFile = "data-sources/%s.md.tmpl" + websiteDataSourceFallbackFile = "data-sources.md.tmpl" + websiteDataSourceFileStaticCandidates = []string{ + "data-sources/%s.md", + "data-sources/%s.markdown", + "data-sources/%s.html.markdown", + "data-sources/%s.html.md", + "d/%s.markdown", + "d/%s.md", + "d/%s.html.markdown", + "d/%s.html.md", + } + websiteFunctionFile = "functions/%s.md.tmpl" + websiteFunctionFallbackFile = "functions.md.tmpl" + websiteFunctionFileStaticCandidates = []string{ + "functions/%s.md", + "functions/%s.markdown", + "functions/%s.html.markdown", + "functions/%s.html.md", + } + websiteProviderFile = "index.md.tmpl" + websiteProviderFileStaticCandidates = []string{ + "index.markdown", + "index.md", + "index.html.markdown", + "index.html.md", } managedWebsiteSubDirectories = []string{ "data-sources", "guides", "resources", + "functions", } managedWebsiteFiles = []string{ @@ -85,6 +86,7 @@ type generator struct { providerDir string providerName string + providersSchemaPath string renderedProviderName string renderedWebsiteDir string examplesDir string @@ -102,7 +104,7 @@ func (g *generator) warnf(format string, a ...interface{}) { g.ui.Warn(fmt.Sprintf(format, a...)) } -func Generate(ui cli.Ui, providerDir, providerName, renderedProviderName, renderedWebsiteDir, examplesDir, websiteTmpDir, templatesDir, tfVersion string, ignoreDeprecated bool) error { +func Generate(ui cli.Ui, providerDir, providerName, providersSchemaPath, renderedProviderName, renderedWebsiteDir, examplesDir, websiteTmpDir, templatesDir, tfVersion string, ignoreDeprecated bool) error { // Ensure provider directory is resolved absolute path if providerDir == "" { wd, err := os.Getwd() @@ -139,6 +141,7 @@ func Generate(ui cli.Ui, providerDir, providerName, renderedProviderName, render providerDir: providerDir, providerName: providerName, + providersSchemaPath: providersSchemaPath, renderedProviderName: renderedProviderName, renderedWebsiteDir: renderedWebsiteDir, examplesDir: examplesDir, @@ -156,35 +159,34 @@ func Generate(ui cli.Ui, providerDir, providerName, renderedProviderName, render func (g *generator) Generate(ctx context.Context) error { var err error - providerName := g.providerName if g.providerName == "" { - providerName = filepath.Base(g.providerDir) + g.providerName = filepath.Base(g.providerDir) } if g.renderedProviderName == "" { - g.renderedProviderName = providerName + g.renderedProviderName = g.providerName } - g.infof("rendering website for 
provider %q (as %q)", providerName, g.renderedProviderName)
+	g.infof("rendering website for provider %q (as %q)", g.providerName, g.renderedProviderName)
 
 	switch {
 	case g.websiteTmpDir == "":
 		g.websiteTmpDir, err = os.MkdirTemp("", "tfws")
 		if err != nil {
-			return err
+			return fmt.Errorf("error creating temporary website directory: %w", err)
 		}
 		defer os.RemoveAll(g.websiteTmpDir)
 	default:
 		g.infof("cleaning tmp dir %q", g.websiteTmpDir)
 		err = os.RemoveAll(g.websiteTmpDir)
 		if err != nil {
-			return err
+			return fmt.Errorf("error removing temporary website directory %q: %w", g.websiteTmpDir, err)
 		}
 
 		g.infof("creating tmp dir %q", g.websiteTmpDir)
 		err = os.MkdirAll(g.websiteTmpDir, 0755)
 		if err != nil {
-			return err
+			return fmt.Errorf("error creating temporary website directory %q: %w", g.websiteTmpDir, err)
 		}
 	}
 
@@ -193,7 +195,7 @@ func (g *generator) Generate(ctx context.Context) error {
 	case os.IsNotExist(err):
 		// do nothing, no template dir
 	case err != nil:
-		return err
+		return fmt.Errorf("error getting information for provider templates directory %q: %w", g.ProviderTemplatesDir(), err)
 	default:
 		if !templatesDirInfo.IsDir() {
 			return fmt.Errorf("template path is not a directory: %s", g.ProviderTemplatesDir())
@@ -202,26 +204,36 @@ func (g *generator) Generate(ctx context.Context) error {
 		g.infof("copying any existing content to tmp dir")
 		err = cp(g.ProviderTemplatesDir(), g.TempTemplatesDir())
 		if err != nil {
-			return err
+			return fmt.Errorf("error copying existing content to temporary directory %q: %w", g.TempTemplatesDir(), err)
 		}
 	}
 
-	g.infof("exporting schema from Terraform")
-	providerSchema, err := g.terraformProviderSchema(ctx, providerName)
-	if err != nil {
-		return err
+	var providerSchema *tfjson.ProviderSchema
+
+	if g.providersSchemaPath == "" {
+		g.infof("exporting schema from Terraform")
+		providerSchema, err = g.terraformProviderSchemaFromTerraform(ctx)
+		if err != nil {
+			return fmt.Errorf("error exporting provider schema from Terraform: %w", err)
+		}
+	} else {
+		g.infof("exporting schema from JSON file")
+		providerSchema, err = g.terraformProviderSchemaFromFile()
+		if err != nil {
+			return fmt.Errorf("error exporting provider schema from JSON file: %w", err)
+		}
 	}
 
-	g.infof("rendering missing docs")
-	err = g.renderMissingDocs(providerName, providerSchema)
+	g.infof("generating missing templates")
+	err = g.generateMissingTemplates(providerSchema)
 	if err != nil {
-		return err
+		return fmt.Errorf("error generating missing templates: %w", err)
 	}
 
 	g.infof("rendering static website")
-	err = g.renderStaticWebsite(providerName, providerSchema)
+	err = g.renderStaticWebsite(providerSchema)
 	if err != nil {
-		return err
+		return fmt.Errorf("error rendering static website: %w", err)
 	}
 
 	return nil
@@ -229,170 +241,171 @@ func (g *generator) Generate(ctx context.Context) error {
 
 // ProviderDocsDir returns the absolute path to the joined provider and
 // given website documentation directory, which defaults to "docs".
-func (g generator) ProviderDocsDir() string {
+func (g *generator) ProviderDocsDir() string {
 	return filepath.Join(g.providerDir, g.renderedWebsiteDir)
 }
 
 // ProviderExamplesDir returns the absolute path to the joined provider and
 // given examples directory, which defaults to "examples".
-func (g generator) ProviderExamplesDir() string {
+func (g *generator) ProviderExamplesDir() string {
 	return filepath.Join(g.providerDir, g.examplesDir)
 }
 
 // ProviderTemplatesDir returns the absolute path to the joined provider and
 // given templates directory, which defaults to "templates".
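[Editor's aside, not part of the upstream patch.] The refactor in this file replaces the old `text/template`-driven path rendering (`resourceFileTemplate(...)`) with plain `fmt.Sprintf` patterns plus a candidate loop. A minimal, runnable sketch of that lookup order, using an abridged pattern list from the patch and a hypothetical short name:

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Abridged from websiteResourceFileStaticCandidates above; "thing" is a
	// stand-in for a resource short name (e.g. "examplecloud_thing" with the
	// provider prefix stripped by resourceShortName).
	candidates := []string{
		"resources/%s.md",
		"resources/%s.markdown",
		"r/%s.md",
	}

	for _, pattern := range candidates {
		// Mirrors the patch: substitute the short name, then anchor the
		// candidate under the temporary templates directory.
		candidate := filepath.Join("templates", fmt.Sprintf(pattern, "thing"))
		fmt.Println("would check for static file:", candidate)
	}
}
```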
-func (g generator) ProviderTemplatesDir() string { +func (g *generator) ProviderTemplatesDir() string { return filepath.Join(g.providerDir, g.templatesDir) } // TempTemplatesDir returns the absolute path to the joined temporary and -// hardcoded "templates" sub-directory, which is where provider templates are +// hardcoded "templates" subdirectory, which is where provider templates are // copied. -func (g generator) TempTemplatesDir() string { +func (g *generator) TempTemplatesDir() string { return filepath.Join(g.websiteTmpDir, "templates") } -func (g *generator) renderMissingResourceDoc(providerName, name, typeName string, schema *tfjson.Schema, websiteFileTemplate resourceFileTemplate, fallbackWebsiteFileTemplate resourceFileTemplate, websiteStaticCandidateTemplates []resourceFileTemplate, examplesFileTemplate resourceFileTemplate, examplesImportTemplate *resourceFileTemplate) error { - tmplPath, err := websiteFileTemplate.Render(g.providerDir, name, providerName) - if err != nil { - return fmt.Errorf("unable to render path for resource %q: %w", name, err) - } - tmplPath = filepath.Join(g.TempTemplatesDir(), tmplPath) - if fileExists(tmplPath) { - g.infof("resource %q template exists, skipping", name) +func (g *generator) generateMissingResourceTemplate(resourceName string) error { + templatePath := fmt.Sprintf(websiteResourceFile, resourceShortName(resourceName, g.providerName)) + templatePath = filepath.Join(g.TempTemplatesDir(), templatePath) + if fileExists(templatePath) { + g.infof("resource %q template exists, skipping", resourceName) return nil } - for _, candidate := range websiteStaticCandidateTemplates { - candidatePath, err := candidate.Render(g.providerDir, name, providerName) + fallbackTemplatePath := filepath.Join(g.TempTemplatesDir(), websiteResourceFallbackFile) + if fileExists(fallbackTemplatePath) { + g.infof("resource %q fallback template exists, creating template", resourceName) + err := cp(fallbackTemplatePath, templatePath) if err != nil { - return fmt.Errorf("unable to render path for resource %q: %w", name, err) + return fmt.Errorf("unable to copy fallback template for %q: %w", resourceName, err) } + return nil + } + + for _, candidate := range websiteResourceFileStaticCandidates { + candidatePath := fmt.Sprintf(candidate, resourceShortName(resourceName, g.providerName)) candidatePath = filepath.Join(g.TempTemplatesDir(), candidatePath) if fileExists(candidatePath) { - g.infof("resource %q static file exists, skipping", name) + g.infof("resource %q static file exists, skipping", resourceName) return nil } } - examplePath, err := examplesFileTemplate.Render(g.providerDir, name, providerName) + g.infof("generating new template for %q", resourceName) + err := writeFile(templatePath, string(defaultResourceTemplate)) if err != nil { - return fmt.Errorf("unable to render example file path for %q: %w", name, err) - } - if examplePath != "" { - examplePath = filepath.Join(g.ProviderExamplesDir(), examplePath) - } - if !fileExists(examplePath) { - examplePath = "" + return fmt.Errorf("unable to write template for %q: %w", resourceName, err) } - importPath := "" - if examplesImportTemplate != nil { - importPath, err = examplesImportTemplate.Render(g.providerDir, name, providerName) - if err != nil { - return fmt.Errorf("unable to render example import file path for %q: %w", name, err) - } - if importPath != "" { - importPath = filepath.Join(g.ProviderExamplesDir(), importPath) - } - if !fileExists(importPath) { - importPath = "" - } - } - - targetResourceTemplate := 
defaultResourceTemplate + return nil +} - fallbackTmplPath, err := fallbackWebsiteFileTemplate.Render(g.providerDir, name, providerName) - if err != nil { - return fmt.Errorf("unable to render path for resource %q: %w", name, err) +func (g *generator) generateMissingDataSourceTemplate(datasourceName string) error { + templatePath := fmt.Sprintf(websiteDataSourceFile, resourceShortName(datasourceName, g.providerName)) + templatePath = filepath.Join(g.TempTemplatesDir(), templatePath) + if fileExists(templatePath) { + g.infof("data-source %q template exists, skipping", datasourceName) + return nil } - fallbackTmplPath = filepath.Join(g.TempTemplatesDir(), fallbackTmplPath) - if fileExists(fallbackTmplPath) { - g.infof("resource %q fallback template exists", name) - tmplData, err := os.ReadFile(fallbackTmplPath) + + fallbackTemplatePath := filepath.Join(g.TempTemplatesDir(), websiteDataSourceFallbackFile) + if fileExists(fallbackTemplatePath) { + g.infof("data-source %q fallback template exists, creating template", datasourceName) + err := cp(fallbackTemplatePath, templatePath) if err != nil { - return fmt.Errorf("unable to read file %q: %w", fallbackTmplPath, err) + return fmt.Errorf("unable to copy fallback template for %q: %w", datasourceName, err) } - targetResourceTemplate = resourceTemplate(tmplData) + return nil } - g.infof("generating template for %q", name) - md, err := targetResourceTemplate.Render(g.providerDir, name, providerName, g.renderedProviderName, typeName, examplePath, importPath, schema) - if err != nil { - return fmt.Errorf("unable to render template for %q: %w", name, err) + for _, candidate := range websiteDataSourceFileStaticCandidates { + candidatePath := fmt.Sprintf(candidate, resourceShortName(datasourceName, g.providerName)) + candidatePath = filepath.Join(g.TempTemplatesDir(), candidatePath) + if fileExists(candidatePath) { + g.infof("data-source %q static file exists, skipping", datasourceName) + return nil + } } - err = writeFile(tmplPath, md) + g.infof("generating new template for data-source %q", datasourceName) + err := writeFile(templatePath, string(defaultResourceTemplate)) if err != nil { - return fmt.Errorf("unable to write file %q: %w", tmplPath, err) + return fmt.Errorf("unable to write template for %q: %w", datasourceName, err) } return nil } -func (g *generator) renderMissingProviderDoc(providerName string, schema *tfjson.Schema, websiteFileTemplate providerFileTemplate, websiteStaticCandidateTemplates []providerFileTemplate, examplesFileTemplate providerFileTemplate) error { - tmplPath, err := websiteFileTemplate.Render(g.providerDir, providerName) - if err != nil { - return fmt.Errorf("unable to render path for provider %q: %w", providerName, err) - } - tmplPath = filepath.Join(g.TempTemplatesDir(), tmplPath) - if fileExists(tmplPath) { - g.infof("provider %q template exists, skipping", providerName) +func (g *generator) generateMissingFunctionTemplate(functionName string) error { + templatePath := fmt.Sprintf(websiteFunctionFile, resourceShortName(functionName, g.providerName)) + templatePath = filepath.Join(g.TempTemplatesDir(), templatePath) + if fileExists(templatePath) { + g.infof("function %q template exists, skipping", functionName) return nil } - for _, candidate := range websiteStaticCandidateTemplates { - candidatePath, err := candidate.Render(g.providerDir, providerName) + fallbackTemplatePath := filepath.Join(g.TempTemplatesDir(), websiteFunctionFallbackFile) + if fileExists(fallbackTemplatePath) { + g.infof("function %q fallback 
template exists, creating template", functionName) + err := cp(fallbackTemplatePath, templatePath) if err != nil { - return fmt.Errorf("unable to render path for provider %q: %w", providerName, err) + return fmt.Errorf("unable to copy fallback template for %q: %w", functionName, err) } + return nil + } + + for _, candidate := range websiteFunctionFileStaticCandidates { + candidatePath := fmt.Sprintf(candidate, resourceShortName(functionName, g.providerName)) candidatePath = filepath.Join(g.TempTemplatesDir(), candidatePath) if fileExists(candidatePath) { - g.infof("provider %q static file exists, skipping", providerName) + g.infof("function %q static file exists, skipping", functionName) return nil } } - examplePath, err := examplesFileTemplate.Render(g.providerDir, providerName) + g.infof("generating new template for function %q", functionName) + err := writeFile(templatePath, string(defaultFunctionTemplate)) if err != nil { - return fmt.Errorf("unable to render example file path for %q: %w", providerName, err) - } - if examplePath != "" { - examplePath = filepath.Join(g.ProviderExamplesDir(), examplePath) + return fmt.Errorf("unable to write template for %q: %w", functionName, err) } - if !fileExists(examplePath) { - examplePath = "" + + return nil +} + +func (g *generator) generateMissingProviderTemplate() error { + templatePath := filepath.Join(g.TempTemplatesDir(), websiteProviderFile) + if fileExists(templatePath) { + g.infof("provider %q template exists, skipping", g.providerName) + return nil } - g.infof("generating template for %q", providerName) - md, err := defaultProviderTemplate.Render(g.providerDir, providerName, g.renderedProviderName, examplePath, schema) - if err != nil { - return fmt.Errorf("unable to render template for %q: %w", providerName, err) + for _, candidate := range websiteProviderFileStaticCandidates { + candidatePath := filepath.Join(g.TempTemplatesDir(), candidate) + if fileExists(candidatePath) { + g.infof("provider %q static file exists, skipping", g.providerName) + return nil + } } - err = writeFile(tmplPath, md) + g.infof("generating new template for %q", g.providerName) + err := writeFile(templatePath, string(defaultProviderTemplate)) if err != nil { - return fmt.Errorf("unable to write file %q: %w", tmplPath, err) + return fmt.Errorf("unable to write template for %q: %w", g.providerName, err) } return nil } -func (g *generator) renderMissingDocs(providerName string, providerSchema *tfjson.ProviderSchema) error { +func (g *generator) generateMissingTemplates(providerSchema *tfjson.ProviderSchema) error { g.infof("generating missing resource content") for name, schema := range providerSchema.ResourceSchemas { if g.ignoreDeprecated && schema.Block.Deprecated { continue } - err := g.renderMissingResourceDoc(providerName, name, "Resource", schema, - websiteResourceFileTemplate, - websiteResourceFallbackFileTemplate, - websiteResourceFileStatic, - examplesResourceFileTemplate, - &examplesResourceImportTemplate) + err := g.generateMissingResourceTemplate(name) if err != nil { - return fmt.Errorf("unable to render doc %q: %w", name, err) + return fmt.Errorf("unable to generate template for resource %q: %w", name, err) } } @@ -402,35 +415,38 @@ func (g *generator) renderMissingDocs(providerName string, providerSchema *tfjso continue } - err := g.renderMissingResourceDoc(providerName, name, "Data Source", schema, - websiteDataSourceFileTemplate, - websiteDataSourceFallbackFileTemplate, - websiteDataSourceFileStatic, - examplesDataSourceFileTemplate, - nil) + err 
:= g.generateMissingDataSourceTemplate(name) if err != nil { - return fmt.Errorf("unable to render doc %q: %w", name, err) + return fmt.Errorf("unable to generate template for data-source %q: %w", name, err) + } + } + + g.infof("generating missing function content") + for name, signature := range providerSchema.Functions { + if g.ignoreDeprecated && signature.DeprecationMessage != "" { + continue + } + + err := g.generateMissingFunctionTemplate(name) + if err != nil { + return fmt.Errorf("unable to generate template for function %q: %w", name, err) } } g.infof("generating missing provider content") - err := g.renderMissingProviderDoc(providerName, providerSchema.ConfigSchema, - websiteProviderFileTemplate, - websiteProviderFileStatic, - examplesProviderFileTemplate, - ) + err := g.generateMissingProviderTemplate() if err != nil { - return fmt.Errorf("unable to render provider doc: %w", err) + return fmt.Errorf("unable to generate template for provider: %w", err) } return nil } -func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfjson.ProviderSchema) error { +func (g *generator) renderStaticWebsite(providerSchema *tfjson.ProviderSchema) error { g.infof("cleaning rendered website dir") dirEntry, err := os.ReadDir(g.ProviderDocsDir()) - if err != nil { - return err + if err != nil && !os.IsNotExist(err) { + return fmt.Errorf("unable to read rendered website directory %q: %w", g.ProviderDocsDir(), err) } for _, file := range dirEntry { @@ -438,9 +454,9 @@ func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfj // Remove subdirectories managed by tfplugindocs if file.IsDir() && slices.Contains(managedWebsiteSubDirectories, file.Name()) { g.infof("removing directory: %q", file.Name()) - err = os.RemoveAll(path.Join(g.ProviderDocsDir(), file.Name())) + err = os.RemoveAll(filepath.Join(g.ProviderDocsDir(), file.Name())) if err != nil { - return err + return fmt.Errorf("unable to remove directory %q from rendered website directory: %w", file.Name(), err) } continue } @@ -448,41 +464,45 @@ func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfj // Remove files managed by tfplugindocs if !file.IsDir() && slices.Contains(managedWebsiteFiles, file.Name()) { g.infof("removing file: %q", file.Name()) - err = os.RemoveAll(path.Join(g.ProviderDocsDir(), file.Name())) + err = os.RemoveAll(filepath.Join(g.ProviderDocsDir(), file.Name())) if err != nil { - return err + return fmt.Errorf("unable to remove file %q from rendered website directory: %w", file.Name(), err) } continue } } - shortName := providerShortName(providerName) + shortName := providerShortName(g.providerName) g.infof("rendering templated website to static markdown") - err = filepath.Walk(g.websiteTmpDir, func(path string, info os.FileInfo, _ error) error { - if info.IsDir() { + err = filepath.WalkDir(g.websiteTmpDir, func(path string, d os.DirEntry, err error) error { + if err != nil { + return fmt.Errorf("unable to walk path %q: %w", path, err) + } + if d.IsDir() { // skip directories return nil } rel, err := filepath.Rel(filepath.Join(g.TempTemplatesDir()), path) if err != nil { - return err + return fmt.Errorf("unable to retrieve the relative path of basepath %q and targetpath %q: %w", + filepath.Join(g.TempTemplatesDir()), path, err) } relDir, relFile := filepath.Split(rel) relDir = filepath.ToSlash(relDir) - // skip special top-level generic resource and data source templates - if relDir == "" && (relFile == "resources.md.tmpl" || relFile == 
"data-sources.md.tmpl") { + // skip special top-level generic resource, data source, and function templates + if relDir == "" && (relFile == "resources.md.tmpl" || relFile == "data-sources.md.tmpl" || relFile == "functions.md.tmpl") { return nil } renderedPath := filepath.Join(g.ProviderDocsDir(), rel) err = os.MkdirAll(filepath.Dir(renderedPath), 0755) if err != nil { - return err + return fmt.Errorf("unable to create rendered website subdirectory %q: %w", renderedPath, err) } ext := filepath.Ext(path) @@ -500,7 +520,7 @@ func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfj out, err := os.Create(renderedPath) if err != nil { - return err + return fmt.Errorf("unable to create file %q: %w", renderedPath, err) } defer out.Close() @@ -512,7 +532,7 @@ func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfj if resSchema != nil { tmpl := resourceTemplate(tmplData) - render, err := tmpl.Render(g.providerDir, resName, providerName, g.renderedProviderName, "Data Source", exampleFilePath, "", resSchema) + render, err := tmpl.Render(g.providerDir, resName, g.providerName, g.renderedProviderName, "Data Source", exampleFilePath, "", resSchema) if err != nil { return fmt.Errorf("unable to render data source template %q: %w", rel, err) } @@ -530,22 +550,40 @@ func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfj if resSchema != nil { tmpl := resourceTemplate(tmplData) - render, err := tmpl.Render(g.providerDir, resName, providerName, g.renderedProviderName, "Resource", exampleFilePath, importFilePath, resSchema) + render, err := tmpl.Render(g.providerDir, resName, g.providerName, g.renderedProviderName, "Resource", exampleFilePath, importFilePath, resSchema) if err != nil { return fmt.Errorf("unable to render resource template %q: %w", rel, err) } _, err = out.WriteString(render) if err != nil { - return fmt.Errorf("unable to write regindered string: %w", err) + return fmt.Errorf("unable to write rendered string: %w", err) } return nil } g.warnf("resource entitled %q, or %q does not exist", shortName, resName) + case "functions/": + funcName := removeAllExt(relFile) + if signature, ok := providerSchema.Functions[funcName]; ok { + exampleFilePath := filepath.Join(g.ProviderExamplesDir(), "functions", funcName, "function.tf") + + tmpl := functionTemplate(tmplData) + render, err := tmpl.Render(g.providerDir, funcName, g.providerName, g.renderedProviderName, "function", exampleFilePath, signature) + if err != nil { + return fmt.Errorf("unable to render function template %q: %w", rel, err) + } + _, err = out.WriteString(render) + if err != nil { + return fmt.Errorf("unable to write rendered string: %w", err) + } + return nil + } + + g.warnf("function entitled %q does not exist", funcName) case "": // provider if relFile == "index.md.tmpl" { tmpl := providerTemplate(tmplData) exampleFilePath := filepath.Join(g.ProviderExamplesDir(), "provider", "provider.tf") - render, err := tmpl.Render(g.providerDir, providerName, g.renderedProviderName, exampleFilePath, providerSchema.ConfigSchema) + render, err := tmpl.Render(g.providerDir, g.providerName, g.renderedProviderName, exampleFilePath, providerSchema.ConfigSchema) if err != nil { return fmt.Errorf("unable to render provider template %q: %w", rel, err) } @@ -565,28 +603,23 @@ func (g *generator) renderStaticWebsite(providerName string, providerSchema *tfj return nil }) if err != nil { - return err + return fmt.Errorf("unable to render templated website to static markdown: %w", err) } 
return nil } -func (g *generator) terraformProviderSchema(ctx context.Context, providerName string) (*tfjson.ProviderSchema, error) { +func (g *generator) terraformProviderSchemaFromTerraform(ctx context.Context) (*tfjson.ProviderSchema, error) { var err error - shortName := providerShortName(providerName) + shortName := providerShortName(g.providerName) tmpDir, err := os.MkdirTemp("", "tfws") if err != nil { - return nil, err + return nil, fmt.Errorf("unable to create temporary provider install directory %q: %w", tmpDir, err) } defer os.RemoveAll(tmpDir) - // tmpDir := "/tmp/tftmp" - // os.RemoveAll(tmpDir) - // os.MkdirAll(tmpDir, 0755) - // fmt.Printf("[DEBUG] tmpdir %q\n", tmpDir) - g.infof("compiling provider %q", shortName) providerPath := fmt.Sprintf("plugins/registry.terraform.io/hashicorp/%s/0.0.1/%s_%s", shortName, runtime.GOOS, runtime.GOARCH) outFile := filepath.Join(tmpDir, providerPath, fmt.Sprintf("terraform-provider-%s", shortName)) @@ -599,7 +632,7 @@ func (g *generator) terraformProviderSchema(ctx context.Context, providerName st // TODO: constrain env here to make it a little safer? _, err = runCmd(buildCmd) if err != nil { - return nil, err + return nil, fmt.Errorf("unable to execute go build command: %w", err) } err = writeFile(filepath.Join(tmpDir, "provider.tf"), fmt.Sprintf(` @@ -607,7 +640,7 @@ provider %[1]q { } `, shortName)) if err != nil { - return nil, err + return nil, fmt.Errorf("unable to write provider.tf file: %w", err) } i := install.NewInstaller() @@ -636,24 +669,46 @@ provider %[1]q { tfBin, err := i.Ensure(context.Background(), sources) if err != nil { - return nil, err + return nil, fmt.Errorf("unable to download Terraform binary: %w", err) } tf, err := tfexec.NewTerraform(tmpDir, tfBin) if err != nil { - return nil, err + return nil, fmt.Errorf("unable to create new terraform exec instance: %w", err) } g.infof("running terraform init") err = tf.Init(ctx, tfexec.Get(false), tfexec.PluginDir("./plugins")) if err != nil { - return nil, err + return nil, fmt.Errorf("unable to run terraform init on provider: %w", err) } g.infof("getting provider schema") schemas, err := tf.ProvidersSchema(ctx) if err != nil { - return nil, err + return nil, fmt.Errorf("unable to retrieve provider schema from terraform exec: %w", err) + } + + if ps, ok := schemas.Schemas[shortName]; ok { + return ps, nil + } + + if ps, ok := schemas.Schemas["registry.terraform.io/hashicorp/"+shortName]; ok { + return ps, nil + } + + return nil, fmt.Errorf("unable to find schema in JSON for provider %q", shortName) +} + +func (g *generator) terraformProviderSchemaFromFile() (*tfjson.ProviderSchema, error) { + var err error + + shortName := providerShortName(g.providerName) + + g.infof("getting provider schema") + schemas, err := extractSchemaFromFile(g.providersSchemaPath) + if err != nil { + return nil, fmt.Errorf("unable to retrieve provider schema from JSON file: %w", err) } if ps, ok := schemas.Schemas[shortName]; ok { diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go new file mode 100644 index 00000000..366812bc --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go @@ -0,0 +1,27 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package provider + +import ( + "fmt" + + "github.com/hashicorp/cli" +) + +type Logger struct { + ui cli.Ui +} + +func NewLogger(ui cli.Ui) *Logger { + return &Logger{ui} +} + +func (l *Logger) infof(format string, args ...interface{}) { + l.ui.Info(fmt.Sprintf(format, args...)) +} + +//nolint:unused +func (l *Logger) warnf(format string, args ...interface{}) { + l.ui.Warn(fmt.Sprintf(format, args...)) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go new file mode 100644 index 00000000..babe2127 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go @@ -0,0 +1,409 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package provider + +import ( + "bufio" + "bytes" + "fmt" + "io/fs" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + + "github.com/hashicorp/cli" + "github.com/yuin/goldmark/ast" + "github.com/yuin/goldmark/text" +) + +type migrator struct { + // providerDir is the absolute path to the root provider directory + providerDir string + + websiteDir string + templatesDir string + examplesDir string + + providerName string + + ui cli.Ui +} + +func (m *migrator) infof(format string, a ...interface{}) { + m.ui.Info(fmt.Sprintf(format, a...)) +} + +func (m *migrator) warnf(format string, a ...interface{}) { + m.ui.Warn(fmt.Sprintf(format, a...)) +} + +func Migrate(ui cli.Ui, providerDir string, templatesDir string, examplesDir string, providerName string) error { + // Ensure provider directory is resolved absolute path + if providerDir == "" { + wd, err := os.Getwd() + + if err != nil { + return fmt.Errorf("error getting working directory: %w", err) + } + + providerDir = wd + } else { + absProviderDir, err := filepath.Abs(providerDir) + + if err != nil { + return fmt.Errorf("error getting absolute path with provider directory %q: %w", providerDir, err) + } + + providerDir = absProviderDir + } + + // Verify provider directory + providerDirFileInfo, err := os.Stat(providerDir) + + if err != nil { + return fmt.Errorf("error getting information for provider directory %q: %w", providerDir, err) + } + + if !providerDirFileInfo.IsDir() { + return fmt.Errorf("expected %q to be a directory", providerDir) + } + + // Default providerName to provider directory name + if providerName == "" { + providerName = filepath.Base(providerDir) + } + + // Determine website directory + websiteDir, err := determineWebsiteDir(providerDir) + if err != nil { + return err + } + + m := &migrator{ + providerDir: providerDir, + templatesDir: templatesDir, + examplesDir: examplesDir, + websiteDir: websiteDir, + providerName: providerName, + ui: ui, + } + + return m.Migrate() +} + +func (m *migrator) Migrate() error { + m.infof("migrating website from %q to %q", m.ProviderWebsiteDir(), m.ProviderTemplatesDir()) + + err := filepath.WalkDir(m.ProviderWebsiteDir(), func(path string, d os.DirEntry, err error) error { + if err != nil { + return fmt.Errorf("unable to walk path %q: %w", path, err) + } + + if d.IsDir() { + switch d.Name() { + case "d", "data-sources": //data-sources + m.infof("migrating data-sources directory: %s", d.Name()) + err := filepath.WalkDir(path, m.MigrateTemplate("data-sources")) + if err != nil { + return err + } + return filepath.SkipDir + case "r", "resources": //resources + m.infof("migrating resources directory: %s", d.Name()) + err := 
filepath.WalkDir(path, m.MigrateTemplate("resources"))
+				if err != nil {
+					return err
+				}
+				return filepath.SkipDir
+			case "functions":
+				m.infof("migrating functions directory: %s", d.Name())
+				err := filepath.WalkDir(path, m.MigrateTemplate("functions"))
+				if err != nil {
+					return err
+				}
+				return filepath.SkipDir
+			case "guides":
+				m.infof("copying guides directory: %s", d.Name())
+				err := cp(path, filepath.Join(m.ProviderTemplatesDir(), "guides"))
+				if err != nil {
+					return fmt.Errorf("unable to copy guides directory %q: %w", path, err)
+				}
+				return filepath.SkipDir
+			}
+		} else {
+			switch {
+			case regexp.MustCompile(`index.*`).MatchString(d.Name()): //index file
+				m.infof("migrating provider index: %s", d.Name())
+				err := filepath.WalkDir(path, m.MigrateTemplate(""))
+				if err != nil {
+					return err
+				}
+				return nil
+			default:
+				//skip non-index files
+				return nil
+			}
+		}
+
+		return nil
+	})
+	if err != nil {
+		return fmt.Errorf("unable to migrate website: %w", err)
+	}
+
+	//remove legacy website directory
+	err = os.RemoveAll(filepath.Join(m.providerDir, "website"))
+	if err != nil {
+		return fmt.Errorf("unable to remove legacy website directory: %w", err)
+	}
+
+	return nil
+}
+
+func (m *migrator) MigrateTemplate(relDir string) fs.WalkDirFunc {
+	return func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		if d.IsDir() {
+			//skip processing directories
+			return nil
+		}
+
+		m.infof("migrating file %q", d.Name())
+		data, err := os.ReadFile(path)
+		if err != nil {
+			return fmt.Errorf("unable to read file %q: %w", d.Name(), err)
+		}
+
+		baseName, _, _ := strings.Cut(d.Name(), ".")
+		shortName := providerShortName(m.providerName)
+		fileName := strings.TrimPrefix(baseName, shortName+"_")
+
+		var exampleRelDir string
+		if fileName == "index" {
+			exampleRelDir = relDir
+		} else {
+			exampleRelDir = filepath.Join(relDir, fileName)
+		}
+		templateFilePath := filepath.Join(m.ProviderTemplatesDir(), relDir, fileName+".md.tmpl")
+
+		err = os.MkdirAll(filepath.Dir(templateFilePath), 0755)
+		if err != nil {
+			return fmt.Errorf("unable to create directory %q: %w", templateFilePath, err)
+		}
+
+		templateFile, err := os.OpenFile(templateFilePath, os.O_WRONLY|os.O_CREATE, 0600)
+
+		if err != nil {
+			return fmt.Errorf("unable to open file %q: %w", templateFilePath, err)
+		}
+
+		defer func(f *os.File) {
+			err := f.Close()
+			if err != nil {
+				m.warnf("unable to close file %q: %q", f.Name(), err)
+			}
+		}(templateFile)
+
+		m.infof("extracting YAML frontmatter to %q", templateFilePath)
+		err = m.ExtractFrontMatter(data, relDir, templateFile)
+		if err != nil {
+			return fmt.Errorf("unable to extract front matter to %q: %w", templateFilePath, err)
+		}
+
+		m.infof("extracting code examples from %q", d.Name())
+		err = m.ExtractCodeExamples(data, exampleRelDir, templateFile)
+		if err != nil {
+			return fmt.Errorf("unable to extract code examples from %q: %w", templateFilePath, err)
+		}
+
+		return nil
+	}
+
+}
+
+func (m *migrator) ExtractFrontMatter(content []byte, relDir string, templateFile *os.File) error {
+	fileScanner := bufio.NewScanner(bytes.NewReader(content))
+	fileScanner.Split(bufio.ScanLines)
+
+	hasFirstLine := fileScanner.Scan()
+	if !hasFirstLine || fileScanner.Text() != "---" {
+		m.warnf("no frontmatter found in %q", templateFile.Name())
+		return nil
+	}
+	_, err := templateFile.WriteString(fileScanner.Text() + "\n")
+	if err != nil {
+		return fmt.Errorf("unable to append frontmatter to %q: %w", templateFile.Name(), err)
+	}
+	exited := false
+	for 
fileScanner.Scan() { + if strings.Contains(fileScanner.Text(), "layout:") { + // skip layout front matter + continue + } + _, err = templateFile.WriteString(fileScanner.Text() + "\n") + if err != nil { + return fmt.Errorf("unable to append frontmatter to %q: %w", templateFile.Name(), err) + } + if fileScanner.Text() == "---" { + exited = true + break + } + } + + if !exited { + return fmt.Errorf("cannot find ending of frontmatter block in %q", templateFile.Name()) + } + + // add comment to end of front matter briefly explaining template functionality + if relDir == "functions" { + _, err = templateFile.WriteString(migrateFunctionTemplateComment + "\n") + } else { + _, err = templateFile.WriteString(migrateProviderTemplateComment + "\n") + } + if err != nil { + return fmt.Errorf("unable to append template comment to %q: %w", templateFile.Name(), err) + } + + return nil +} + +func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templateFile *os.File) error { + md := newMarkdownRenderer() + p := md.Parser() + root := p.Parse(text.NewReader(content)) + + exampleCount := 0 + importCount := 0 + + err := ast.Walk(root, func(node ast.Node, enter bool) (ast.WalkStatus, error) { + // skip the root node + if !enter || node.Type() == ast.TypeDocument { + return ast.WalkContinue, nil + } + + if fencedNode, isFenced := node.(*ast.FencedCodeBlock); isFenced && fencedNode.Info != nil { + var ext, exampleName, examplePath, template string + + lang := string(fencedNode.Info.Text(content)[:]) + switch lang { + case "hcl", "terraform": + exampleCount++ + ext = ".tf" + exampleName = "example_" + strconv.Itoa(exampleCount) + ext + examplePath = filepath.Join(m.examplesDir, newRelDir, exampleName) + template = fmt.Sprintf("{{tffile \"%s\"}}", examplePath) + m.infof("creating example file %q", filepath.Join(m.providerDir, examplePath)) + case "console": + importCount++ + ext = ".sh" + exampleName = "import_" + strconv.Itoa(importCount) + ext + examplePath = filepath.Join(m.examplesDir, newRelDir, exampleName) + template = fmt.Sprintf("{{codefile \"shell\" \"%s\"}}", examplePath) + m.infof("creating import file %q", filepath.Join(m.providerDir, examplePath)) + default: + // Render node as is + m.infof("skipping code block with unknown language %q", lang) + err := md.Renderer().Render(templateFile, content, node) + if err != nil { + return ast.WalkStop, fmt.Errorf("unable to render node: %w", err) + } + return ast.WalkSkipChildren, nil + } + + // add code block text to buffer + codeBuf := bytes.Buffer{} + for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + _, _ = codeBuf.Write(line.Value(content)) + } + + // create example file from code block + err := writeFile(examplePath, codeBuf.String()) + if err != nil { + return ast.WalkStop, fmt.Errorf("unable to write file %q: %w", examplePath, err) + } + + // replace original code block with tfplugindocs template + _, err = templateFile.WriteString("\n\n" + template) + if err != nil { + return ast.WalkStop, fmt.Errorf("unable to write to template %q: %w", template, err) + } + + return ast.WalkSkipChildren, nil + } + + // Render non-code nodes as is + err := md.Renderer().Render(templateFile, content, node) + if err != nil { + return ast.WalkStop, fmt.Errorf("unable to render node: %w", err) + } + if node.HasChildren() { + return ast.WalkSkipChildren, nil + } + + return ast.WalkContinue, nil + }) + if err != nil { + return fmt.Errorf("unable to walk AST: %w", err) + } + + _, err = templateFile.WriteString("\n") + if err != nil { + 
return fmt.Errorf("unable to write to template %q: %w", templateFile.Name(), err) + } + m.infof("finished creating template %q", templateFile.Name()) + + return nil +} + +// ProviderWebsiteDir returns the absolute path to the joined provider and +// the website directory that templates will be migrated from, which defaults to either "website/docs/" or "docs". +func (m *migrator) ProviderWebsiteDir() string { + return filepath.Join(m.providerDir, m.websiteDir) +} + +// ProviderTemplatesDir returns the absolute path to the joined provider and +// given new templates directory, which defaults to "templates". +func (m *migrator) ProviderTemplatesDir() string { + return filepath.Join(m.providerDir, m.templatesDir) +} + +// ProviderExamplesDir returns the absolute path to the joined provider and +// given examples directory, which defaults to "examples". +func (m *migrator) ProviderExamplesDir() string { + return filepath.Join(m.providerDir, m.examplesDir) +} + +func determineWebsiteDir(providerDir string) (string, error) { + // Check for legacy website directory + providerWebsiteDirFileInfo, err := os.Stat(filepath.Join(providerDir, "website/docs")) + + if err != nil { + if os.IsNotExist(err) { + // Legacy website directory does not exist, check for docs directory + } else { + return "", fmt.Errorf("error getting information for provider website directory %q: %w", providerDir, err) + } + } else if providerWebsiteDirFileInfo.IsDir() { + return "website/docs", nil + } + + // Check for docs directory + providerDocsDirFileInfo, err := os.Stat(filepath.Join(providerDir, "docs")) + + if err != nil { + return "", fmt.Errorf("error getting information for provider docs directory %q: %w", providerDir, err) + } + + if providerDocsDirFileInfo.IsDir() { + return "docs", nil + } + + return "", fmt.Errorf("unable to determine website directory for provider %q", providerDir) + +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go new file mode 100644 index 00000000..3338fe98 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go @@ -0,0 +1,136 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package provider + +import ( + "context" + "fmt" + "os" + "os/exec" + "path/filepath" + "runtime" + + "github.com/hashicorp/go-version" + install "github.com/hashicorp/hc-install" + "github.com/hashicorp/hc-install/checkpoint" + "github.com/hashicorp/hc-install/fs" + "github.com/hashicorp/hc-install/product" + "github.com/hashicorp/hc-install/releases" + "github.com/hashicorp/hc-install/src" + "github.com/hashicorp/terraform-exec/tfexec" + tfjson "github.com/hashicorp/terraform-json" +) + +func TerraformProviderSchemaFromTerraform(ctx context.Context, providerName, providerDir, tfVersion string, l *Logger) (*tfjson.ProviderSchema, error) { + var err error + + shortName := providerShortName(providerName) + + tmpDir, err := os.MkdirTemp("", "tfws") + if err != nil { + return nil, fmt.Errorf("unable to create temporary provider install directory %q: %w", tmpDir, err) + } + defer os.RemoveAll(tmpDir) + + l.infof("compiling provider %q", shortName) + providerPath := fmt.Sprintf("plugins/registry.terraform.io/hashicorp/%s/0.0.1/%s_%s", shortName, runtime.GOOS, runtime.GOARCH) + outFile := filepath.Join(tmpDir, providerPath, fmt.Sprintf("terraform-provider-%s", shortName)) + switch runtime.GOOS { + case "windows": + outFile = outFile + ".exe" + } + buildCmd := exec.Command("go", "build", "-o", outFile) + buildCmd.Dir = providerDir + // TODO: constrain env here to make it a little safer? + _, err = runCmd(buildCmd) + if err != nil { + return nil, fmt.Errorf("unable to execute go build command: %w", err) + } + + err = writeFile(filepath.Join(tmpDir, "provider.tf"), fmt.Sprintf(` +provider %[1]q { +} +`, shortName)) + if err != nil { + return nil, fmt.Errorf("unable to write provider.tf file: %w", err) + } + + i := install.NewInstaller() + var sources []src.Source + if tfVersion != "" { + l.infof("downloading Terraform CLI binary version from releases.hashicorp.com: %s", tfVersion) + sources = []src.Source{ + &releases.ExactVersion{ + Product: product.Terraform, + Version: version.Must(version.NewVersion(tfVersion)), + InstallDir: tmpDir, + }, + } + } else { + l.infof("using Terraform CLI binary from PATH if available, otherwise downloading latest Terraform CLI binary") + sources = []src.Source{ + &fs.AnyVersion{ + Product: &product.Terraform, + }, + &checkpoint.LatestVersion{ + InstallDir: tmpDir, + Product: product.Terraform, + }, + } + } + + tfBin, err := i.Ensure(context.Background(), sources) + if err != nil { + return nil, fmt.Errorf("unable to download Terraform binary: %w", err) + } + + tf, err := tfexec.NewTerraform(tmpDir, tfBin) + if err != nil { + return nil, fmt.Errorf("unable to create new terraform exec instance: %w", err) + } + + l.infof("running terraform init") + err = tf.Init(ctx, tfexec.Get(false), tfexec.PluginDir("./plugins")) + if err != nil { + return nil, fmt.Errorf("unable to run terraform init on provider: %w", err) + } + + l.infof("getting provider schema") + schemas, err := tf.ProvidersSchema(ctx) + if err != nil { + return nil, fmt.Errorf("unable to retrieve provider schema from terraform exec: %w", err) + } + + if ps, ok := schemas.Schemas[shortName]; ok { + return ps, nil + } + + if ps, ok := schemas.Schemas["registry.terraform.io/hashicorp/"+shortName]; ok { + return ps, nil + } + + return nil, fmt.Errorf("unable to find schema in JSON for provider %q", shortName) +} + +func TerraformProviderSchemaFromFile(providerName, providersSchemaPath string, l *Logger) (*tfjson.ProviderSchema, error) { + var err error + + shortName := 
providerShortName(providerName) + + l.infof("getting provider schema") + schemas, err := extractSchemaFromFile(providersSchemaPath) + if err != nil { + return nil, fmt.Errorf("unable to retrieve provider schema from JSON file: %w", err) + } + + if ps, ok := schemas.Schemas[shortName]; ok { + return ps, nil + } + + if ps, ok := schemas.Schemas["registry.terraform.io/hashicorp/"+shortName]; ok { + return ps, nil + } + + return nil, fmt.Errorf("unable to find schema in JSON for provider %q", shortName) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go index 3d71b419..58766489 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go @@ -16,23 +16,27 @@ import ( tfjson "github.com/hashicorp/terraform-json" + "github.com/hashicorp/terraform-plugin-docs/internal/schemamd" + + "github.com/hashicorp/terraform-plugin-docs/internal/functionmd" "github.com/hashicorp/terraform-plugin-docs/internal/mdplain" "github.com/hashicorp/terraform-plugin-docs/internal/tmplfuncs" - "github.com/hashicorp/terraform-plugin-docs/schemamd" ) const ( - schemaComment = "<!-- schema generated by tfplugindocs -->" + schemaComment = "<!-- schema generated by tfplugindocs -->" + signatureComment = "<!-- signature generated by tfplugindocs -->" + argumentComment = "<!-- arguments generated by tfplugindocs -->" + variadicComment = "<!-- variadic argument generated by tfplugindocs -->" + frontmatterComment = "# generated by https://github.com/hashicorp/terraform-plugin-docs" ) type ( resourceTemplate string + functionTemplate string providerTemplate string - resourceFileTemplate string - providerFileTemplate string - docTemplate string ) @@ -116,38 +120,45 @@ func (t docTemplate) Render(providerDir string, out io.Writer) error { return renderTemplate(providerDir, "docTemplate", s, out, nil) } -func (t resourceFileTemplate) Render(providerDir, name, providerName string) (string, error) { +func (t providerTemplate) Render(providerDir, providerName, renderedProviderName, exampleFile string, schema *tfjson.Schema) (string, error) { + schemaBuffer := bytes.NewBuffer(nil) + err := schemamd.Render(schema, schemaBuffer) + if err != nil { + return "", fmt.Errorf("unable to render schema: %w", err) + } + s := string(t) if s == "" { return "", nil } - return renderStringTemplate(providerDir, "resourceFileTemplate", s, struct { - Name string - ShortName string + + return renderStringTemplate(providerDir, "providerTemplate", s, struct { + Description string + + HasExample bool + ExampleFile string ProviderName string ProviderShortName string + SchemaMarkdown string + + RenderedProviderName string }{ - Name: name, - ShortName: resourceShortName(name, providerName), + Description: schema.Block.Description, + + HasExample: exampleFile != "" && fileExists(exampleFile), + ExampleFile: exampleFile, ProviderName: providerName, ProviderShortName: providerShortName(providerName), - }) -} -func (t providerFileTemplate) Render(providerDir, name string) (string, error) { - s := string(t) - if s == "" { - return "", nil - } - return renderStringTemplate(providerDir, "providerFileTemplate", s, struct { - Name string - ShortName string - }{name, providerShortName(name)}) + SchemaMarkdown: schemaComment + "\n" + schemaBuffer.String(), + + RenderedProviderName: renderedProviderName, + }) } -func (t providerTemplate) Render(providerDir, providerName, renderedProviderName, exampleFile string, schema *tfjson.Schema) (string, error) { +func (t resourceTemplate) Render(providerDir, name, providerName, renderedProviderName, typeName,
exampleFile, importFile string, schema *tfjson.Schema) (string, error) { schemaBuffer := bytes.NewBuffer(nil) err := schemamd.Render(schema, schemaBuffer) if err != nil { @@ -159,12 +170,17 @@ func (t providerTemplate) Render(providerDir, providerName, renderedProviderName return "", nil } - return renderStringTemplate(providerDir, "providerTemplate", s, struct { + return renderStringTemplate(providerDir, "resourceTemplate", s, struct { + Type string + Name string Description string HasExample bool ExampleFile string + HasImport bool + ImportFile string + ProviderName string ProviderShortName string @@ -172,11 +188,16 @@ func (t providerTemplate) Render(providerDir, providerName, renderedProviderName RenderedProviderName string }{ + Type: typeName, + Name: name, Description: schema.Block.Description, HasExample: exampleFile != "" && fileExists(exampleFile), ExampleFile: exampleFile, + HasImport: importFile != "" && fileExists(importFile), + ImportFile: importFile, + ProviderName: providerName, ProviderShortName: providerShortName(providerName), @@ -186,11 +207,20 @@ func (t providerTemplate) Render(providerDir, providerName, renderedProviderName }) } -func (t resourceTemplate) Render(providerDir, name, providerName, renderedProviderName, typeName, exampleFile, importFile string, schema *tfjson.Schema) (string, error) { - schemaBuffer := bytes.NewBuffer(nil) - err := schemamd.Render(schema, schemaBuffer) +func (t functionTemplate) Render(providerDir, name, providerName, renderedProviderName, typeName, exampleFile string, signature *tfjson.FunctionSignature) (string, error) { + funcSig, err := functionmd.RenderSignature(name, signature) if err != nil { - return "", fmt.Errorf("unable to render schema: %w", err) + return "", fmt.Errorf("unable to render function signature: %w", err) + } + + funcArgs, err := functionmd.RenderArguments(signature) + if err != nil { + return "", fmt.Errorf("unable to render function arguments: %w", err) + } + + funcVarArg, err := functionmd.RenderVariadicArg(signature) + if err != nil { + return "", fmt.Errorf("unable to render variadic argument: %w", err) } s := string(t) @@ -202,34 +232,38 @@ func (t resourceTemplate) Render(providerDir, name, providerName, renderedProvid Type string Name string Description string + Summary string HasExample bool ExampleFile string - HasImport bool - ImportFile string - ProviderName string ProviderShortName string - SchemaMarkdown string + FunctionSignatureMarkdown string + FunctionArgumentsMarkdown string + + HasVariadic bool + FunctionVariadicArgumentMarkdown string RenderedProviderName string }{ Type: typeName, Name: name, - Description: schema.Block.Description, + Description: signature.Description, + Summary: signature.Summary, HasExample: exampleFile != "" && fileExists(exampleFile), ExampleFile: exampleFile, - HasImport: importFile != "" && fileExists(importFile), - ImportFile: importFile, - ProviderName: providerName, ProviderShortName: providerShortName(providerName), - SchemaMarkdown: schemaComment + "\n" + schemaBuffer.String(), + FunctionSignatureMarkdown: signatureComment + "\n" + funcSig, + FunctionArgumentsMarkdown: argumentComment + "\n" + funcArgs, + + HasVariadic: signature.VariadicParameter != nil, + FunctionVariadicArgumentMarkdown: variadicComment + "\n" + funcVarArg, RenderedProviderName: renderedProviderName, }) @@ -250,7 +284,7 @@ description: |- {{ if .HasExample -}} ## Example Usage -{{ printf "{{tffile %q}}" .ExampleFile }} +{{tffile .ExampleFile }} {{- end }} {{ .SchemaMarkdown | trimspace }} @@ 
-260,7 +294,37 @@ description: |- Import is supported using the following syntax: -{{ printf "{{codefile \"shell\" %q}}" .ImportFile }} +{{codefile "shell" .ImportFile }} +{{- end }} +` + +const defaultFunctionTemplate functionTemplate = `--- +` + frontmatterComment + ` +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ .Summary | plainmarkdown | trimspace | prefixlines " " }} +--- + +# {{.Type}}: {{.Name}} + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{tffile .ExampleFile }} +{{- end }} + +## Signature + +{{ .FunctionSignatureMarkdown }} + +## Arguments + +{{ .FunctionArgumentsMarkdown }} +{{ if .HasVariadic -}} +{{ .FunctionVariadicArgumentMarkdown }} {{- end }} ` @@ -279,8 +343,20 @@ description: |- {{ if .HasExample -}} ## Example Usage -{{ printf "{{tffile %q}}" .ExampleFile }} +{{tffile .ExampleFile }} {{- end }} {{ .SchemaMarkdown | trimspace }} ` + +const migrateProviderTemplateComment string = ` +{{/* This template serves as a starting point for documentation generation, and can be customized with hardcoded values and/or doc gen templates. + +For example, the {{ .SchemaMarkdown }} template can be used to replace manual schema documentation if descriptions of schema attributes are added in the provider source code. */ -}} +` + +const migrateFunctionTemplateComment string = ` +{{/* This template serves as a starting point for documentation generation, and can be customized with hardcoded values and/or doc gen templates. + +For example, the {{ .FunctionArgumentsMarkdown }} template can be used to replace manual argument documentation if descriptions of function arguments are added in the provider source code. */ -}} +` diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go index 06aee6b1..7a3ec336 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go @@ -12,7 +12,12 @@ import ( "path/filepath" "strings" + "github.com/Kunde21/markdownfmt/v3/markdown" tfjson "github.com/hashicorp/terraform-json" + "github.com/yuin/goldmark" + meta "github.com/yuin/goldmark-meta" + "github.com/yuin/goldmark/extension" + "github.com/yuin/goldmark/parser" ) func providerShortName(n string) string { @@ -31,6 +36,12 @@ func copyFile(srcPath, dstPath string, mode os.FileMode) error { } defer srcFile.Close() + // Ensure destination path exists for file creation + err = os.MkdirAll(filepath.Dir(dstPath), 0755) + if err != nil { + return err + } + // If the destination file already exists, we shouldn't blow it away dstFile, err := os.OpenFile(dstPath, os.O_WRONLY|os.O_CREATE|os.O_EXCL, mode) if err != nil { @@ -75,9 +86,13 @@ func resourceSchema(schemas map[string]*tfjson.Schema, providerShortName, templa func writeFile(path string, data string) error { dir, _ := filepath.Split(path) - err := os.MkdirAll(dir, 0755) - if err != nil { - return fmt.Errorf("unable to make dir %q: %w", dir, err) + + var err error + if dir != "" { + err = os.MkdirAll(dir, 0755) + if err != nil { + return fmt.Errorf("unable to make dir %q: %w", dir, err) + } } err = os.WriteFile(path, []byte(data), 0644) @@ -88,6 +103,7 @@ func writeFile(path string, data string) error { return nil } +//nolint:unparam func runCmd(cmd *exec.Cmd) ([]byte, error) { output, err := cmd.CombinedOutput() if err != nil { @@ -136,3 +152,39 @@ func 
fileExists(filename string) bool { } return !info.IsDir() } + +func extractSchemaFromFile(path string) (*tfjson.ProviderSchemas, error) { + schemajson, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("unable to read file %q: %w", path, err) + } + + schemas := &tfjson.ProviderSchemas{ + FormatVersion: "", + Schemas: nil, + } + err = schemas.UnmarshalJSON(schemajson) + if err != nil { + return nil, err + } + + return schemas, nil +} + +func newMarkdownRenderer() goldmark.Markdown { + mr := markdown.NewRenderer() + extensions := []goldmark.Extender{ + extension.GFM, + meta.Meta, // We need this to skip YAML frontmatter when parsing. + } + parserOptions := []parser.Option{ + parser.WithAttribute(), // We need this to enable # headers {#custom-ids}. + } + + gm := goldmark.New( + goldmark.WithExtensions(extensions...), + goldmark.WithParserOptions(parserOptions...), + goldmark.WithRenderer(mr), + ) + return gm +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go index 329e05d1..a72be373 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go @@ -4,267 +4,363 @@ package provider import ( + "context" + "errors" "fmt" + "log" "os" "path/filepath" - "strings" - "github.com/mitchellh/cli" + "github.com/bmatcuk/doublestar/v4" + "github.com/hashicorp/cli" + tfjson "github.com/hashicorp/terraform-json" + + "github.com/hashicorp/terraform-plugin-docs/internal/check" ) -func Validate(ui cli.Ui) error { - dirExists := func(name string) bool { - if _, err := os.Stat(name); err != nil { - return false - } +const ( + FileExtensionHtmlMarkdown = `.html.markdown` + FileExtensionHtmlMd = `.html.md` + FileExtensionMarkdown = `.markdown` + FileExtensionMd = `.md` - return true - } + DocumentationGlobPattern = `{docs/index.md,docs/{,cdktf/}{data-sources,guides,resources,functions}/**/*,website/docs/**/*}` + DocumentationDirGlobPattern = `{docs/{,cdktf/}{data-sources,guides,resources,functions}{,/*},website/docs/**/*}` +) - switch { - default: - ui.Warn("no website detected, exiting") - case dirExists("templates"): - ui.Info("detected templates directory, running checks...") - err := validateTemplates(ui, "templates") - if err != nil { - return err - } - if dirExists("examples") { - ui.Info("detected examples directory for templates, running checks...") - err = validateExamples(ui, "examples") - if err != nil { - return err - } - } - return err - case dirExists("docs"): - ui.Info("detected static docs directory, running checks") - return validateStaticDocs(ui, "docs") - case dirExists("website"): - ui.Info("detected legacy website directory, running checks") - return validateLegacyWebsite(ui, "website") - } +var ValidLegacyFileExtensions = []string{ + FileExtensionHtmlMarkdown, + FileExtensionHtmlMd, + FileExtensionMarkdown, + FileExtensionMd, +} - return nil +var ValidRegistryFileExtensions = []string{ + FileExtensionMd, } -func validateExamples(ui cli.Ui, dir string) error { - return nil +var LegacyFrontMatterOptions = &check.FrontMatterOptions{ + NoSidebarCurrent: true, + RequireDescription: true, + RequireLayout: true, + RequirePageTitle: true, } -func validateTemplates(ui cli.Ui, dir string) error { - checks := []check{ - checkAllowedFiles( - "index.md", - "index.md.tmpl", - ), - checkAllowedDirs( - "data-sources", - "guides", - "resources", - ), - 
checkBlockedExtensions( - ".html.md.tmpl", - ), - checkAllowedExtensions( - ".md", - ".md.tmpl", - ), - } - issues := []issue{} - for _, c := range checks { - checkIssues, err := c(dir) +var LegacyIndexFrontMatterOptions = &check.FrontMatterOptions{ + NoSidebarCurrent: true, + NoSubcategory: true, + RequireDescription: true, + RequireLayout: true, + RequirePageTitle: true, +} + +var LegacyGuideFrontMatterOptions = &check.FrontMatterOptions{ + NoSidebarCurrent: true, + RequireDescription: true, + RequireLayout: true, + RequirePageTitle: true, +} + +var RegistryFrontMatterOptions = &check.FrontMatterOptions{ + NoLayout: true, + NoSidebarCurrent: true, +} + +var RegistryIndexFrontMatterOptions = &check.FrontMatterOptions{ + NoLayout: true, + NoSidebarCurrent: true, + NoSubcategory: true, +} + +var RegistryGuideFrontMatterOptions = &check.FrontMatterOptions{ + NoLayout: true, + NoSidebarCurrent: true, + RequirePageTitle: true, +} + +type validator struct { + providerName string + providerDir string + providersSchemaPath string + + tfVersion string + providerSchema *tfjson.ProviderSchema + + logger *Logger +} + +func Validate(ui cli.Ui, providerDir, providerName, providersSchemaPath, tfversion string) error { + // Ensure provider directory is resolved absolute path + if providerDir == "" { + wd, err := os.Getwd() + if err != nil { - return err + return fmt.Errorf("error getting working directory: %w", err) + } + + providerDir = wd + } else { + absProviderDir, err := filepath.Abs(providerDir) + + if err != nil { + return fmt.Errorf("error getting absolute path with provider directory %q: %w", providerDir, err) } - issues = append(issues, checkIssues...) + + providerDir = absProviderDir } - for _, issue := range issues { - ui.Warn(fmt.Sprintf("%s: %s", issue.file, issue.message)) + + // Verify provider directory + providerDirFileInfo, err := os.Stat(providerDir) + + if err != nil { + return fmt.Errorf("error getting information for provider directory %q: %w", providerDir, err) + } + + if !providerDirFileInfo.IsDir() { + return fmt.Errorf("expected %q to be a directory", providerDir) } - if len(issues) > 0 { - return fmt.Errorf("invalid templates directory") + + v := &validator{ + providerName: providerName, + providerDir: providerDir, + providersSchemaPath: providersSchemaPath, + tfVersion: tfversion, + + logger: NewLogger(ui), } - return nil + + ctx := context.Background() + + return v.validate(ctx) } -func validateStaticDocs(ui cli.Ui, dir string) error { - checks := []check{ - checkAllowedFiles( - "index.md", - ), - checkAllowedDirs( - "data-sources", - "guides", - "resources", - "cdktf", - ), - checkBlockedExtensions( - ".html.md.tmpl", - ".html.md", - ".md.tmpl", - ), - checkAllowedExtensions( - ".md", - ), +func (v *validator) validate(ctx context.Context) error { + var result error + + var err error + + if v.providerName == "" { + v.providerName = filepath.Base(v.providerDir) } - issues := []issue{} - for _, c := range checks { - checkIssues, err := c(dir) + + if v.providersSchemaPath == "" { + v.logger.infof("exporting schema from Terraform") + v.providerSchema, err = TerraformProviderSchemaFromTerraform(ctx, v.providerName, v.providerDir, v.tfVersion, v.logger) if err != nil { - return err + return fmt.Errorf("error exporting provider schema from Terraform: %w", err) + } + } else { + v.logger.infof("exporting schema from JSON file") + v.providerSchema, err = TerraformProviderSchemaFromFile(v.providerName, v.providersSchemaPath, v.logger) + if err != nil { + return fmt.Errorf("error 
exporting provider schema from JSON file: %w", err) + } + } + + providerFs := os.DirFS(v.providerDir) + + files, globErr := doublestar.Glob(providerFs, DocumentationGlobPattern) + if globErr != nil { + return fmt.Errorf("error finding documentation files: %w", globErr) + } + + log.Printf("[DEBUG] Found documentation files %v", files) + + v.logger.infof("running mixed directories check") + err = check.MixedDirectoriesCheck(files) + result = errors.Join(result, err) + + if dirExists(filepath.Join(v.providerDir, "docs")) { + v.logger.infof("detected static docs directory, running checks") + err = v.validateStaticDocs(filepath.Join(v.providerDir, "docs")) + result = errors.Join(result, err) + + } + if dirExists(filepath.Join(v.providerDir, filepath.Join("website", "docs"))) { + v.logger.infof("detected legacy website directory, running checks") + err = v.validateLegacyWebsite(filepath.Join(v.providerDir, "website/docs")) + result = errors.Join(result, err) + } - return nil -} -func validateLegacyWebsite(ui cli.Ui, dir string) error { - panic("not implemented") + return result } -type issue struct { - file string - message string -} +func (v *validator) validateStaticDocs(dir string) error { -type check func(dir string) ([]issue, error) + var result error -func checkBlockedExtensions(exts ...string) check { - return func(dir string) ([]issue, error) { - issues := []issue{} - err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if info.IsDir() { - return nil - } - for _, ext := range exts { - if strings.HasSuffix(path, ext) { - _, file := filepath.Split(path) - issues = append(issues, issue{ - file: path, - message: fmt.Sprintf("the extension for %q is not supported", file), - }) - break - } - } - return nil - }) + options := &check.ProviderFileOptions{ + FrontMatter: RegistryFrontMatterOptions, + ValidExtensions: ValidRegistryFileExtensions, + } + + var files []string + + err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error { if err != nil { - return nil, err + return fmt.Errorf("error walking directory %q: %w", dir, err) } - return issues, nil - } -} -func checkAllowedExtensions(exts ...string) check { - return func(dir string) ([]issue, error) { - issues := []issue{} - err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + rel, err := filepath.Rel(v.providerDir, path) + if err != nil { + return err + } + if d.IsDir() { + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationDirGlobPattern), rel) if err != nil { return err } - if info.IsDir() { - return nil - } - valid := false - for _, ext := range exts { - if strings.HasSuffix(path, ext) { - valid = true - break - } - } - if !valid { - _, file := filepath.Split(path) - issues = append(issues, issue{ - file: path, - message: fmt.Sprintf("the extension for %q is not expected", file), - }) + if !match { + return nil // skip valid non-documentation directories } + + v.logger.infof("running invalid directories check on %s", rel) + result = errors.Join(result, check.InvalidDirectoriesCheck(rel)) return nil - }) + } + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationGlobPattern), rel) if err != nil { - return nil, err + return err + } + if !match { + return nil // skip valid non-documentation files } -
return issues, nil + + // Configure FrontMatterOptions based on file type + if d.Name() == "index.md" { + options.FrontMatter = RegistryIndexFrontMatterOptions + } else if _, relErr := filepath.Rel(rel, "guides"); relErr != nil { + options.FrontMatter = RegistryGuideFrontMatterOptions + } else { + options.FrontMatter = RegistryFrontMatterOptions + } + v.logger.infof("running file checks on %s", rel) + result = errors.Join(result, check.NewProviderFileCheck(options).Run(path)) + + files = append(files, path) + return nil + }) + if err != nil { + return fmt.Errorf("error walking directory %q: %w", dir, err) } -} -func checkAllowedDirs(dirs ...string) check { - allowedDirs := map[string]bool{} - for _, d := range dirs { - allowedDirs[d] = true + mismatchOpt := &check.FileMismatchOptions{ + ProviderShortName: providerShortName(v.providerName), + Schema: v.providerSchema, + } + + if dirExists(filepath.Join(dir, "data-sources")) { + dataSourceFiles, _ := os.ReadDir(filepath.Join(dir, "data-sources")) + mismatchOpt.DatasourceEntries = dataSourceFiles + } + if dirExists(filepath.Join(dir, "resources")) { + resourceFiles, _ := os.ReadDir(filepath.Join(dir, "resources")) + mismatchOpt.ResourceEntries = resourceFiles + } + if dirExists(filepath.Join(dir, "functions")) { + functionFiles, _ := os.ReadDir(filepath.Join(dir, "functions")) + mismatchOpt.FunctionEntries = functionFiles } - return func(dir string) ([]issue, error) { - issues := []issue{} + v.logger.infof("running file mismatch check") + if err := check.NewFileMismatchCheck(mismatchOpt).Run(); err != nil { + result = errors.Join(result, err) + } + + return result +} + +func (v *validator) validateLegacyWebsite(dir string) error { + + var result error + + options := &check.ProviderFileOptions{ + FrontMatter: LegacyFrontMatterOptions, + ValidExtensions: ValidLegacyFileExtensions, + } - f, err := os.Open(dir) + var files []string + err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error { if err != nil { - return nil, err + return fmt.Errorf("error walking directory %q: %w", dir, err) } - infos, err := f.Readdir(-1) + + rel, err := filepath.Rel(v.providerDir, path) if err != nil { - return nil, err + return err } - - for _, fi := range infos { - if !fi.IsDir() { - continue + if d.IsDir() { + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationDirGlobPattern), rel) + if err != nil { + return err } - - if !allowedDirs[fi.Name()] { - issues = append(issues, issue{ - file: filepath.Join(dir, fi.Name()), - message: fmt.Sprintf("directory %q is not allowed", fi.Name()), - }) + if !match { + return nil // skip valid non-documentation directories } + + v.logger.infof("running invalid directories check on %s", rel) + result = errors.Join(result, check.InvalidDirectoriesCheck(rel)) + return nil } - return issues, nil - } -} + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationGlobPattern), rel) + if err != nil { + return err + } + if !match { + return nil // skip non-documentation files + } + + // Configure FrontMatterOptions based on file type + if d.Name() == "index.md" { + options.FrontMatter = LegacyIndexFrontMatterOptions + } else if _, relErr := filepath.Rel(rel, "guides"); relErr != nil { + options.FrontMatter = LegacyGuideFrontMatterOptions + } else { + options.FrontMatter = LegacyFrontMatterOptions + } + v.logger.infof("running file checks on %s", rel) + result = errors.Join(result, check.NewProviderFileCheck(options).Run(path)) -func checkAllowedFiles(dirs ...string) check { - allowedFiles 
:= map[string]bool{} - for _, d := range dirs { - allowedFiles[d] = true + files = append(files, path) + return nil + }) + if err != nil { + return fmt.Errorf("error walking directory %q: %w", dir, err) } - return func(dir string) ([]issue, error) { - issues := []issue{} + mismatchOpt := &check.FileMismatchOptions{ + ProviderShortName: providerShortName(v.providerName), + Schema: v.providerSchema, + } - f, err := os.Open(dir) - if err != nil { - return nil, err - } - infos, err := f.Readdir(-1) - if err != nil { - return nil, err - } + if dirExists(filepath.Join(dir, "d")) { + dataSourceFiles, _ := os.ReadDir(filepath.Join(dir, "d")) + mismatchOpt.DatasourceEntries = dataSourceFiles + } + if dirExists(filepath.Join(dir, "r")) { + resourceFiles, _ := os.ReadDir(filepath.Join(dir, "r")) + mismatchOpt.ResourceEntries = resourceFiles + } + if dirExists(filepath.Join(dir, "functions")) { + functionFiles, _ := os.ReadDir(filepath.Join(dir, "functions")) + mismatchOpt.FunctionEntries = functionFiles + } - for _, fi := range infos { - if fi.IsDir() { - continue - } + v.logger.infof("running file mismatch check") + if err := check.NewFileMismatchCheck(mismatchOpt).Run(); err != nil { + result = errors.Join(result, err) + } - if !allowedFiles[fi.Name()] { - issues = append(issues, issue{ - file: filepath.Join(dir, fi.Name()), - message: fmt.Sprintf("file %q is not allowed", fi.Name()), - }) - } - } + return result +} - return issues, nil +func dirExists(name string) bool { + if file, err := os.Stat(name); err != nil { + return false + } else if !file.IsDir() { + return false } + + return true } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/behaviors.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/behaviors.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/behaviors.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/behaviors.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/render.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/render.go similarity index 90% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/render.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/render.go index 90617d86..a459d0ba 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/render.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/render.go @@ -57,11 +57,12 @@ var ( ) type nestedType struct { - anchorID string - path []string - block *tfjson.SchemaBlock - object *cty.Type - attrs *tfjson.SchemaNestedAttributeType + anchorID string + pathTitle string + path []string + block *tfjson.SchemaBlock + object *cty.Type + attrs *tfjson.SchemaNestedAttributeType group groupFilter } @@ -87,6 +88,7 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro } anchorID := "nestedatt--" + strings.Join(path, "--") + pathTitle := strings.Join(path, ".") nestedTypes := []nestedType{} switch { case att.AttributeNestedType != nil: @@ -96,9 +98,10 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro } nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - attrs: att.AttributeNestedType, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + attrs: att.AttributeNestedType, group: group, }) @@ -109,9 +112,10 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, 
gro } nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &att.AttributeType, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &att.AttributeType, group: group, }) @@ -123,9 +127,10 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro nt := att.AttributeType.ElementType() nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &nt, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &nt, group: group, }) @@ -153,10 +158,12 @@ func writeBlockType(w io.Writer, path []string, block *tfjson.SchemaBlockType) ( } anchorID := "nestedblock--" + strings.Join(path, "--") + pathTitle := strings.Join(path, ".") nt := nestedType{ - anchorID: anchorID, - path: path, - block: block.Block, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + block: block.Block, } _, err = io.WriteString(w, " (see [below for nested schema](#"+anchorID+"))") @@ -231,7 +238,7 @@ nameLoop: // // If a `.Description` is provided instead, the behaviour will be the // same as for every other attribute. - if strings.ToLower(n) == "id" && childAtt.Description == "" { + if strings.ToLower(n) == "id" && len(parents) == 0 && childAtt.Description == "" { if strings.Contains(gf.topLevelTitle, "Read-Only") { childAtt.Description = "The ID of this resource." groups[i] = append(groups[i], n) @@ -344,7 +351,7 @@ func writeNestedTypes(w io.Writer, nestedTypes []nestedType) error { return err } - _, err = io.WriteString(w, "### Nested Schema for `"+strings.Join(nt.path, ".")+"`\n\n") + _, err = io.WriteString(w, "### Nested Schema for `"+nt.pathTitle+"`\n\n") if err != nil { return err } @@ -401,6 +408,7 @@ func writeObjectAttribute(w io.Writer, path []string, att cty.Type, group groupF } anchorID := "nestedobjatt--" + strings.Join(path, "--") + pathTitle := strings.Join(path, ".") nestedTypes := []nestedType{} switch { case att.IsObjectType(): @@ -410,9 +418,10 @@ func writeObjectAttribute(w io.Writer, path []string, att cty.Type, group groupF } nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &att, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &att, group: group, }) @@ -424,9 +433,10 @@ func writeObjectAttribute(w io.Writer, path []string, att cty.Type, group groupF nt := att.ElementType() nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &nt, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &nt, group: group, }) @@ -456,7 +466,9 @@ func writeObjectChildren(w io.Writer, parents []string, ty cty.Type, group group for _, name := range sortedNames { att := atts[name] - path := append(parents, name) + path := make([]string, len(parents), len(parents)+1) + copy(path, parents) + path = append(path, name) nt, err := writeObjectAttribute(w, path, att, group) if err != nil { @@ -512,7 +524,9 @@ func writeNestedAttributeChildren(w io.Writer, parents []string, nestedAttribute for _, name := range names { att := nestedAttributes.Attributes[name] - path := append(parents, name) + path := make([]string, len(parents), len(parents)+1) + copy(path, parents) + path = append(path, name) nt, err := writeAttribute(w, path, att, group) if err != nil { diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_attribute_description.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_attribute_description.go similarity index 100% rename 
from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_attribute_description.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_attribute_description.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_block_type_description.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_block_type_description.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_block_type_description.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_block_type_description.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_nested_attribute_type_description.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_nested_attribute_type_description.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_nested_attribute_type_description.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_nested_attribute_type_description.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_type.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_type.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_type.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_type.go diff --git a/vendor/github.com/mattn/go-runewidth/.travis.yml b/vendor/github.com/mattn/go-runewidth/.travis.yml new file mode 100644 index 00000000..6a21813a --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/.travis.yml @@ -0,0 +1,16 @@ +language: go +sudo: false +go: + - 1.13.x + - tip + +before_install: + - go get -t -v ./... + +script: + - go generate + - git diff --cached --exit-code + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/mattn/go-runewidth/LICENSE b/vendor/github.com/mattn/go-runewidth/LICENSE new file mode 100644 index 00000000..91b5cef3 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
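A note on the make/copy/append change in the schemamd render.go hunks above: it avoids a classic Go slice-aliasing pitfall. When `parents` has spare capacity, sibling `append(parents, name)` calls reuse the same backing array, so one child's path can silently overwrite another's. A minimal standalone sketch of the failure mode and the patched pattern (editorial illustration only; names are hypothetical and not part of this patch):

package main

import "fmt"

func main() {
	// A shared prefix with spare capacity, like a reused attribute path.
	parents := make([]string, 1, 3)
	parents[0] = "block"

	a := append(parents, "attr_a") // writes into parents' backing array
	b := append(parents, "attr_b") // reuses the same slot, clobbering a[1]
	fmt.Println(a[1], b[1])        // prints: attr_b attr_b

	// The patched pattern copies before appending, so siblings stay independent.
	c := make([]string, len(parents), len(parents)+1)
	copy(c, parents)
	c = append(c, "attr_c")
	fmt.Println(b[1], c[1]) // prints: attr_b attr_c
}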
diff --git a/vendor/github.com/mattn/go-runewidth/README.md b/vendor/github.com/mattn/go-runewidth/README.md new file mode 100644 index 00000000..aa56ab96 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/README.md @@ -0,0 +1,27 @@ +go-runewidth +============ + +[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth) +[![Codecov](https://codecov.io/gh/mattn/go-runewidth/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-runewidth) +[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth) +[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-runewidth)](https://goreportcard.com/report/github.com/mattn/go-runewidth) + +Provides functions to get the fixed width of a character or string. + +Usage +----- + +```go +runewidth.StringWidth("つのだ☆HIRO") == 12 +``` + + +Author +------ + +Yasuhiro Matsumoto + +License +------- + +under the MIT License: http://mattn.mit-license.org/2013 diff --git a/vendor/github.com/mattn/go-runewidth/go.test.sh b/vendor/github.com/mattn/go-runewidth/go.test.sh new file mode 100644 index 00000000..012162b0 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go new file mode 100644 index 00000000..19f8e044 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth.go @@ -0,0 +1,257 @@ +package runewidth + +import ( + "os" +) + +//go:generate go run script/generate.go + +var ( + // EastAsianWidth will be set true if the current locale is CJK + EastAsianWidth bool + + // ZeroWidthJoiner is a flag to set to use UTR#51 ZWJ + ZeroWidthJoiner bool + + // DefaultCondition is a condition for the current locale + DefaultCondition = &Condition{} +) + +func init() { + handleEnv() +} + +func handleEnv() { + env := os.Getenv("RUNEWIDTH_EASTASIAN") + if env == "" { + EastAsianWidth = IsEastAsian() + } else { + EastAsianWidth = env == "1" + } + // update DefaultCondition + DefaultCondition.EastAsianWidth = EastAsianWidth + DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner +} + +type interval struct { + first rune + last rune +} + +type table []interval + +func inTables(r rune, ts ...table) bool { + for _, t := range ts { + if inTable(r, t) { + return true + } + } + return false +} + +func inTable(r rune, t table) bool { + if r < t[0].first { + return false + } + + bot := 0 + top := len(t) - 1 + for top >= bot { + mid := (bot + top) >> 1 + + switch { + case t[mid].last < r: + bot = mid + 1 + case t[mid].first > r: + top = mid - 1 + default: + return true + } + } + + return false +} + +var private = table{ + {0x00E000, 0x00F8FF}, {0x0F0000, 0x0FFFFD}, {0x100000, 0x10FFFD}, +} + +var nonprint = table{ + {0x0000, 0x001F}, {0x007F, 0x009F}, {0x00AD, 0x00AD}, + {0x070F, 0x070F}, {0x180B, 0x180E}, {0x200B, 0x200F}, + {0x2028, 0x202E}, {0x206A, 0x206F}, {0xD800, 0xDFFF}, + {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF}, +} + +// Condition has the flag EastAsianWidth indicating whether the current locale is CJK or not.
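+//
+// A hedged editorial usage sketch (illustration only, based on the API in
+// this file; not part of the vendored source): with EastAsianWidth enabled,
+// ambiguous-width runes such as U+00A7 count as two cells:
+//
+//	c := NewCondition()
+//	c.EastAsianWidth = true
+//	w := c.StringWidth("§") // w == 2 here; w == 1 when EastAsianWidth is false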
+type Condition struct { + EastAsianWidth bool + ZeroWidthJoiner bool +} + +// NewCondition returns a new instance of Condition based on the current locale. +func NewCondition() *Condition { + return &Condition{ + EastAsianWidth: EastAsianWidth, + ZeroWidthJoiner: ZeroWidthJoiner, + } +} + +// RuneWidth returns the number of cells in r. +// See http://www.unicode.org/reports/tr11/ +func (c *Condition) RuneWidth(r rune) int { + switch { + case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned): + return 0 + case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth): + return 2 + default: + return 1 + } +} + +func (c *Condition) stringWidth(s string) (width int) { + for _, r := range []rune(s) { + width += c.RuneWidth(r) + } + return width +} + +func (c *Condition) stringWidthZeroJoiner(s string) (width int) { + r1, r2 := rune(0), rune(0) + for _, r := range []rune(s) { + if r == 0xFE0E || r == 0xFE0F { + continue + } + w := c.RuneWidth(r) + if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) { + if width < w { + width = w + } + } else { + width += w + } + r1, r2 = r2, r + } + return width +} + +// StringWidth returns the display width of the string. +func (c *Condition) StringWidth(s string) (width int) { + if c.ZeroWidthJoiner { + return c.stringWidthZeroJoiner(s) + } + return c.stringWidth(s) +} + +// Truncate returns the string truncated to w cells, with tail appended. +func (c *Condition) Truncate(s string, w int, tail string) string { + if c.StringWidth(s) <= w { + return s + } + r := []rune(s) + tw := c.StringWidth(tail) + w -= tw + width := 0 + i := 0 + for ; i < len(r); i++ { + cw := c.RuneWidth(r[i]) + if width+cw > w { + break + } + width += cw + } + return string(r[0:i]) + tail +} + +// Wrap returns the string wrapped at w cells. +func (c *Condition) Wrap(s string, w int) string { + width := 0 + out := "" + for _, r := range []rune(s) { + cw := c.RuneWidth(r) + if r == '\n' { + out += string(r) + width = 0 + continue + } else if width+cw > w { + out += "\n" + width = 0 + out += string(r) + width += cw + continue + } + out += string(r) + width += cw + } + return out +} + +// FillLeft returns the string left-padded with spaces to w cells. +func (c *Condition) FillLeft(s string, w int) string { + width := c.StringWidth(s) + count := w - width + if count > 0 { + b := make([]byte, count) + for i := range b { + b[i] = ' ' + } + return string(b) + s + } + return s +} + +// FillRight returns the string right-padded with spaces to w cells. +func (c *Condition) FillRight(s string, w int) string { + width := c.StringWidth(s) + count := w - width + if count > 0 { + b := make([]byte, count) + for i := range b { + b[i] = ' ' + } + return s + string(b) + } + return s +} + +// RuneWidth returns the number of cells in r. +// See http://www.unicode.org/reports/tr11/ +func RuneWidth(r rune) int { + return DefaultCondition.RuneWidth(r) +} + +// IsAmbiguousWidth returns whether r is ambiguous width or not. +func IsAmbiguousWidth(r rune) bool { + return inTables(r, private, ambiguous) +} + +// IsNeutralWidth returns whether r is neutral width or not.
+func IsNeutralWidth(r rune) bool { + return inTable(r, neutral) +} + +// StringWidth returns the display width of the string. +func StringWidth(s string) (width int) { + return DefaultCondition.StringWidth(s) +} + +// Truncate returns the string truncated to w cells, with tail appended. +func Truncate(s string, w int, tail string) string { + return DefaultCondition.Truncate(s, w, tail) +} + +// Wrap returns the string wrapped at w cells. +func Wrap(s string, w int) string { + return DefaultCondition.Wrap(s, w) +} + +// FillLeft returns the string left-padded with spaces to w cells. +func FillLeft(s string, w int) string { + return DefaultCondition.FillLeft(s, w) +} + +// FillRight returns the string right-padded with spaces to w cells. +func FillRight(s string, w int) string { + return DefaultCondition.FillRight(s, w) +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go new file mode 100644 index 00000000..7d99f6e5 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go @@ -0,0 +1,8 @@ +// +build appengine + +package runewidth + +// IsEastAsian returns true if the current locale is CJK +func IsEastAsian() bool { + return false +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go new file mode 100644 index 00000000..c5fdf40b --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go @@ -0,0 +1,9 @@ +// +build js +// +build !appengine + +package runewidth + +func IsEastAsian() bool { + // TODO: Implement this for the web. Detect east asian in a compatible way, and return true. + return false +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go new file mode 100644 index 00000000..480ad748 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go @@ -0,0 +1,82 @@ +// +build !windows +// +build !js +// +build !appengine + +package runewidth + +import ( + "os" + "regexp" + "strings" +) + +var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`) + +var mblenTable = map[string]int{ + "utf-8": 6, + "utf8": 6, + "jis": 8, + "eucjp": 3, + "euckr": 2, + "euccn": 2, + "sjis": 2, + "cp932": 2, + "cp51932": 2, + "cp936": 2, + "cp949": 2, + "cp950": 2, + "big5": 2, + "gbk": 2, + "gb2312": 2, +} + +func isEastAsian(locale string) bool { + charset := strings.ToLower(locale) + r := reLoc.FindStringSubmatch(locale) + if len(r) == 2 { + charset = strings.ToLower(r[1]) + } + + if strings.HasSuffix(charset, "@cjk_narrow") { + return false + } + + for pos, b := range []byte(charset) { + if b == '@' { + charset = charset[:pos] + break + } + } + max := 1 + if m, ok := mblenTable[charset]; ok { + max = m + } + if max > 1 && (charset[0] != 'u' || + strings.HasPrefix(locale, "ja") || + strings.HasPrefix(locale, "ko") || + strings.HasPrefix(locale, "zh")) { + return true + } + return false +} + +// IsEastAsian returns true if the current locale is CJK +func IsEastAsian() bool { + locale := os.Getenv("LC_ALL") + if locale == "" { + locale = os.Getenv("LC_CTYPE") + } + if locale == "" { + locale = os.Getenv("LANG") + } + + // ignore C locale + if locale == "POSIX" || locale == "C" { + return false + } + if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.'
|| locale[1] == '-') { + return false + } + + return isEastAsian(locale) +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go new file mode 100644 index 00000000..b27d77d8 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go @@ -0,0 +1,437 @@ +// Code generated by script/generate.go. DO NOT EDIT. + +package runewidth + +var combining = table{ + {0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3}, + {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01}, + {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0}, + {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF}, + {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF}, + {0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D}, + {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1}, + {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A}, + {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301}, + {0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374}, + {0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, + {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, + {0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, + {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, + {0x1E8D0, 0x1E8D6}, +} + +var doublewidth = table{ + {0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A}, + {0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3}, + {0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653}, + {0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1}, + {0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5}, + {0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA}, + {0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA}, + {0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B}, + {0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E}, + {0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797}, + {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C}, + {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99}, + {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB}, + {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF}, + {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3}, + {0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF}, + {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C}, + {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19}, + {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, + {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4}, + {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5}, + {0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, + {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004}, + {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A}, + {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, + {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320}, + {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, + {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0}, + {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, + {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E}, + {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596}, + {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5}, + {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7}, + {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, + {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978}, + {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74}, + {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8}, + {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 
0x1FAD6}, + {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD}, +} + +var ambiguous = table{ + {0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8}, + {0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4}, + {0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6}, + {0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1}, + {0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED}, + {0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA}, + {0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101}, + {0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B}, + {0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133}, + {0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144}, + {0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153}, + {0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE}, + {0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4}, + {0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA}, + {0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261}, + {0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB}, + {0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB}, + {0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F}, + {0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1}, + {0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F}, + {0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016}, + {0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022}, + {0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033}, + {0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E}, + {0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084}, + {0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105}, + {0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116}, + {0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B}, + {0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B}, + {0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199}, + {0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4}, + {0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203}, + {0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F}, + {0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A}, + {0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225}, + {0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237}, + {0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C}, + {0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267}, + {0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283}, + {0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299}, + {0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312}, + {0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573}, + {0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1}, + {0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7}, + {0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8}, + {0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5}, + {0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609}, + {0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E}, + {0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661}, + {0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D}, + {0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF}, + {0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1}, + {0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1}, + {0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC}, + {0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F}, + {0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF}, + {0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A}, + {0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D}, + {0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF}, + {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD}, +} +var notassigned = table{ + {0x27E6, 0x27ED}, {0x2985, 0x2986}, +} + +var neutral = table{ + {0x0000, 
0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9}, + {0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB}, + {0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6}, + {0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7}, + {0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1}, + {0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD}, + {0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112}, + {0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A}, + {0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E}, + {0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C}, + {0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A}, + {0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1}, + {0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7}, + {0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250}, + {0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6}, + {0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF}, + {0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE}, + {0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F}, + {0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390}, + {0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400}, + {0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F}, + {0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F}, + {0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4}, + {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A}, + {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D}, + {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E}, + {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7}, + {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990}, + {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2}, + {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, + {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, + {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, + {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, + {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, + {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, + {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, + {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, + {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, + {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, + {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, + {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, + {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, + {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, + {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, + {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, + {0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, + {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, + {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, + {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, + {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, + {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, + {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, + {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, + {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, + {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63}, + {0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90}, + {0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9}, + {0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD}, + {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3}, + {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C}, + {0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48}, + {0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F}, + {0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1}, + {0x0DB3, 0x0DBB}, {0x0DBD, 0x0DBD}, 
{0x0DC0, 0x0DC6}, + {0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4}, {0x0DD6, 0x0DD6}, + {0x0DD8, 0x0DDF}, {0x0DE6, 0x0DEF}, {0x0DF2, 0x0DF4}, + {0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82}, + {0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3}, + {0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4}, + {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9}, + {0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C}, + {0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC}, + {0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7}, + {0x10CD, 0x10CD}, {0x10D0, 0x10FF}, {0x1160, 0x1248}, + {0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258}, + {0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D}, + {0x1290, 0x12B0}, {0x12B2, 0x12B5}, {0x12B8, 0x12BE}, + {0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6}, + {0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A}, + {0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5}, + {0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8}, + {0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736}, + {0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770}, + {0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9}, + {0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819}, + {0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5}, + {0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B}, + {0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974}, + {0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA}, + {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C}, + {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD}, + {0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C}, + {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49}, + {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7}, + {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15}, + {0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D}, + {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B}, + {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4}, + {0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3}, {0x1FD6, 0x1FDB}, + {0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFE}, + {0x2000, 0x200F}, {0x2011, 0x2012}, {0x2017, 0x2017}, + {0x201A, 0x201B}, {0x201E, 0x201F}, {0x2023, 0x2023}, + {0x2028, 0x202F}, {0x2031, 0x2031}, {0x2034, 0x2034}, + {0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064}, + {0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080}, + {0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8}, + {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0}, + {0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108}, + {0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120}, + {0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152}, + {0x2155, 0x215A}, {0x215F, 0x215F}, {0x216C, 0x216F}, + {0x217A, 0x2188}, {0x218A, 0x218B}, {0x219A, 0x21B7}, + {0x21BA, 0x21D1}, {0x21D3, 0x21D3}, {0x21D5, 0x21E6}, + {0x21E8, 0x21FF}, {0x2201, 0x2201}, {0x2204, 0x2206}, + {0x2209, 0x220A}, {0x220C, 0x220E}, {0x2210, 0x2210}, + {0x2212, 0x2214}, {0x2216, 0x2219}, {0x221B, 0x221C}, + {0x2221, 0x2222}, {0x2224, 0x2224}, {0x2226, 0x2226}, + {0x222D, 0x222D}, {0x222F, 0x2233}, {0x2238, 0x223B}, + {0x223E, 0x2247}, {0x2249, 0x224B}, {0x224D, 0x2251}, + {0x2253, 0x225F}, {0x2262, 0x2263}, {0x2268, 0x2269}, + {0x226C, 0x226D}, {0x2270, 0x2281}, {0x2284, 0x2285}, + {0x2288, 0x2294}, {0x2296, 0x2298}, {0x229A, 0x22A4}, + {0x22A6, 0x22BE}, {0x22C0, 0x2311}, {0x2313, 0x2319}, + {0x231C, 0x2328}, {0x232B, 0x23E8}, {0x23ED, 0x23EF}, + {0x23F1, 0x23F2}, {0x23F4, 0x2426}, {0x2440, 0x244A}, + {0x24EA, 0x24EA}, {0x254C, 0x254F}, {0x2574, 0x257F}, + {0x2590, 0x2591}, {0x2596, 0x259F}, {0x25A2, 0x25A2}, + 
{0x25AA, 0x25B1}, {0x25B4, 0x25B5}, {0x25B8, 0x25BB}, + {0x25BE, 0x25BF}, {0x25C2, 0x25C5}, {0x25C9, 0x25CA}, + {0x25CC, 0x25CD}, {0x25D2, 0x25E1}, {0x25E6, 0x25EE}, + {0x25F0, 0x25FC}, {0x25FF, 0x2604}, {0x2607, 0x2608}, + {0x260A, 0x260D}, {0x2610, 0x2613}, {0x2616, 0x261B}, + {0x261D, 0x261D}, {0x261F, 0x263F}, {0x2641, 0x2641}, + {0x2643, 0x2647}, {0x2654, 0x265F}, {0x2662, 0x2662}, + {0x2666, 0x2666}, {0x266B, 0x266B}, {0x266E, 0x266E}, + {0x2670, 0x267E}, {0x2680, 0x2692}, {0x2694, 0x269D}, + {0x26A0, 0x26A0}, {0x26A2, 0x26A9}, {0x26AC, 0x26BC}, + {0x26C0, 0x26C3}, {0x26E2, 0x26E2}, {0x26E4, 0x26E7}, + {0x2700, 0x2704}, {0x2706, 0x2709}, {0x270C, 0x2727}, + {0x2729, 0x273C}, {0x273E, 0x274B}, {0x274D, 0x274D}, + {0x274F, 0x2752}, {0x2756, 0x2756}, {0x2758, 0x2775}, + {0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE}, + {0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A}, + {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73}, + {0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E}, + {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27}, + {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70}, + {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE}, + {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6}, + {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE}, + {0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF}, + {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF}, + {0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839}, + {0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9}, + {0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD}, + {0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36}, + {0xAA40, 0xAA4D}, {0xAA50, 0xAA59}, {0xAA5C, 0xAAC2}, + {0xAADB, 0xAAF6}, {0xAB01, 0xAB06}, {0xAB09, 0xAB0E}, + {0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E}, + {0xAB30, 0xAB6B}, {0xAB70, 0xABED}, {0xABF0, 0xABF9}, + {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF}, + {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36}, + {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41}, + {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F}, + {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD}, + {0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC}, + {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B}, + {0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D}, + {0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA}, + {0x10100, 0x10102}, {0x10107, 0x10133}, {0x10137, 0x1018E}, + {0x10190, 0x1019C}, {0x101A0, 0x101A0}, {0x101D0, 0x101FD}, + {0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x102E0, 0x102FB}, + {0x10300, 0x10323}, {0x1032D, 0x1034A}, {0x10350, 0x1037A}, + {0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5}, + {0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3}, + {0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563}, + {0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755}, + {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808}, + {0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C}, + {0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF}, + {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B}, + {0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7}, + {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06}, + {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35}, + {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58}, + {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6}, + {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72}, + {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF}, + {0x10C00, 
0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2}, + {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E}, + {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1}, + {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB}, + {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F}, + {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, + {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147}, + {0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4}, + {0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286}, + {0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D}, + {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9}, + {0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310}, + {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333}, + {0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348}, + {0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357}, + {0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374}, + {0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7}, + {0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD}, + {0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C}, + {0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A}, + {0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B}, + {0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909}, + {0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935}, + {0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959}, + {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4}, + {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8}, + {0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45}, + {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7}, + {0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09}, + {0x11D0B, 0x11D36}, {0x11D3A, 0x11D3A}, {0x11D3C, 0x11D3D}, + {0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65}, + {0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91}, + {0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8}, + {0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399}, + {0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543}, + {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646}, + {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69}, + {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5}, + {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61}, + {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A}, + {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F}, + {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88}, + {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5}, + {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245}, + {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, + {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, + {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, + {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, + {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, + {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, + {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, + {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, + {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, + {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, + {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D}, + {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9}, + {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6}, + {0x1E900, 0x1E94B}, {0x1E950, 
0x1E959}, {0x1E95E, 0x1E95F}, + {0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03}, + {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24}, + {0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37}, + {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42}, + {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B}, + {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, {0x1EE54, 0x1EE54}, + {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B}, + {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62}, + {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72}, + {0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E}, + {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3}, + {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x1EEF0, 0x1EEF1}, + {0x1F000, 0x1F003}, {0x1F005, 0x1F02B}, {0x1F030, 0x1F093}, + {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CE}, + {0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10F}, {0x1F12E, 0x1F12F}, + {0x1F16A, 0x1F16F}, {0x1F1AD, 0x1F1AD}, {0x1F1E6, 0x1F1FF}, + {0x1F321, 0x1F32C}, {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D}, + {0x1F394, 0x1F39F}, {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF}, + {0x1F3F1, 0x1F3F3}, {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F}, + {0x1F441, 0x1F441}, {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A}, + {0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594}, + {0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F}, + {0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4}, + {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773}, + {0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, + {0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, + {0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B}, + {0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D}, + {0x1FB00, 0x1FB92}, {0x1FB94, 0x1FBCA}, {0x1FBF0, 0x1FBF9}, + {0xE0001, 0xE0001}, {0xE0020, 0xE007F}, +} + +var emoji = table{ + {0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122}, + {0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA}, + {0x231A, 0x231B}, {0x2328, 0x2328}, {0x2388, 0x2388}, + {0x23CF, 0x23CF}, {0x23E9, 0x23F3}, {0x23F8, 0x23FA}, + {0x24C2, 0x24C2}, {0x25AA, 0x25AB}, {0x25B6, 0x25B6}, + {0x25C0, 0x25C0}, {0x25FB, 0x25FE}, {0x2600, 0x2605}, + {0x2607, 0x2612}, {0x2614, 0x2685}, {0x2690, 0x2705}, + {0x2708, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716}, + {0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728}, + {0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747}, + {0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755}, + {0x2757, 0x2757}, {0x2763, 0x2767}, {0x2795, 0x2797}, + {0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, + {0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C}, + {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030}, + {0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299}, + {0x1F000, 0x1F0FF}, {0x1F10D, 0x1F10F}, {0x1F12F, 0x1F12F}, + {0x1F16C, 0x1F171}, {0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E}, + {0x1F191, 0x1F19A}, {0x1F1AD, 0x1F1E5}, {0x1F201, 0x1F20F}, + {0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A}, + {0x1F23C, 0x1F23F}, {0x1F249, 0x1F3FA}, {0x1F400, 0x1F53D}, + {0x1F546, 0x1F64F}, {0x1F680, 0x1F6FF}, {0x1F774, 0x1F77F}, + {0x1F7D5, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F}, + {0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8FF}, + {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1FAFF}, + {0x1FC00, 0x1FFFD}, +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go new file mode 100644 index 00000000..d6a61777 --- 
/dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go @@ -0,0 +1,28 @@ +// +build windows +// +build !appengine + +package runewidth + +import ( + "syscall" +) + +var ( + kernel32 = syscall.NewLazyDLL("kernel32") + procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP") +) + +// IsEastAsian returns true if the current locale is CJK +func IsEastAsian() bool { + r1, _, _ := procGetConsoleOutputCP.Call() + if r1 == 0 { + return false + } + + switch int(r1) { + case 932, 51932, 936, 949, 950: + return true + } + + return false +} diff --git a/vendor/github.com/russross/blackfriday/.gitignore b/vendor/github.com/russross/blackfriday/.gitignore deleted file mode 100644 index 75623dcc..00000000 --- a/vendor/github.com/russross/blackfriday/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -*.out -*.swp -*.8 -*.6 -_obj -_test* -markdown -tags diff --git a/vendor/github.com/russross/blackfriday/.travis.yml b/vendor/github.com/russross/blackfriday/.travis.yml deleted file mode 100644 index a49fff15..00000000 --- a/vendor/github.com/russross/blackfriday/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -sudo: false -language: go -go: - - "1.9.x" - - "1.10.x" - - "1.11.x" - - tip -matrix: - fast_finish: true - allow_failures: - - go: tip -install: - - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). -script: - - go get -t -v ./... - - diff -u <(echo -n) <(gofmt -d -s .) - - go tool vet . - - go test -v -race ./... diff --git a/vendor/github.com/russross/blackfriday/LICENSE.txt b/vendor/github.com/russross/blackfriday/LICENSE.txt deleted file mode 100644 index 7fbb253a..00000000 --- a/vendor/github.com/russross/blackfriday/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Blackfriday is distributed under the Simplified BSD License: - -Copyright © 2011 Russ Ross -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/russross/blackfriday/README.md b/vendor/github.com/russross/blackfriday/README.md deleted file mode 100644 index 997ef5d4..00000000 --- a/vendor/github.com/russross/blackfriday/README.md +++ /dev/null @@ -1,364 +0,0 @@ -Blackfriday -[![Build Status][BuildV2SVG]][BuildV2URL] -[![PkgGoDev][PkgGoDevV2SVG]][PkgGoDevV2URL] -=========== - -Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It -is paranoid about its input (so you can safely feed it user-supplied -data), it is fast, it supports common extensions (tables, smart -punctuation substitutions, etc.), and it is safe for all utf-8 -(unicode) input. - -HTML output is currently supported, along with Smartypants -extensions. - -It started as a translation from C of [Sundown][3]. - - -Installation ------------- - -Blackfriday is compatible with modern Go releases in module mode. -With Go installed: - - go get github.com/russross/blackfriday - -will resolve and add the package to the current development module, -then build and install it. Alternatively, you can achieve the same -if you import it in a package: - - import "github.com/russross/blackfriday" - -and `go get` without parameters. - -Old versions of Go and legacy GOPATH mode might work, -but no effort is made to keep them working. - - -Versions --------- - -Currently maintained and recommended version of Blackfriday is `v2`. It's being -developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the -documentation is available at -https://pkg.go.dev/github.com/russross/blackfriday/v2. - -It is `go get`-able in module mode at `github.com/russross/blackfriday/v2`. - -Version 2 offers a number of improvements over v1: - -* Cleaned up API -* A separate call to [`Parse`][4], which produces an abstract syntax tree for - the document -* Latest bug fixes -* Flexibility to easily add your own rendering extensions - -Potential drawbacks: - -* Our benchmarks show v2 to be slightly slower than v1. Currently in the - ballpark of around 15%. -* API breakage. If you can't afford modifying your code to adhere to the new API - and don't care too much about the new features, v2 is probably not for you. -* Several bug fixes are trailing behind and still need to be forward-ported to - v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for - tracking. - -If you are still interested in the legacy `v1`, you can import it from -`github.com/russross/blackfriday`. Documentation for the legacy v1 can be found -here: https://pkg.go.dev/github.com/russross/blackfriday. - - -Usage ------ - -### v1 - -For basic usage, it is as simple as getting your input into a byte -slice and calling: - -```go -output := blackfriday.MarkdownBasic(input) -``` - -This renders it with no extensions enabled. To get a more useful -feature set, use this instead: - -```go -output := blackfriday.MarkdownCommon(input) -``` - -### v2 - -For the most sensible markdown processing, it is as simple as getting your input -into a byte slice and calling: - -```go -output := blackfriday.Run(input) -``` - -Your input will be parsed and the output rendered with a set of most popular -extensions enabled. If you want the most basic feature set, corresponding with -the bare Markdown specification, use: - -```go -output := blackfriday.Run(input, blackfriday.WithNoExtensions()) -``` - -### Sanitize untrusted content - -Blackfriday itself does nothing to protect against malicious content. 
If you are -dealing with user-supplied markdown, we recommend running Blackfriday's output -through an HTML sanitizer such as [Bluemonday][5]. - -Here's an example of simple usage of Blackfriday together with Bluemonday: - -```go -import ( - "github.com/microcosm-cc/bluemonday" - "github.com/russross/blackfriday" -) - -// ... -unsafe := blackfriday.Run(input) -html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) -``` - -### Custom options, v1 - -If you want to customize the set of options, first get a renderer -(currently only the HTML output engine), then use it to -call the more general `Markdown` function. For examples, see the -implementations of `MarkdownBasic` and `MarkdownCommon` in -`markdown.go`. - -### Custom options, v2 - -If you want to customize the set of options, use `blackfriday.WithExtensions`, -`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`. - -### `blackfriday-tool` - -You can also check out `blackfriday-tool` for a more complete example -of how to use it. Download and install it using: - - go get github.com/russross/blackfriday-tool - -This is a simple command-line tool that allows you to process a -markdown file using a standalone program. You can also browse the -source directly on github if you are just looking for some example -code: - -* <https://github.com/russross/blackfriday-tool> - -Note that if you have not already done so, installing -`blackfriday-tool` will be sufficient to download and install -blackfriday in addition to the tool itself. The tool binary will be -installed in `$GOPATH/bin`. This is a statically-linked binary that -can be copied to wherever you need it without worrying about -dependencies and library versions. - -### Sanitized anchor names - -Blackfriday includes an algorithm for creating sanitized anchor names -corresponding to a given input text. This algorithm is used to create -anchors for headings when `EXTENSION_AUTO_HEADER_IDS` is enabled. The -algorithm has a specification, so that other packages can create -compatible anchor names and links to those anchors. - -The specification is located at https://pkg.go.dev/github.com/russross/blackfriday#hdr-Sanitized_Anchor_Names. - -[`SanitizedAnchorName`](https://pkg.go.dev/github.com/russross/blackfriday#SanitizedAnchorName) exposes this functionality, and can be used to -create compatible links to the anchor names generated by blackfriday. -This algorithm is also implemented in a small standalone package at -[`github.com/shurcooL/sanitized_anchor_name`](https://pkg.go.dev/github.com/shurcooL/sanitized_anchor_name). It can be useful for clients -that want a small package and don't need full functionality of blackfriday. - - -Features --------- - -All features of Sundown are supported, including: - -* **Compatibility**. The Markdown v1.0.3 test suite passes with - the `--tidy` option. Without `--tidy`, the differences are - mostly in whitespace and entity escaping, where blackfriday is - more consistent and cleaner. - -* **Common extensions**, including table support, fenced code - blocks, autolinks, strikethroughs, non-strict emphasis, etc. - -* **Safety**. Blackfriday is paranoid when parsing, making it safe - to feed untrusted user input without fear of bad things - happening. The test suite stress tests this and there are no - known inputs that make it crash. If you find one, please let me - know and send me the input that does it. - - NOTE: "safety" in this context means *runtime safety only*.
In order to - protect yourself against JavaScript injection in untrusted content, see - [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). - -* **Fast processing**. It is fast enough to render on-demand in - most web applications without having to cache the output. - -* **Thread safety**. You can run multiple parsers in different - goroutines without ill effect. There is no dependence on global - shared state. - -* **Minimal dependencies**. Blackfriday only depends on standard - library packages in Go. The source code is pretty - self-contained, so it is easy to add to any project, including - Google App Engine projects. - -* **Standards compliant**. Output successfully validates using the - W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. - - -Extensions ----------- - -In addition to the standard markdown syntax, this package -implements the following extensions: - -* **Intra-word emphasis supression**. The `_` character is - commonly used inside words when discussing code, so having - markdown interpret it as an emphasis command is usually the - wrong thing. Blackfriday lets you treat all emphasis markers as - normal characters when they occur inside a word. - -* **Tables**. Tables can be created by drawing them in the input - using a simple syntax: - - ``` - Name | Age - --------|------ - Bob | 27 - Alice | 23 - ``` - -* **Fenced code blocks**. In addition to the normal 4-space - indentation to mark code blocks, you can explicitly mark them - and supply a language (to make syntax highlighting simple). Just - mark it like this: - - ```go - func getTrue() bool { - return true - } - ``` - - You can use 3 or more backticks to mark the beginning of the - block, and the same number to mark the end of the block. - - To preserve classes of fenced code blocks while using the bluemonday - HTML sanitizer, use the following policy: - - ```go - p := bluemonday.UGCPolicy() - p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code") - html := p.SanitizeBytes(unsafe) - ``` - -* **Definition lists**. A simple definition list is made of a single-line - term followed by a colon and the definition for that term. - - Cat - : Fluffy animal everyone likes - - Internet - : Vector of transmission for pictures of cats - - Terms must be separated from the previous definition by a blank line. - -* **Footnotes**. A marker in the text that will become a superscript number; - a footnote definition that will be placed in a list of footnotes at the - end of the document. A footnote looks like this: - - This is a footnote.[^1] - - [^1]: the footnote text. - -* **Autolinking**. Blackfriday can find URLs that have not been - explicitly marked as links and turn them into links. - -* **Strikethrough**. Use two tildes (`~~`) to mark text that - should be crossed out. - -* **Hard line breaks**. With this extension enabled (it is off by - default in the `MarkdownBasic` and `MarkdownCommon` convenience - functions), newlines in the input translate into line breaks in - the output. - -* **Smart quotes**. Smartypants-style punctuation substitution is - supported, turning normal double- and single-quote marks into - curly quotes, etc. - -* **LaTeX-style dash parsing** is an additional option, where `--` - is translated into `–`, and `---` is translated into - `—`. This differs from most smartypants processors, which - turn a single hyphen into an ndash and a double hyphen into an - mdash. 
- -* **Smart fractions**, where anything that looks like a fraction - is translated into suitable HTML (instead of just a few special - cases like most smartypant processors). For example, `4/5` - becomes `<sup>4</sup>&frasl;<sub>5</sub>`, which renders as - <sup>4</sup>&frasl;<sub>5</sub>. - - -Other renderers --------------- - -Blackfriday is structured to allow alternative rendering engines. Here -are a few of note: - -* [github_flavored_markdown](https://pkg.go.dev/github.com/shurcooL/github_flavored_markdown): - provides a GitHub Flavored Markdown renderer with fenced code block - highlighting, clickable heading anchor links. - - It's not customizable, and its goal is to produce HTML output - equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), - except the rendering is performed locally. - -* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, - but for markdown. - -* [LaTeX output](https://gitlab.com/ambrevar/blackfriday-latex): - renders output as LaTeX. - -* [bfchroma](https://github.com/Depado/bfchroma/): provides convenience - integration with the [Chroma](https://github.com/alecthomas/chroma) code - highlighting library. bfchroma is only compatible with v2 of Blackfriday and - provides a drop-in renderer ready to use with Blackfriday, as well as - options and means for further customization. - -* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer. - -* [Blackfriday-Slack](https://github.com/karriereat/blackfriday-slack): converts markdown to slack message style - - -TODO ----- - -* More unit testing -* Improve Unicode support. It does not understand all Unicode - rules (about what constitutes a letter, a punctuation symbol, - etc.), so it may fail to detect word boundaries correctly in - some instances. It is safe on all UTF-8 input. - - -License ------- - -[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) - - - [1]: https://daringfireball.net/projects/markdown/ "Markdown" - [2]: https://golang.org/ "Go Language" - [3]: https://github.com/vmg/sundown "Sundown" - [4]: https://pkg.go.dev/github.com/russross/blackfriday/v2#Parse "Parse func" - [5]: https://github.com/microcosm-cc/bluemonday "Bluemonday" - - [BuildV2SVG]: https://travis-ci.org/russross/blackfriday.svg?branch=v2 - [BuildV2URL]: https://travis-ci.org/russross/blackfriday - [PkgGoDevV2SVG]: https://pkg.go.dev/badge/github.com/russross/blackfriday/v2 - [PkgGoDevV2URL]: https://pkg.go.dev/github.com/russross/blackfriday/v2 diff --git a/vendor/github.com/russross/blackfriday/block.go b/vendor/github.com/russross/blackfriday/block.go deleted file mode 100644 index 563cb290..00000000 --- a/vendor/github.com/russross/blackfriday/block.go +++ /dev/null @@ -1,1480 +0,0 @@ -// -// Blackfriday Markdown Processor -// Available at http://github.com/russross/blackfriday -// -// Copyright © 2011 Russ Ross <russ@russross.com>. -// Distributed under the Simplified BSD License. -// See README.md for details. -// - -// -// Functions to parse block-level elements. -// - -package blackfriday - -import ( - "bytes" - "strings" - "unicode" -) - -// Parse block-level data. -// Note: this function and many that it calls assume that -// the input buffer ends with a newline.
-func (p *parser) block(out *bytes.Buffer, data []byte) { - if len(data) == 0 || data[len(data)-1] != '\n' { - panic("block input is missing terminating newline") - } - - // this is called recursively: enforce a maximum depth - if p.nesting >= p.maxNesting { - return - } - p.nesting++ - - // parse out one block-level construct at a time - for len(data) > 0 { - // prefixed header: - // - // # Header 1 - // ## Header 2 - // ... - // ###### Header 6 - if p.isPrefixHeader(data) { - data = data[p.prefixHeader(out, data):] - continue - } - - // block of preformatted HTML: - // - //
<div> - //         ... - //     </div>
- if data[0] == '<' { - if i := p.html(out, data, true); i > 0 { - data = data[i:] - continue - } - } - - // title block - // - // % stuff - // % more stuff - // % even more stuff - if p.flags&EXTENSION_TITLEBLOCK != 0 { - if data[0] == '%' { - if i := p.titleBlock(out, data, true); i > 0 { - data = data[i:] - continue - } - } - } - - // blank lines. note: returns the # of bytes to skip - if i := p.isEmpty(data); i > 0 { - data = data[i:] - continue - } - - // indented code block: - // - // func max(a, b int) int { - // if a > b { - // return a - // } - // return b - // } - if p.codePrefix(data) > 0 { - data = data[p.code(out, data):] - continue - } - - // fenced code block: - // - // ``` go info string here - // func fact(n int) int { - // if n <= 1 { - // return n - // } - // return n * fact(n-1) - // } - // ``` - if p.flags&EXTENSION_FENCED_CODE != 0 { - if i := p.fencedCodeBlock(out, data, true); i > 0 { - data = data[i:] - continue - } - } - - // horizontal rule: - // - // ------ - // or - // ****** - // or - // ______ - if p.isHRule(data) { - p.r.HRule(out) - var i int - for i = 0; data[i] != '\n'; i++ { - } - data = data[i:] - continue - } - - // block quote: - // - // > A big quote I found somewhere - // > on the web - if p.quotePrefix(data) > 0 { - data = data[p.quote(out, data):] - continue - } - - // table: - // - // Name | Age | Phone - // ------|-----|--------- - // Bob | 31 | 555-1234 - // Alice | 27 | 555-4321 - if p.flags&EXTENSION_TABLES != 0 { - if i := p.table(out, data); i > 0 { - data = data[i:] - continue - } - } - - // an itemized/unordered list: - // - // * Item 1 - // * Item 2 - // - // also works with + or - - if p.uliPrefix(data) > 0 { - data = data[p.list(out, data, 0):] - continue - } - - // a numbered/ordered list: - // - // 1. Item 1 - // 2. 
Item 2 - if p.oliPrefix(data) > 0 { - data = data[p.list(out, data, LIST_TYPE_ORDERED):] - continue - } - - // definition lists: - // - // Term 1 - // : Definition a - // : Definition b - // - // Term 2 - // : Definition c - if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if p.dliPrefix(data) > 0 { - data = data[p.list(out, data, LIST_TYPE_DEFINITION):] - continue - } - } - - // anything else must look like a normal paragraph - // note: this finds underlined headers, too - data = data[p.paragraph(out, data):] - } - - p.nesting-- -} - -func (p *parser) isPrefixHeader(data []byte) bool { - if data[0] != '#' { - return false - } - - if p.flags&EXTENSION_SPACE_HEADERS != 0 { - level := 0 - for level < 6 && data[level] == '#' { - level++ - } - if data[level] != ' ' { - return false - } - } - return true -} - -func (p *parser) prefixHeader(out *bytes.Buffer, data []byte) int { - level := 0 - for level < 6 && data[level] == '#' { - level++ - } - i := skipChar(data, level, ' ') - end := skipUntilChar(data, i, '\n') - skip := end - id := "" - if p.flags&EXTENSION_HEADER_IDS != 0 { - j, k := 0, 0 - // find start/end of header id - for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ { - } - for k = j + 1; k < end && data[k] != '}'; k++ { - } - // extract header id iff found - if j < end && k < end { - id = string(data[j+2 : k]) - end = j - skip = k + 1 - for end > 0 && data[end-1] == ' ' { - end-- - } - } - } - for end > 0 && data[end-1] == '#' { - if isBackslashEscaped(data, end-1) { - break - } - end-- - } - for end > 0 && data[end-1] == ' ' { - end-- - } - if end > i { - if id == "" && p.flags&EXTENSION_AUTO_HEADER_IDS != 0 { - id = SanitizedAnchorName(string(data[i:end])) - } - work := func() bool { - p.inline(out, data[i:end]) - return true - } - p.r.Header(out, work, level, id) - } - return skip -} - -func (p *parser) isUnderlinedHeader(data []byte) int { - // test of level 1 header - if data[0] == '=' { - i := skipChar(data, 1, '=') - i = skipChar(data, i, ' ') - if data[i] == '\n' { - return 1 - } else { - return 0 - } - } - - // test of level 2 header - if data[0] == '-' { - i := skipChar(data, 1, '-') - i = skipChar(data, i, ' ') - if data[i] == '\n' { - return 2 - } else { - return 0 - } - } - - return 0 -} - -func (p *parser) titleBlock(out *bytes.Buffer, data []byte, doRender bool) int { - if data[0] != '%' { - return 0 - } - splitData := bytes.Split(data, []byte("\n")) - var i int - for idx, b := range splitData { - if !bytes.HasPrefix(b, []byte("%")) { - i = idx // - 1 - break - } - } - - data = bytes.Join(splitData[0:i], []byte("\n")) - p.r.TitleBlock(out, data) - - return len(data) -} - -func (p *parser) html(out *bytes.Buffer, data []byte, doRender bool) int { - var i, j int - - // identify the opening tag - if data[0] != '<' { - return 0 - } - curtag, tagfound := p.htmlFindTag(data[1:]) - - // handle special cases - if !tagfound { - // check for an HTML comment - if size := p.htmlComment(out, data, doRender); size > 0 { - return size - } - - // check for an
tag - if size := p.htmlHr(out, data, doRender); size > 0 { - return size - } - - // check for HTML CDATA - if size := p.htmlCDATA(out, data, doRender); size > 0 { - return size - } - - // no special case recognized - return 0 - } - - // look for an unindented matching closing tag - // followed by a blank line - found := false - /* - closetag := []byte("\n") - j = len(curtag) + 1 - for !found { - // scan for a closing tag at the beginning of a line - if skip := bytes.Index(data[j:], closetag); skip >= 0 { - j += skip + len(closetag) - } else { - break - } - - // see if it is the only thing on the line - if skip := p.isEmpty(data[j:]); skip > 0 { - // see if it is followed by a blank line/eof - j += skip - if j >= len(data) { - found = true - i = j - } else { - if skip := p.isEmpty(data[j:]); skip > 0 { - j += skip - found = true - i = j - } - } - } - } - */ - - // if not found, try a second pass looking for indented match - // but not if tag is "ins" or "del" (following original Markdown.pl) - if !found && curtag != "ins" && curtag != "del" { - i = 1 - for i < len(data) { - i++ - for i < len(data) && !(data[i-1] == '<' && data[i] == '/') { - i++ - } - - if i+2+len(curtag) >= len(data) { - break - } - - j = p.htmlFindEnd(curtag, data[i-1:]) - - if j > 0 { - i += j - 1 - found = true - break - } - } - } - - if !found { - return 0 - } - - // the end of the block has been found - if doRender { - // trim newlines - end := i - for end > 0 && data[end-1] == '\n' { - end-- - } - p.r.BlockHtml(out, data[:end]) - } - - return i -} - -func (p *parser) renderHTMLBlock(out *bytes.Buffer, data []byte, start int, doRender bool) int { - // html block needs to end with a blank line - if i := p.isEmpty(data[start:]); i > 0 { - size := start + i - if doRender { - // trim trailing newlines - end := size - for end > 0 && data[end-1] == '\n' { - end-- - } - p.r.BlockHtml(out, data[:end]) - } - return size - } - return 0 -} - -// HTML comment, lax form -func (p *parser) htmlComment(out *bytes.Buffer, data []byte, doRender bool) int { - i := p.inlineHTMLComment(out, data) - return p.renderHTMLBlock(out, data, i, doRender) -} - -// HTML CDATA section -func (p *parser) htmlCDATA(out *bytes.Buffer, data []byte, doRender bool) int { - const cdataTag = "') { - i++ - } - i++ - // no end-of-comment marker - if i >= len(data) { - return 0 - } - return p.renderHTMLBlock(out, data, i, doRender) -} - -// HR, which is the only self-closing block tag considered -func (p *parser) htmlHr(out *bytes.Buffer, data []byte, doRender bool) int { - if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') { - return 0 - } - if data[3] != ' ' && data[3] != '/' && data[3] != '>' { - // not an
tag after all; at least not a valid one - return 0 - } - - i := 3 - for data[i] != '>' && data[i] != '\n' { - i++ - } - - if data[i] == '>' { - return p.renderHTMLBlock(out, data, i+1, doRender) - } - - return 0 -} - -func (p *parser) htmlFindTag(data []byte) (string, bool) { - i := 0 - for isalnum(data[i]) { - i++ - } - key := string(data[:i]) - if _, ok := blockTags[key]; ok { - return key, true - } - return "", false -} - -func (p *parser) htmlFindEnd(tag string, data []byte) int { - // assume data[0] == '<' && data[1] == '/' already tested - - // check if tag is a match - closetag := []byte("") - if !bytes.HasPrefix(data, closetag) { - return 0 - } - i := len(closetag) - - // check that the rest of the line is blank - skip := 0 - if skip = p.isEmpty(data[i:]); skip == 0 { - return 0 - } - i += skip - skip = 0 - - if i >= len(data) { - return i - } - - if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 { - return i - } - if skip = p.isEmpty(data[i:]); skip == 0 { - // following line must be blank - return 0 - } - - return i + skip -} - -func (*parser) isEmpty(data []byte) int { - // it is okay to call isEmpty on an empty buffer - if len(data) == 0 { - return 0 - } - - var i int - for i = 0; i < len(data) && data[i] != '\n'; i++ { - if data[i] != ' ' && data[i] != '\t' { - return 0 - } - } - return i + 1 -} - -func (*parser) isHRule(data []byte) bool { - i := 0 - - // skip up to three spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // look at the hrule char - if data[i] != '*' && data[i] != '-' && data[i] != '_' { - return false - } - c := data[i] - - // the whole line must be the char or whitespace - n := 0 - for data[i] != '\n' { - switch { - case data[i] == c: - n++ - case data[i] != ' ': - return false - } - i++ - } - - return n >= 3 -} - -// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data, -// and returns the end index if so, or 0 otherwise. It also returns the marker found. -// If syntax is not nil, it gets set to the syntax specified in the fence line. -// A final newline is mandatory to recognize the fence line, unless newlineOptional is true. -func isFenceLine(data []byte, info *string, oldmarker string, newlineOptional bool) (end int, marker string) { - i, size := 0, 0 - - // skip up to three spaces - for i < len(data) && i < 3 && data[i] == ' ' { - i++ - } - - // check for the marker characters: ~ or ` - if i >= len(data) { - return 0, "" - } - if data[i] != '~' && data[i] != '`' { - return 0, "" - } - - c := data[i] - - // the whole line must be the same char or whitespace - for i < len(data) && data[i] == c { - size++ - i++ - } - - // the marker char must occur at least 3 times - if size < 3 { - return 0, "" - } - marker = string(data[i-size : i]) - - // if this is the end marker, it must match the beginning marker - if oldmarker != "" && marker != oldmarker { - return 0, "" - } - - // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here - // into one, always get the info string, and discard it if the caller doesn't care. 
- if info != nil { - infoLength := 0 - i = skipChar(data, i, ' ') - - if i >= len(data) { - if newlineOptional && i == len(data) { - return i, marker - } - return 0, "" - } - - infoStart := i - - if data[i] == '{' { - i++ - infoStart++ - - for i < len(data) && data[i] != '}' && data[i] != '\n' { - infoLength++ - i++ - } - - if i >= len(data) || data[i] != '}' { - return 0, "" - } - - // strip all whitespace at the beginning and the end - // of the {} block - for infoLength > 0 && isspace(data[infoStart]) { - infoStart++ - infoLength-- - } - - for infoLength > 0 && isspace(data[infoStart+infoLength-1]) { - infoLength-- - } - - i++ - } else { - for i < len(data) && !isverticalspace(data[i]) { - infoLength++ - i++ - } - } - - *info = strings.TrimSpace(string(data[infoStart : infoStart+infoLength])) - } - - i = skipChar(data, i, ' ') - if i >= len(data) { - if newlineOptional { - return i, marker - } - return 0, "" - } - if data[i] == '\n' { - i++ // Take newline into account - } - - return i, marker -} - -// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning, -// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects. -// If doRender is true, a final newline is mandatory to recognize the fenced code block. -func (p *parser) fencedCodeBlock(out *bytes.Buffer, data []byte, doRender bool) int { - var infoString string - beg, marker := isFenceLine(data, &infoString, "", false) - if beg == 0 || beg >= len(data) { - return 0 - } - - var work bytes.Buffer - - for { - // safe to assume beg < len(data) - - // check for the end of the code block - newlineOptional := !doRender - fenceEnd, _ := isFenceLine(data[beg:], nil, marker, newlineOptional) - if fenceEnd != 0 { - beg += fenceEnd - break - } - - // copy the current line - end := skipUntilChar(data, beg, '\n') + 1 - - // did we reach the end of the buffer without a closing marker? 
- if end >= len(data) { - return 0 - } - - // verbatim copy to the working buffer - if doRender { - work.Write(data[beg:end]) - } - beg = end - } - - if doRender { - p.r.BlockCode(out, work.Bytes(), infoString) - } - - return beg -} - -func (p *parser) table(out *bytes.Buffer, data []byte) int { - var header bytes.Buffer - i, columns := p.tableHeader(&header, data) - if i == 0 { - return 0 - } - - var body bytes.Buffer - - for i < len(data) { - pipes, rowStart := 0, i - for ; data[i] != '\n'; i++ { - if data[i] == '|' { - pipes++ - } - } - - if pipes == 0 { - i = rowStart - break - } - - // include the newline in data sent to tableRow - i++ - p.tableRow(&body, data[rowStart:i], columns, false) - } - - p.r.Table(out, header.Bytes(), body.Bytes(), columns) - - return i -} - -// check if the specified position is preceded by an odd number of backslashes -func isBackslashEscaped(data []byte, i int) bool { - backslashes := 0 - for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' { - backslashes++ - } - return backslashes&1 == 1 -} - -func (p *parser) tableHeader(out *bytes.Buffer, data []byte) (size int, columns []int) { - i := 0 - colCount := 1 - for i = 0; data[i] != '\n'; i++ { - if data[i] == '|' && !isBackslashEscaped(data, i) { - colCount++ - } - } - - // doesn't look like a table header - if colCount == 1 { - return - } - - // include the newline in the data sent to tableRow - header := data[:i+1] - - // column count ignores pipes at beginning or end of line - if data[0] == '|' { - colCount-- - } - if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) { - colCount-- - } - - columns = make([]int, colCount) - - // move on to the header underline - i++ - if i >= len(data) { - return - } - - if data[i] == '|' && !isBackslashEscaped(data, i) { - i++ - } - i = skipChar(data, i, ' ') - - // each column header is of form: / *:?-+:? 
*|/ with # dashes + # colons >= 3 - // and trailing | optional on last column - col := 0 - for data[i] != '\n' { - dashes := 0 - - if data[i] == ':' { - i++ - columns[col] |= TABLE_ALIGNMENT_LEFT - dashes++ - } - for data[i] == '-' { - i++ - dashes++ - } - if data[i] == ':' { - i++ - columns[col] |= TABLE_ALIGNMENT_RIGHT - dashes++ - } - for data[i] == ' ' { - i++ - } - - // end of column test is messy - switch { - case dashes < 3: - // not a valid column - return - - case data[i] == '|' && !isBackslashEscaped(data, i): - // marker found, now skip past trailing whitespace - col++ - i++ - for data[i] == ' ' { - i++ - } - - // trailing junk found after last column - if col >= colCount && data[i] != '\n' { - return - } - - case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount: - // something else found where marker was required - return - - case data[i] == '\n': - // marker is optional for the last column - col++ - - default: - // trailing junk found after last column - return - } - } - if col != colCount { - return - } - - p.tableRow(out, header, columns, true) - size = i + 1 - return -} - -func (p *parser) tableRow(out *bytes.Buffer, data []byte, columns []int, header bool) { - i, col := 0, 0 - var rowWork bytes.Buffer - - if data[i] == '|' && !isBackslashEscaped(data, i) { - i++ - } - - for col = 0; col < len(columns) && i < len(data); col++ { - for data[i] == ' ' { - i++ - } - - cellStart := i - - for (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' { - i++ - } - - cellEnd := i - - // skip the end-of-cell marker, possibly taking us past end of buffer - i++ - - for cellEnd > cellStart && data[cellEnd-1] == ' ' { - cellEnd-- - } - - var cellWork bytes.Buffer - p.inline(&cellWork, data[cellStart:cellEnd]) - - if header { - p.r.TableHeaderCell(&rowWork, cellWork.Bytes(), columns[col]) - } else { - p.r.TableCell(&rowWork, cellWork.Bytes(), columns[col]) - } - } - - // pad it out with empty columns to get the right number - for ; col < len(columns); col++ { - if header { - p.r.TableHeaderCell(&rowWork, nil, columns[col]) - } else { - p.r.TableCell(&rowWork, nil, columns[col]) - } - } - - // silently ignore rows with too many cells - - p.r.TableRow(out, rowWork.Bytes()) -} - -// returns blockquote prefix length -func (p *parser) quotePrefix(data []byte) int { - i := 0 - for i < 3 && data[i] == ' ' { - i++ - } - if data[i] == '>' { - if data[i+1] == ' ' { - return i + 2 - } - return i + 1 - } - return 0 -} - -// blockquote ends with at least one blank line -// followed by something without a blockquote prefix -func (p *parser) terminateBlockquote(data []byte, beg, end int) bool { - if p.isEmpty(data[beg:]) <= 0 { - return false - } - if end >= len(data) { - return true - } - return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0 -} - -// parse a blockquote fragment -func (p *parser) quote(out *bytes.Buffer, data []byte) int { - var raw bytes.Buffer - beg, end := 0, 0 - for beg < len(data) { - end = beg - // Step over whole lines, collecting them. 
While doing that, check for - // fenced code and if one's found, incorporate it altogether, - // irregardless of any contents inside it - for data[end] != '\n' { - if p.flags&EXTENSION_FENCED_CODE != 0 { - if i := p.fencedCodeBlock(out, data[end:], false); i > 0 { - // -1 to compensate for the extra end++ after the loop: - end += i - 1 - break - } - } - end++ - } - end++ - - if pre := p.quotePrefix(data[beg:]); pre > 0 { - // skip the prefix - beg += pre - } else if p.terminateBlockquote(data, beg, end) { - break - } - - // this line is part of the blockquote - raw.Write(data[beg:end]) - beg = end - } - - var cooked bytes.Buffer - p.block(&cooked, raw.Bytes()) - p.r.BlockQuote(out, cooked.Bytes()) - return end -} - -// returns prefix length for block code -func (p *parser) codePrefix(data []byte) int { - if data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' { - return 4 - } - return 0 -} - -func (p *parser) code(out *bytes.Buffer, data []byte) int { - var work bytes.Buffer - - i := 0 - for i < len(data) { - beg := i - for data[i] != '\n' { - i++ - } - i++ - - blankline := p.isEmpty(data[beg:i]) > 0 - if pre := p.codePrefix(data[beg:i]); pre > 0 { - beg += pre - } else if !blankline { - // non-empty, non-prefixed line breaks the pre - i = beg - break - } - - // verbatim copy to the working buffeu - if blankline { - work.WriteByte('\n') - } else { - work.Write(data[beg:i]) - } - } - - // trim all the \n off the end of work - workbytes := work.Bytes() - eol := len(workbytes) - for eol > 0 && workbytes[eol-1] == '\n' { - eol-- - } - if eol != len(workbytes) { - work.Truncate(eol) - } - - work.WriteByte('\n') - - p.r.BlockCode(out, work.Bytes(), "") - - return i -} - -// returns unordered list item prefix -func (p *parser) uliPrefix(data []byte) int { - i := 0 - - // start with up to 3 spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // need a *, +, or - followed by a space - if (data[i] != '*' && data[i] != '+' && data[i] != '-') || - data[i+1] != ' ' { - return 0 - } - return i + 2 -} - -// returns ordered list item prefix -func (p *parser) oliPrefix(data []byte) int { - i := 0 - - // start with up to 3 spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // count the digits - start := i - for data[i] >= '0' && data[i] <= '9' { - i++ - } - - // we need >= 1 digits followed by a dot and a space - if start == i || data[i] != '.' || data[i+1] != ' ' { - return 0 - } - return i + 2 -} - -// returns definition list item prefix -func (p *parser) dliPrefix(data []byte) int { - i := 0 - - // need a : followed by a spaces - if data[i] != ':' || data[i+1] != ' ' { - return 0 - } - for data[i] == ' ' { - i++ - } - return i + 2 -} - -// parse ordered or unordered list block -func (p *parser) list(out *bytes.Buffer, data []byte, flags int) int { - i := 0 - flags |= LIST_ITEM_BEGINNING_OF_LIST - work := func() bool { - for i < len(data) { - skip := p.listItem(out, data[i:], &flags) - i += skip - - if skip == 0 || flags&LIST_ITEM_END_OF_LIST != 0 { - break - } - flags &= ^LIST_ITEM_BEGINNING_OF_LIST - } - return true - } - - p.r.List(out, work, flags) - return i -} - -// Parse a single list item. -// Assumes initial prefix is already removed if this is a sublist. 
-func (p *parser) listItem(out *bytes.Buffer, data []byte, flags *int) int { - // keep track of the indentation of the first line - itemIndent := 0 - for itemIndent < 3 && data[itemIndent] == ' ' { - itemIndent++ - } - - i := p.uliPrefix(data) - if i == 0 { - i = p.oliPrefix(data) - } - if i == 0 { - i = p.dliPrefix(data) - // reset definition term flag - if i > 0 { - *flags &= ^LIST_TYPE_TERM - } - } - if i == 0 { - // if in defnition list, set term flag and continue - if *flags&LIST_TYPE_DEFINITION != 0 { - *flags |= LIST_TYPE_TERM - } else { - return 0 - } - } - - // skip leading whitespace on first line - for data[i] == ' ' { - i++ - } - - // find the end of the line - line := i - for i > 0 && data[i-1] != '\n' { - i++ - } - - // process the following lines - containsBlankLine := false - sublist := 0 - codeBlockMarker := "" - if p.flags&EXTENSION_FENCED_CODE != 0 && i > line { - // determine if codeblock starts on the first line - _, codeBlockMarker = isFenceLine(data[line:i], nil, "", false) - } - - // get working buffer - var raw bytes.Buffer - - // put the first line into the working buffer - raw.Write(data[line:i]) - line = i - -gatherlines: - for line < len(data) { - i++ - - // find the end of this line - for data[i-1] != '\n' { - i++ - } - // if it is an empty line, guess that it is part of this item - // and move on to the next line - if p.isEmpty(data[line:i]) > 0 { - containsBlankLine = true - raw.Write(data[line:i]) - line = i - continue - } - - // calculate the indentation - indent := 0 - for indent < 4 && line+indent < i && data[line+indent] == ' ' { - indent++ - } - - chunk := data[line+indent : i] - - if p.flags&EXTENSION_FENCED_CODE != 0 { - // determine if in or out of codeblock - // if in codeblock, ignore normal list processing - _, marker := isFenceLine(chunk, nil, codeBlockMarker, false) - if marker != "" { - if codeBlockMarker == "" { - // start of codeblock - codeBlockMarker = marker - } else { - // end of codeblock. - *flags |= LIST_ITEM_CONTAINS_BLOCK - codeBlockMarker = "" - } - } - // we are in a codeblock, write line, and continue - if codeBlockMarker != "" || marker != "" { - raw.Write(data[line+indent : i]) - line = i - continue gatherlines - } - } - - // evaluate how this line fits in - switch { - // is this a nested list item? - case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) || - p.oliPrefix(chunk) > 0 || - p.dliPrefix(chunk) > 0: - - if containsBlankLine { - // end the list if the type changed after a blank line - if indent <= itemIndent && - ((*flags&LIST_TYPE_ORDERED != 0 && p.uliPrefix(chunk) > 0) || - (*flags&LIST_TYPE_ORDERED == 0 && p.oliPrefix(chunk) > 0)) { - - *flags |= LIST_ITEM_END_OF_LIST - break gatherlines - } - *flags |= LIST_ITEM_CONTAINS_BLOCK - } - - // to be a nested list, it must be indented more - // if not, it is the next item in the same list - if indent <= itemIndent { - break gatherlines - } - - // is this the first item in the nested list? - if sublist == 0 { - sublist = raw.Len() - } - - // is this a nested prefix header? 
-		case p.isPrefixHeader(chunk):
-			// if the header is not indented, it is not nested in the list
-			// and thus ends the list
-			if containsBlankLine && indent < 4 {
-				*flags |= LIST_ITEM_END_OF_LIST
-				break gatherlines
-			}
-			*flags |= LIST_ITEM_CONTAINS_BLOCK
-
-		// anything following an empty line is only part
-		// of this item if it is indented 4 spaces
-		// (regardless of the indentation of the beginning of the item)
-		case containsBlankLine && indent < 4:
-			if *flags&LIST_TYPE_DEFINITION != 0 && i < len(data)-1 {
-				// is the next item still a part of this list?
-				next := i
-				for data[next] != '\n' {
-					next++
-				}
-				for next < len(data)-1 && data[next] == '\n' {
-					next++
-				}
-				if i < len(data)-1 && data[i] != ':' && data[next] != ':' {
-					*flags |= LIST_ITEM_END_OF_LIST
-				}
-			} else {
-				*flags |= LIST_ITEM_END_OF_LIST
-			}
-			break gatherlines
-
-		// a blank line means this should be parsed as a block
-		case containsBlankLine:
-			*flags |= LIST_ITEM_CONTAINS_BLOCK
-		}
-
-		containsBlankLine = false
-
-		// add the line into the working buffer without prefix
-		raw.Write(data[line+indent : i])
-
-		line = i
-	}
-
-	// If we've reached the end of the data, the Renderer.ListItem call we're
-	// going to make below is definitely the last in the list.
-	if line >= len(data) {
-		*flags |= LIST_ITEM_END_OF_LIST
-	}
-
-	rawBytes := raw.Bytes()
-
-	// render the contents of the list item
-	var cooked bytes.Buffer
-	if *flags&LIST_ITEM_CONTAINS_BLOCK != 0 && *flags&LIST_TYPE_TERM == 0 {
-		// intermediate render of block item, except for definition term
-		if sublist > 0 {
-			p.block(&cooked, rawBytes[:sublist])
-			p.block(&cooked, rawBytes[sublist:])
-		} else {
-			p.block(&cooked, rawBytes)
-		}
-	} else {
-		// intermediate render of inline item
-		if sublist > 0 {
-			p.inline(&cooked, rawBytes[:sublist])
-			p.block(&cooked, rawBytes[sublist:])
-		} else {
-			p.inline(&cooked, rawBytes)
-		}
-	}
-
-	// render the actual list item
-	cookedBytes := cooked.Bytes()
-	parsedEnd := len(cookedBytes)
-
-	// strip trailing newlines
-	for parsedEnd > 0 && cookedBytes[parsedEnd-1] == '\n' {
-		parsedEnd--
-	}
-	p.r.ListItem(out, cookedBytes[:parsedEnd], *flags)
-
-	return line
-}
-
-// render a single paragraph that has already been parsed out
-func (p *parser) renderParagraph(out *bytes.Buffer, data []byte) {
-	if len(data) == 0 {
-		return
-	}
-
-	// trim leading spaces
-	beg := 0
-	for data[beg] == ' ' {
-		beg++
-	}
-
-	// trim the trailing newline
-	end := len(data) - 1
-
-	// trim trailing spaces
-	for end > beg && data[end-1] == ' ' {
-		end--
-	}
-
-	work := func() bool {
-		p.inline(out, data[beg:end])
-		return true
-	}
-	p.r.Paragraph(out, work)
-}
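[Editorial note] renderParagraph above illustrates the callback convention this renderer API uses throughout: block-level Renderer methods such as Paragraph receive a func() bool that performs inline rendering on demand, so the renderer decides whether and where the body is emitted. A stripped-down model of that contract, assuming (as blackfriday's HTML backend does) that a false return means "roll back whatever was written"; all names below are illustrative:

package main

import (
	"bytes"
	"fmt"
)

// paragraphRenderer mimics Renderer.Paragraph: it writes the wrapper tags
// itself and calls the callback to fill in the inline-rendered body.
func paragraphRenderer(out *bytes.Buffer, text func() bool) {
	marker := out.Len()
	out.WriteString("<p>")
	if !text() { // callback failed: undo everything written since marker
		out.Truncate(marker)
		return
	}
	out.WriteString("</p>\n")
}

func main() {
	var out bytes.Buffer
	paragraphRenderer(&out, func() bool {
		out.WriteString("hello world") // stand-in for p.inline(out, data[beg:end])
		return true
	})
	fmt.Print(out.String()) // <p>hello world</p>
}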
-
-func (p *parser) paragraph(out *bytes.Buffer, data []byte) int {
-	// prev: index of 1st char of previous line
-	// line: index of 1st char of current line
-	// i: index of cursor/end of current line
-	var prev, line, i int
-
-	// keep going until we find something to mark the end of the paragraph
-	for i < len(data) {
-		// mark the beginning of the current line
-		prev = line
-		current := data[i:]
-		line = i
-
-		// did we find a blank line marking the end of the paragraph?
-		if n := p.isEmpty(current); n > 0 {
-			// is this blank line followed by a definition list item?
-			if p.flags&EXTENSION_DEFINITION_LISTS != 0 {
-				if i < len(data)-1 && data[i+1] == ':' {
-					return p.list(out, data[prev:], LIST_TYPE_DEFINITION)
-				}
-			}
-
-			p.renderParagraph(out, data[:i])
-			return i + n
-		}
-
-		// an underline under some text marks a header, so our paragraph ended on prev line
-		if i > 0 {
-			if level := p.isUnderlinedHeader(current); level > 0 {
-				// render the paragraph
-				p.renderParagraph(out, data[:prev])
-
-				// ignore leading and trailing whitespace
-				eol := i - 1
-				for prev < eol && data[prev] == ' ' {
-					prev++
-				}
-				for eol > prev && data[eol-1] == ' ' {
-					eol--
-				}
-
-				// render the header
-				// this ugly double closure avoids forcing variables onto the heap
-				work := func(o *bytes.Buffer, pp *parser, d []byte) func() bool {
-					return func() bool {
-						pp.inline(o, d)
-						return true
-					}
-				}(out, p, data[prev:eol])
-
-				id := ""
-				if p.flags&EXTENSION_AUTO_HEADER_IDS != 0 {
-					id = SanitizedAnchorName(string(data[prev:eol]))
-				}
-
-				p.r.Header(out, work, level, id)
-
-				// find the end of the underline
-				for data[i] != '\n' {
-					i++
-				}
-				return i
-			}
-		}
-
-		// if the next line starts a block of HTML, then the paragraph ends here
-		if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 {
-			if data[i] == '<' && p.html(out, current, false) > 0 {
-				// rewind to before the HTML block
-				p.renderParagraph(out, data[:i])
-				return i
-			}
-		}
-
-		// if there's a prefixed header or a horizontal rule after this, paragraph is over
-		if p.isPrefixHeader(current) || p.isHRule(current) {
-			p.renderParagraph(out, data[:i])
-			return i
-		}
-
-		// if there's a fenced code block, paragraph is over
-		if p.flags&EXTENSION_FENCED_CODE != 0 {
-			if p.fencedCodeBlock(out, current, false) > 0 {
-				p.renderParagraph(out, data[:i])
-				return i
-			}
-		}
-
-		// if there's a definition list item, prev line is a definition term
-		if p.flags&EXTENSION_DEFINITION_LISTS != 0 {
-			if p.dliPrefix(current) != 0 {
-				return p.list(out, data[prev:], LIST_TYPE_DEFINITION)
-			}
-		}
-
-		// if there's a list after this, paragraph is over
-		if p.flags&EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK != 0 {
-			if p.uliPrefix(current) != 0 ||
-				p.oliPrefix(current) != 0 ||
-				p.quotePrefix(current) != 0 ||
-				p.codePrefix(current) != 0 {
-				p.renderParagraph(out, data[:i])
-				return i
-			}
-		}
-
-		// otherwise, scan to the beginning of the next line
-		for data[i] != '\n' {
-			i++
-		}
-		i++
-	}
-
-	p.renderParagraph(out, data[:i])
-	return i
-}
-
-// SanitizedAnchorName returns a sanitized anchor name for the given text.
-//
-// It implements the algorithm specified in the package comment.
-func SanitizedAnchorName(text string) string {
-	var anchorName []rune
-	futureDash := false
-	for _, r := range text {
-		switch {
-		case unicode.IsLetter(r) || unicode.IsNumber(r):
-			if futureDash && len(anchorName) > 0 {
-				anchorName = append(anchorName, '-')
-			}
-			futureDash = false
-			anchorName = append(anchorName, unicode.ToLower(r))
-		default:
-			futureDash = true
-		}
-	}
-	return string(anchorName)
-}
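[Editorial note] A short usage sketch of the SanitizedAnchorName function removed just above. The expected outputs follow directly from the algorithm documented in doc.go below: letters and numbers are lowercased and kept, every other run of characters between them collapses to a single '-', and invalid characters before the first or after the last valid one are dropped. This assumes blackfriday v1, where the function is exported at the package root:

package main

import (
	"fmt"

	"github.com/russross/blackfriday"
)

func main() {
	fmt.Println(blackfriday.SanitizedAnchorName("Hello, World!"))      // hello-world
	fmt.Println(blackfriday.SanitizedAnchorName("  ...Go 1.21 rocks")) // go-1-21-rocks
	fmt.Println(blackfriday.SanitizedAnchorName("Über uns"))           // über-uns
}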
diff --git a/vendor/github.com/russross/blackfriday/doc.go b/vendor/github.com/russross/blackfriday/doc.go
deleted file mode 100644
index 9656c42a..00000000
--- a/vendor/github.com/russross/blackfriday/doc.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Package blackfriday is a Markdown processor.
-//
-// It translates plain text with simple formatting rules into HTML or LaTeX.
-//
-// Sanitized Anchor Names
-//
-// Blackfriday includes an algorithm for creating sanitized anchor names
-// corresponding to a given input text. This algorithm is used to create
-// anchors for headings when EXTENSION_AUTO_HEADER_IDS is enabled. The
-// algorithm is specified below, so that other packages can create
-// compatible anchor names and links to those anchors.
-//
-// The algorithm iterates over the input text, interpreted as UTF-8,
-// one Unicode code point (rune) at a time. All runes that are letters (category L)
-// or numbers (category N) are considered valid characters. They are mapped to
-// lower case, and included in the output. All other runes are considered
-// invalid characters. Invalid characters that precede the first valid character,
-// as well as invalid characters that follow the last valid character,
-// are dropped completely. All other sequences of invalid characters
-// between two valid characters are replaced with a single dash character '-'.
-//
-// SanitizedAnchorName exposes this functionality, and can be used to
-// create compatible links to the anchor names generated by blackfriday.
-// This algorithm is also implemented in a small standalone package at
-// github.com/shurcooL/sanitized_anchor_name. It can be useful for clients
-// that want a small package and don't need the full functionality of blackfriday.
-package blackfriday
-
-// NOTE: Keep the Sanitized Anchor Name algorithm in sync with package
-// github.com/shurcooL/sanitized_anchor_name.
-// Otherwise, users of sanitized_anchor_name will get anchor names
-// that are incompatible with those generated by blackfriday.
diff --git a/vendor/github.com/russross/blackfriday/html.go b/vendor/github.com/russross/blackfriday/html.go
deleted file mode 100644
index fa044ca2..00000000
--- a/vendor/github.com/russross/blackfriday/html.go
+++ /dev/null
@@ -1,945 +0,0 @@
-//
-// Blackfriday Markdown Processor
-// Available at http://github.com/russross/blackfriday
-//
-// Copyright © 2011 Russ Ross <russ@russross.com>.
-// Distributed under the Simplified BSD License.
-// See README.md for details.
-//
-
-//
-//
-// HTML rendering backend
-//
-//
-
-package blackfriday
-
-import (
-	"bytes"
-	"fmt"
-	"regexp"
-	"strconv"
-	"strings"
-)
-
-// Html renderer configuration options.
-const (
-	HTML_SKIP_HTML  = 1 << iota // skip preformatted HTML blocks
-	HTML_SKIP_STYLE             // skip embedded