From 6ba1e1d79fd4b54924d9d85ba0ac159534d58479 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ralph=20K=C3=BChnert?= Date: Sat, 6 Nov 2021 12:34:01 +0100 Subject: [PATCH] update helm concept/import commands to support helm repos & update dependencies --- cmd/helmConcept.go | 10 +- cmd/helmImport.go | 7 +- go.mod | 8 +- go.sum | 237 +- pkg/helm/import.go | 16 +- .../goutils/cryptorandomstringutils.go | 25 +- .../Masterminds/goutils/randomstringutils.go | 24 +- .../Masterminds/goutils/stringutils.go | 16 + .../Masterminds/semver/v3/CHANGELOG.md | 6 + .../Masterminds/semver/v3/constraints.go | 11 +- .../Masterminds/sprig/v3/CHANGELOG.md | 36 + .../github.com/Masterminds/sprig/v3/crypto.go | 224 +- .../Masterminds/sprig/v3/defaults.go | 41 + .../github.com/Masterminds/sprig/v3/dict.go | 26 + .../Masterminds/sprig/v3/functions.go | 45 +- vendor/github.com/Masterminds/sprig/v3/go.mod | 7 +- vendor/github.com/Masterminds/sprig/v3/go.sum | 28 +- .../github.com/Masterminds/sprig/v3/list.go | 45 + .../Masterminds/sprig/v3/network.go | 2 +- .../Masterminds/sprig/v3/numeric.go | 31 + .../github.com/Masterminds/sprig/v3/regex.go | 4 + vendor/github.com/fatih/color/README.md | 32 +- vendor/github.com/fatih/color/color.go | 25 +- vendor/github.com/fatih/color/doc.go | 2 + vendor/github.com/fatih/color/go.mod | 4 +- vendor/github.com/fatih/color/go.sum | 17 +- vendor/github.com/go-logr/logr/LICENSE | 201 + vendor/github.com/go-logr/logr/README.md | 183 + vendor/github.com/go-logr/logr/discard.go | 51 + vendor/github.com/go-logr/logr/go.mod | 3 + vendor/github.com/go-logr/logr/logr.go | 266 + .../gogo/protobuf/proto/text_parser.go | 2 +- .../golang/protobuf/proto/registry.go | 10 +- .../golang/protobuf/proto/text_decode.go | 2 +- .../github.com/golang/protobuf/ptypes/any.go | 14 + .../github.com/golang/protobuf/ptypes/doc.go | 4 + .../golang/protobuf/ptypes/duration.go | 4 + .../golang/protobuf/ptypes/timestamp.go | 9 + vendor/github.com/google/go-cmp/LICENSE | 27 + .../github.com/google/go-cmp/cmp/compare.go | 682 + .../google/go-cmp/cmp/export_panic.go | 15 + .../google/go-cmp/cmp/export_unsafe.go | 35 + .../go-cmp/cmp/internal/diff/debug_disable.go | 17 + .../go-cmp/cmp/internal/diff/debug_enable.go | 122 + .../google/go-cmp/cmp/internal/diff/diff.go | 398 + .../google/go-cmp/cmp/internal/flags/flags.go | 9 + .../cmp/internal/flags/toolchain_legacy.go | 10 + .../cmp/internal/flags/toolchain_recent.go | 10 + .../go-cmp/cmp/internal/function/func.go | 99 + .../google/go-cmp/cmp/internal/value/name.go | 157 + .../cmp/internal/value/pointer_purego.go | 33 + .../cmp/internal/value/pointer_unsafe.go | 36 + .../google/go-cmp/cmp/internal/value/sort.go | 106 + .../google/go-cmp/cmp/internal/value/zero.go | 48 + .../github.com/google/go-cmp/cmp/options.go | 552 + vendor/github.com/google/go-cmp/cmp/path.go | 378 + vendor/github.com/google/go-cmp/cmp/report.go | 54 + .../google/go-cmp/cmp/report_compare.go | 432 + .../google/go-cmp/cmp/report_references.go | 264 + .../google/go-cmp/cmp/report_reflect.go | 402 + .../google/go-cmp/cmp/report_slices.go | 613 + .../google/go-cmp/cmp/report_text.go | 431 + .../google/go-cmp/cmp/report_value.go | 121 + .../google/go-jsonnet/.goreleaser.yml | 34 + .../github.com/google/go-jsonnet/.travis.yml | 8 +- .../github.com/google/go-jsonnet/BUILD.bazel | 1 + vendor/github.com/google/go-jsonnet/README.md | 17 +- .../google/go-jsonnet/astgen/BUILD.bazel | 2 +- .../google/go-jsonnet/astgen/stdast.go | 10570 ++++++++++++---- .../github.com/google/go-jsonnet/builtins.go | 573 +- 
.../github.com/google/go-jsonnet/imports.go | 10 +- .../go-jsonnet/internal/program/desugarer.go | 50 +- .../google/go-jsonnet/interpreter.go | 295 +- vendor/github.com/google/go-jsonnet/tests.sh | 5 +- vendor/github.com/google/go-jsonnet/thunks.go | 42 +- .../google/go-jsonnet/update_cpp_jsonnet.sh | 16 + vendor/github.com/google/go-jsonnet/util.go | 38 + vendor/github.com/google/go-jsonnet/value.go | 76 +- vendor/github.com/google/go-jsonnet/vm.go | 2 +- vendor/github.com/google/uuid/README.md | 2 +- vendor/github.com/google/uuid/marshal.go | 7 +- vendor/github.com/google/uuid/version1.go | 12 +- vendor/github.com/google/uuid/version4.go | 7 +- .../grafana/tanka/pkg/helm/charts.go | 99 +- .../github.com/grafana/tanka/pkg/helm/helm.go | 3 +- .../grafana/tanka/pkg/helm/jsonnet.go | 45 +- .../github.com/grafana/tanka/pkg/helm/spec.go | 15 +- .../grafana/tanka/pkg/helm/template.go | 16 +- .../grafana/tanka/pkg/jsonnet/eval.go | 104 +- .../grafana/tanka/pkg/jsonnet/evalcache.go | 48 + .../grafana/tanka/pkg/jsonnet/imports.go | 62 +- .../grafana/tanka/pkg/jsonnet/jpath/dirs.go | 124 + .../grafana/tanka/pkg/jsonnet/jpath/errors.go | 31 + .../grafana/tanka/pkg/jsonnet/jpath/jpath.go | 98 +- .../grafana/tanka/pkg/jsonnet/native/funcs.go | 2 + .../grafana/tanka/pkg/kubernetes/apply.go | 9 +- .../tanka/pkg/kubernetes/client/client.go | 15 +- .../tanka/pkg/kubernetes/client/delete.go | 14 +- .../tanka/pkg/kubernetes/client/diff.go | 42 +- .../tanka/pkg/kubernetes/client/errors.go | 7 + .../tanka/pkg/kubernetes/client/get.go | 19 +- .../tanka/pkg/kubernetes/client/kubectl.go | 38 +- .../tanka/pkg/kubernetes/client/resources.go | 10 +- .../grafana/tanka/pkg/kubernetes/diff.go | 33 +- .../tanka/pkg/kubernetes/kubernetes.go | 11 +- .../tanka/pkg/kubernetes/manifest/errors.go | 11 + .../tanka/pkg/kubernetes/manifest/manifest.go | 50 +- .../grafana/tanka/pkg/kubernetes/util/diff.go | 7 +- .../grafana/tanka/pkg/kustomize/build.go | 40 + .../grafana/tanka/pkg/kustomize/jsonnet.go | 86 + .../grafana/tanka/pkg/kustomize/kustomize.go | 39 + .../grafana/tanka/pkg/process/process.go | 14 +- .../github.com/grafana/tanka/pkg/spec/spec.go | 52 +- .../v1alpha1/{config.go => environment.go} | 35 +- .../grafana/tanka/pkg/tanka/errors.go | 39 + .../grafana/tanka/pkg/tanka/evaluators.go | 147 + .../grafana/tanka/pkg/tanka/export.go | 221 + .../grafana/tanka/pkg/tanka/find.go | 124 + .../grafana/tanka/pkg/tanka/format.go | 10 +- .../grafana/tanka/pkg/tanka/inline.go | 170 + .../grafana/tanka/pkg/tanka/load.go | 170 + .../grafana/tanka/pkg/tanka/parallel.go | 98 + .../grafana/tanka/pkg/tanka/parse.go | 188 - .../grafana/tanka/pkg/tanka/prune.go | 25 +- .../grafana/tanka/pkg/tanka/static.go | 102 + .../grafana/tanka/pkg/tanka/status.go | 6 +- .../grafana/tanka/pkg/tanka/tanka.go | 41 +- .../grafana/tanka/pkg/tanka/workflow.go | 33 +- .../grafana/tanka/pkg/term/alert.go | 34 +- vendor/github.com/huandu/xstrings/common.go | 8 +- vendor/github.com/huandu/xstrings/convert.go | 15 +- vendor/github.com/huandu/xstrings/format.go | 13 +- .../github.com/huandu/xstrings/manipulate.go | 5 +- .../huandu/xstrings/stringbuilder.go | 7 - .../huandu/xstrings/stringbuilder_go110.go | 9 - .../github.com/huandu/xstrings/translate.go | 9 +- vendor/github.com/imdario/mergo/README.md | 69 +- vendor/github.com/imdario/mergo/doc.go | 141 +- vendor/github.com/imdario/mergo/go.mod | 5 + vendor/github.com/imdario/mergo/go.sum | 4 + vendor/github.com/imdario/mergo/map.go | 10 +- vendor/github.com/imdario/mergo/merge.go | 269 +- 
vendor/github.com/imdario/mergo/mergo.go | 21 +- vendor/github.com/karrick/godirwalk/README.md | 161 +- .../karrick/godirwalk/readdir_unix.go | 11 +- .../karrick/godirwalk/scandir_unix.go | 16 +- vendor/github.com/karrick/godirwalk/walk.go | 67 +- .../mattn/go-colorable/colorable_windows.go | 4 +- .../mattn/go-colorable/noncolorable.go | 5 +- vendor/github.com/mattn/go-isatty/.travis.yml | 14 - vendor/github.com/mattn/go-isatty/go.mod | 2 +- .../github.com/mattn/go-isatty/isatty_bsd.go | 1 + .../mattn/go-isatty/isatty_others.go | 3 +- .../mattn/go-isatty/isatty_plan9.go | 1 + .../mattn/go-isatty/isatty_solaris.go | 9 +- .../mattn/go-isatty/isatty_tcgets.go | 3 +- .../mattn/go-isatty/isatty_windows.go | 6 +- .../github.com/mattn/go-isatty/renovate.json | 8 - .../github.com/shopspring/decimal/.gitignore | 6 + .../github.com/shopspring/decimal/.travis.yml | 13 + .../shopspring/decimal/CHANGELOG.md | 19 + vendor/github.com/shopspring/decimal/LICENSE | 45 + .../github.com/shopspring/decimal/README.md | 130 + .../shopspring/decimal/decimal-go.go | 415 + .../github.com/shopspring/decimal/decimal.go | 1477 +++ vendor/github.com/shopspring/decimal/go.mod | 3 + .../github.com/shopspring/decimal/rounding.go | 119 + vendor/github.com/stretchr/objx/accessors.go | 74 +- .../testify/assert/assertion_compare.go | 172 +- .../testify/assert/assertion_format.go | 97 + .../testify/assert/assertion_forward.go | 194 + .../testify/assert/assertion_order.go | 81 + .../stretchr/testify/assert/assertions.go | 83 +- vendor/github.com/thoas/go-funk/.travis.yml | 6 +- vendor/github.com/thoas/go-funk/Makefile | 8 +- vendor/github.com/thoas/go-funk/README.rst | 241 +- vendor/github.com/thoas/go-funk/assign.go | 129 + vendor/github.com/thoas/go-funk/builder.go | 8 +- .../github.com/thoas/go-funk/chain_builder.go | 14 +- vendor/github.com/thoas/go-funk/go.mod | 5 + vendor/github.com/thoas/go-funk/go.sum | 11 + vendor/github.com/thoas/go-funk/helpers.go | 23 +- .../github.com/thoas/go-funk/intersection.go | 185 + vendor/github.com/thoas/go-funk/join.go | 111 + .../thoas/go-funk/join_primitives.go | 373 + .../github.com/thoas/go-funk/lazy_builder.go | 14 +- vendor/github.com/thoas/go-funk/max.go | 178 + vendor/github.com/thoas/go-funk/min.go | 177 + vendor/github.com/thoas/go-funk/options.go | 24 + .../github.com/thoas/go-funk/permutation.go | 29 + vendor/github.com/thoas/go-funk/predicate.go | 47 + vendor/github.com/thoas/go-funk/presence.go | 53 +- vendor/github.com/thoas/go-funk/reduce.go | 5 +- vendor/github.com/thoas/go-funk/retrieve.go | 59 +- vendor/github.com/thoas/go-funk/short_if.go | 8 + vendor/github.com/thoas/go-funk/subset.go | 41 + .../github.com/thoas/go-funk/subtraction.go | 87 + vendor/github.com/thoas/go-funk/transform.go | 199 +- vendor/github.com/thoas/go-funk/typesafe.go | 555 +- vendor/github.com/thoas/go-funk/utils.go | 63 +- vendor/github.com/thoas/go-funk/without.go | 19 + vendor/github.com/thoas/go-funk/zip.go | 24 +- vendor/golang.org/x/crypto/acme/acme.go | 6 +- .../x/crypto/acme/autocert/autocert.go | 8 +- vendor/golang.org/x/crypto/acme/http.go | 4 + vendor/golang.org/x/crypto/acme/jws.go | 54 +- vendor/golang.org/x/crypto/acme/rfc8555.go | 56 +- vendor/golang.org/x/crypto/acme/types.go | 74 +- .../golang.org/x/crypto/acme/version_go112.go | 1 + .../x/crypto/chacha20/chacha_arm64.go | 3 +- .../x/crypto/chacha20/chacha_arm64.s | 3 +- .../x/crypto/chacha20/chacha_noasm.go | 3 +- .../x/crypto/chacha20/chacha_ppc64le.go | 3 +- .../x/crypto/chacha20/chacha_ppc64le.s | 3 +- 
.../x/crypto/chacha20/chacha_s390x.go | 3 +- .../x/crypto/chacha20/chacha_s390x.s | 3 +- .../x/crypto/curve25519/curve25519.go | 52 +- .../x/crypto/curve25519/curve25519_amd64.go | 240 - .../x/crypto/curve25519/curve25519_amd64.s | 1793 --- .../x/crypto/curve25519/curve25519_generic.go | 828 -- .../x/crypto/curve25519/curve25519_noasm.go | 11 - .../x/crypto/curve25519/internal/field/README | 7 + .../x/crypto/curve25519/internal/field/fe.go | 416 + .../curve25519/internal/field/fe_amd64.go | 13 + .../curve25519/internal/field/fe_amd64.s | 379 + .../internal/field/fe_amd64_noasm.go | 12 + .../curve25519/internal/field/fe_arm64.go | 16 + .../curve25519/internal/field/fe_arm64.s | 43 + .../internal/field/fe_arm64_noasm.go | 12 + .../curve25519/internal/field/fe_generic.go | 264 + .../curve25519/internal/field/sync.checkpoint | 1 + .../crypto/curve25519/internal/field/sync.sh | 19 + vendor/golang.org/x/crypto/ed25519/ed25519.go | 1 + .../x/crypto/ed25519/ed25519_go113.go | 1 + .../{ => internal}/poly1305/bits_compat.go | 1 + .../{ => internal}/poly1305/bits_go1.13.go | 1 + .../{ => internal}/poly1305/mac_noasm.go | 3 +- .../{ => internal}/poly1305/poly1305.go | 2 +- .../{ => internal}/poly1305/sum_amd64.go | 3 +- .../{ => internal}/poly1305/sum_amd64.s | 3 +- .../{ => internal}/poly1305/sum_generic.go | 0 .../{ => internal}/poly1305/sum_ppc64le.go | 3 +- .../{ => internal}/poly1305/sum_ppc64le.s | 21 +- .../{ => internal}/poly1305/sum_s390x.go | 3 +- .../{ => internal}/poly1305/sum_s390x.s | 5 +- .../x/crypto/internal/subtle/aliasing.go | 3 +- ...iasing_appengine.go => aliasing_purego.go} | 3 +- .../x/crypto/openpgp/armor/armor.go | 6 + .../x/crypto/openpgp/elgamal/elgamal.go | 6 + .../x/crypto/openpgp/errors/errors.go | 6 + .../x/crypto/openpgp/packet/packet.go | 6 + vendor/golang.org/x/crypto/openpgp/read.go | 6 + vendor/golang.org/x/crypto/openpgp/s2k/s2k.go | 6 + vendor/golang.org/x/crypto/scrypt/scrypt.go | 23 +- vendor/golang.org/x/crypto/ssh/cipher.go | 2 +- vendor/golang.org/x/crypto/ssh/client.go | 2 +- vendor/golang.org/x/crypto/ssh/client_auth.go | 2 +- vendor/golang.org/x/crypto/ssh/kex.go | 13 +- vendor/golang.org/x/crypto/ssh/server.go | 4 + .../x/crypto/ssh/terminal/terminal.go | 987 +- vendor/golang.org/x/net/context/go17.go | 1 + vendor/golang.org/x/net/context/go19.go | 1 + vendor/golang.org/x/net/context/pre_go17.go | 1 + vendor/golang.org/x/net/context/pre_go19.go | 1 + vendor/golang.org/x/net/html/foreign.go | 119 +- vendor/golang.org/x/net/html/parse.go | 39 +- .../golang.org/x/net/http/httpguts/httplex.go | 10 +- vendor/golang.org/x/net/http2/Dockerfile | 2 +- vendor/golang.org/x/net/http2/ascii.go | 49 + .../x/net/http2/client_conn_pool.go | 79 +- vendor/golang.org/x/net/http2/go111.go | 1 + vendor/golang.org/x/net/http2/go115.go | 27 + vendor/golang.org/x/net/http2/h2c/h2c.go | 10 +- vendor/golang.org/x/net/http2/headermap.go | 7 +- vendor/golang.org/x/net/http2/not_go111.go | 1 + vendor/golang.org/x/net/http2/not_go115.go | 31 + vendor/golang.org/x/net/http2/server.go | 56 +- vendor/golang.org/x/net/http2/transport.go | 103 +- vendor/golang.org/x/net/http2/write.go | 7 +- vendor/golang.org/x/net/idna/idna10.0.0.go | 114 +- vendor/golang.org/x/net/idna/idna9.0.0.go | 94 +- vendor/golang.org/x/net/idna/tables10.0.0.go | 1 + vendor/golang.org/x/net/idna/tables11.0.0.go | 1 + .../idna/{tables12.00.go => tables12.0.0.go} | 3 +- vendor/golang.org/x/net/idna/tables13.0.0.go | 4840 +++++++ vendor/golang.org/x/net/idna/tables9.0.0.go | 1 + 
vendor/golang.org/x/sys/cpu/asm_aix_ppc64.s | 1 + vendor/golang.org/x/sys/cpu/cpu.go | 5 +- vendor/golang.org/x/sys/cpu/cpu_aix.go | 2 + vendor/golang.org/x/sys/cpu/cpu_arm64.s | 1 + vendor/golang.org/x/sys/cpu/cpu_gc_arm64.go | 1 + vendor/golang.org/x/sys/cpu/cpu_gc_s390x.go | 1 + vendor/golang.org/x/sys/cpu/cpu_gc_x86.go | 5 + .../golang.org/x/sys/cpu/cpu_gccgo_arm64.go | 1 + .../golang.org/x/sys/cpu/cpu_gccgo_s390x.go | 1 + vendor/golang.org/x/sys/cpu/cpu_gccgo_x86.go | 7 + vendor/golang.org/x/sys/cpu/cpu_linux.go | 1 + .../golang.org/x/sys/cpu/cpu_linux_mips64x.go | 1 + .../golang.org/x/sys/cpu/cpu_linux_noinit.go | 1 + .../golang.org/x/sys/cpu/cpu_linux_ppc64x.go | 1 + vendor/golang.org/x/sys/cpu/cpu_mips64x.go | 1 + vendor/golang.org/x/sys/cpu/cpu_mipsx.go | 1 + vendor/golang.org/x/sys/cpu/cpu_other_arm.go | 1 + .../golang.org/x/sys/cpu/cpu_other_arm64.go | 4 +- .../golang.org/x/sys/cpu/cpu_other_mips64x.go | 1 + vendor/golang.org/x/sys/cpu/cpu_ppc64x.go | 1 + vendor/golang.org/x/sys/cpu/cpu_riscv64.go | 1 + vendor/golang.org/x/sys/cpu/cpu_s390x.s | 1 + vendor/golang.org/x/sys/cpu/cpu_wasm.go | 1 + vendor/golang.org/x/sys/cpu/cpu_x86.go | 11 +- vendor/golang.org/x/sys/cpu/cpu_x86.s | 25 + .../golang.org/x/sys/cpu/syscall_aix_gccgo.go | 4 +- .../x/sys/cpu/syscall_aix_ppc64_gc.go | 4 +- vendor/golang.org/x/sys/plan9/asm.s | 8 + .../asm_plan9_386.s} | 19 +- .../asm_plan9_amd64.s} | 17 +- .../asm_plan9_arm.s} | 18 +- vendor/golang.org/x/sys/plan9/const_plan9.go | 70 + vendor/golang.org/x/sys/plan9/dir_plan9.go | 212 + vendor/golang.org/x/sys/plan9/env_plan9.go | 31 + vendor/golang.org/x/sys/plan9/errors_plan9.go | 50 + vendor/golang.org/x/sys/plan9/mkall.sh | 150 + vendor/golang.org/x/sys/plan9/mkerrors.sh | 246 + .../golang.org/x/sys/plan9/mksysnum_plan9.sh | 23 + .../golang.org/x/sys/plan9/pwd_go15_plan9.go | 21 + vendor/golang.org/x/sys/plan9/pwd_plan9.go | 23 + vendor/golang.org/x/sys/plan9/race.go | 30 + vendor/golang.org/x/sys/plan9/race0.go | 25 + vendor/golang.org/x/sys/plan9/str.go | 22 + vendor/golang.org/x/sys/plan9/syscall.go | 116 + .../golang.org/x/sys/plan9/syscall_plan9.go | 349 + .../x/sys/plan9/zsyscall_plan9_386.go | 284 + .../x/sys/plan9/zsyscall_plan9_amd64.go | 284 + .../x/sys/plan9/zsyscall_plan9_arm.go | 284 + .../golang.org/x/sys/plan9/zsysnum_plan9.go | 49 + vendor/golang.org/x/sys/unix/README.md | 6 +- vendor/golang.org/x/sys/unix/aliases.go | 3 +- vendor/golang.org/x/sys/unix/asm_aix_ppc64.s | 1 + .../unix/{asm_freebsd_386.s => asm_bsd_386.s} | 10 +- .../{asm_openbsd_amd64.s => asm_bsd_amd64.s} | 8 +- .../unix/{asm_freebsd_arm.s => asm_bsd_arm.s} | 8 +- .../{asm_netbsd_amd64.s => asm_bsd_arm64.s} | 8 +- .../golang.org/x/sys/unix/asm_darwin_amd64.s | 29 - vendor/golang.org/x/sys/unix/asm_darwin_arm.s | 30 - .../golang.org/x/sys/unix/asm_darwin_arm64.s | 30 - .../x/sys/unix/asm_dragonfly_amd64.s | 29 - .../golang.org/x/sys/unix/asm_freebsd_arm64.s | 29 - vendor/golang.org/x/sys/unix/asm_linux_386.s | 1 + .../golang.org/x/sys/unix/asm_linux_amd64.s | 1 + vendor/golang.org/x/sys/unix/asm_linux_arm.s | 1 + .../golang.org/x/sys/unix/asm_linux_arm64.s | 1 + .../golang.org/x/sys/unix/asm_linux_mips64x.s | 1 + .../golang.org/x/sys/unix/asm_linux_mipsx.s | 1 + .../golang.org/x/sys/unix/asm_linux_ppc64x.s | 1 + .../golang.org/x/sys/unix/asm_linux_riscv64.s | 4 +- .../golang.org/x/sys/unix/asm_linux_s390x.s | 3 +- vendor/golang.org/x/sys/unix/asm_netbsd_arm.s | 29 - .../golang.org/x/sys/unix/asm_netbsd_arm64.s | 29 - .../golang.org/x/sys/unix/asm_openbsd_386.s | 29 - 
.../golang.org/x/sys/unix/asm_openbsd_arm.s | 29 - .../golang.org/x/sys/unix/asm_openbsd_arm64.s | 29 - .../x/sys/unix/asm_openbsd_mips64.s | 1 + .../golang.org/x/sys/unix/asm_solaris_amd64.s | 1 + vendor/golang.org/x/sys/unix/asm_zos_s390x.s | 426 + vendor/golang.org/x/sys/unix/cap_freebsd.go | 1 + vendor/golang.org/x/sys/unix/constants.go | 3 +- vendor/golang.org/x/sys/unix/dev_aix_ppc.go | 4 +- vendor/golang.org/x/sys/unix/dev_aix_ppc64.go | 4 +- vendor/golang.org/x/sys/unix/dev_zos.go | 29 + vendor/golang.org/x/sys/unix/dirent.go | 1 + vendor/golang.org/x/sys/unix/endian_big.go | 1 + vendor/golang.org/x/sys/unix/endian_little.go | 1 + vendor/golang.org/x/sys/unix/env_unix.go | 3 +- vendor/golang.org/x/sys/unix/epoll_zos.go | 221 + vendor/golang.org/x/sys/unix/fcntl.go | 1 + .../x/sys/unix/fcntl_linux_32bit.go | 3 +- vendor/golang.org/x/sys/unix/fdset.go | 3 +- vendor/golang.org/x/sys/unix/fstatfs_zos.go | 164 + vendor/golang.org/x/sys/unix/gccgo.go | 4 +- .../x/sys/unix/gccgo_linux_amd64.go | 1 + vendor/golang.org/x/sys/unix/ioctl.go | 1 + vendor/golang.org/x/sys/unix/ioctl_linux.go | 196 + vendor/golang.org/x/sys/unix/ioctl_zos.go | 74 + vendor/golang.org/x/sys/unix/mkall.sh | 14 +- vendor/golang.org/x/sys/unix/mkerrors.sh | 42 +- vendor/golang.org/x/sys/unix/pagesize_unix.go | 1 + vendor/golang.org/x/sys/unix/ptrace_darwin.go | 12 + vendor/golang.org/x/sys/unix/ptrace_ios.go | 12 + vendor/golang.org/x/sys/unix/race.go | 1 + vendor/golang.org/x/sys/unix/race0.go | 3 +- .../x/sys/unix/readdirent_getdents.go | 1 + .../x/sys/unix/readdirent_getdirentries.go | 1 + vendor/golang.org/x/sys/unix/sockcmsg_unix.go | 3 +- .../x/sys/unix/sockcmsg_unix_other.go | 7 +- vendor/golang.org/x/sys/unix/str.go | 1 + vendor/golang.org/x/sys/unix/syscall.go | 3 +- vendor/golang.org/x/sys/unix/syscall_aix.go | 13 +- .../golang.org/x/sys/unix/syscall_aix_ppc.go | 4 +- .../x/sys/unix/syscall_aix_ppc64.go | 4 +- vendor/golang.org/x/sys/unix/syscall_bsd.go | 9 +- .../x/sys/unix/syscall_darwin.1_12.go | 1 + .../x/sys/unix/syscall_darwin.1_13.go | 5 +- .../golang.org/x/sys/unix/syscall_darwin.go | 68 +- .../x/sys/unix/syscall_darwin_386.go | 50 - .../x/sys/unix/syscall_darwin_amd64.go | 3 +- .../x/sys/unix/syscall_darwin_arm.go | 51 - .../x/sys/unix/syscall_darwin_arm64.go | 3 +- .../x/sys/unix/syscall_darwin_libSystem.go | 10 +- .../x/sys/unix/syscall_dragonfly.go | 18 +- .../x/sys/unix/syscall_dragonfly_amd64.go | 1 + .../golang.org/x/sys/unix/syscall_freebsd.go | 17 +- .../x/sys/unix/syscall_freebsd_386.go | 1 + .../x/sys/unix/syscall_freebsd_amd64.go | 1 + .../x/sys/unix/syscall_freebsd_arm.go | 1 + .../x/sys/unix/syscall_freebsd_arm64.go | 1 + .../golang.org/x/sys/unix/syscall_illumos.go | 108 +- vendor/golang.org/x/sys/unix/syscall_linux.go | 193 +- .../x/sys/unix/syscall_linux_386.go | 11 +- .../x/sys/unix/syscall_linux_amd64.go | 7 +- .../x/sys/unix/syscall_linux_amd64_gc.go | 4 +- .../x/sys/unix/syscall_linux_arm.go | 15 +- .../x/sys/unix/syscall_linux_arm64.go | 7 +- .../golang.org/x/sys/unix/syscall_linux_gc.go | 1 + .../x/sys/unix/syscall_linux_gc_386.go | 1 + .../x/sys/unix/syscall_linux_gc_arm.go | 1 + .../x/sys/unix/syscall_linux_gccgo_386.go | 1 + .../x/sys/unix/syscall_linux_gccgo_arm.go | 1 + .../x/sys/unix/syscall_linux_mips64x.go | 7 +- .../x/sys/unix/syscall_linux_mipsx.go | 13 +- .../x/sys/unix/syscall_linux_ppc.go | 276 + .../x/sys/unix/syscall_linux_ppc64x.go | 9 +- .../x/sys/unix/syscall_linux_riscv64.go | 7 +- .../x/sys/unix/syscall_linux_s390x.go | 9 +- 
.../x/sys/unix/syscall_linux_sparc64.go | 9 +- .../golang.org/x/sys/unix/syscall_netbsd.go | 21 +- .../x/sys/unix/syscall_netbsd_386.go | 1 + .../x/sys/unix/syscall_netbsd_amd64.go | 1 + .../x/sys/unix/syscall_netbsd_arm.go | 1 + .../x/sys/unix/syscall_netbsd_arm64.go | 1 + .../golang.org/x/sys/unix/syscall_openbsd.go | 4 +- .../x/sys/unix/syscall_openbsd_386.go | 1 + .../x/sys/unix/syscall_openbsd_amd64.go | 1 + .../x/sys/unix/syscall_openbsd_arm.go | 1 + .../x/sys/unix/syscall_openbsd_arm64.go | 1 + .../golang.org/x/sys/unix/syscall_solaris.go | 23 +- .../x/sys/unix/syscall_solaris_amd64.go | 1 + vendor/golang.org/x/sys/unix/syscall_unix.go | 1 + .../golang.org/x/sys/unix/syscall_unix_gc.go | 5 +- .../x/sys/unix/syscall_unix_gc_ppc64x.go | 1 + .../x/sys/unix/syscall_zos_s390x.go | 1829 +++ vendor/golang.org/x/sys/unix/timestruct.go | 29 +- vendor/golang.org/x/sys/unix/xattr_bsd.go | 1 + .../golang.org/x/sys/unix/zerrors_aix_ppc.go | 1 + .../x/sys/unix/zerrors_aix_ppc64.go | 1 + .../x/sys/unix/zerrors_darwin_386.go | 1788 --- .../x/sys/unix/zerrors_darwin_amd64.go | 93 +- .../x/sys/unix/zerrors_darwin_arm.go | 1788 --- .../x/sys/unix/zerrors_darwin_arm64.go | 93 +- .../x/sys/unix/zerrors_dragonfly_amd64.go | 1 + .../x/sys/unix/zerrors_freebsd_386.go | 12 + .../x/sys/unix/zerrors_freebsd_amd64.go | 12 + .../x/sys/unix/zerrors_freebsd_arm.go | 21 + .../x/sys/unix/zerrors_freebsd_arm64.go | 12 + vendor/golang.org/x/sys/unix/zerrors_linux.go | 286 +- .../x/sys/unix/zerrors_linux_386.go | 32 +- .../x/sys/unix/zerrors_linux_amd64.go | 32 +- .../x/sys/unix/zerrors_linux_arm.go | 32 +- .../x/sys/unix/zerrors_linux_arm64.go | 35 +- .../x/sys/unix/zerrors_linux_mips.go | 32 +- .../x/sys/unix/zerrors_linux_mips64.go | 32 +- .../x/sys/unix/zerrors_linux_mips64le.go | 32 +- .../x/sys/unix/zerrors_linux_mipsle.go | 32 +- .../x/sys/unix/zerrors_linux_ppc.go | 879 ++ .../x/sys/unix/zerrors_linux_ppc64.go | 32 +- .../x/sys/unix/zerrors_linux_ppc64le.go | 32 +- .../x/sys/unix/zerrors_linux_riscv64.go | 32 +- .../x/sys/unix/zerrors_linux_s390x.go | 34 +- .../x/sys/unix/zerrors_linux_sparc64.go | 32 +- .../x/sys/unix/zerrors_netbsd_386.go | 1 + .../x/sys/unix/zerrors_netbsd_amd64.go | 1 + .../x/sys/unix/zerrors_netbsd_arm.go | 1 + .../x/sys/unix/zerrors_netbsd_arm64.go | 1 + .../x/sys/unix/zerrors_openbsd_386.go | 1 + .../x/sys/unix/zerrors_openbsd_amd64.go | 1 + .../x/sys/unix/zerrors_openbsd_arm.go | 1 + .../x/sys/unix/zerrors_openbsd_arm64.go | 1 + .../x/sys/unix/zerrors_openbsd_mips64.go | 1 + .../x/sys/unix/zerrors_solaris_amd64.go | 4 + .../x/sys/unix/zerrors_zos_s390x.go | 860 ++ .../x/sys/unix/zptrace_armnn_linux.go | 1 + .../x/sys/unix/zptrace_mipsnn_linux.go | 1 + .../x/sys/unix/zptrace_mipsnnle_linux.go | 1 + .../x/sys/unix/zptrace_x86_linux.go | 1 + .../golang.org/x/sys/unix/zsyscall_aix_ppc.go | 1 + .../x/sys/unix/zsyscall_aix_ppc64.go | 1 + .../x/sys/unix/zsyscall_aix_ppc64_gc.go | 4 +- .../x/sys/unix/zsyscall_aix_ppc64_gccgo.go | 4 +- .../x/sys/unix/zsyscall_darwin_386.1_13.go | 39 - .../x/sys/unix/zsyscall_darwin_386.1_13.s | 12 - .../x/sys/unix/zsyscall_darwin_386.go | 2432 ---- .../x/sys/unix/zsyscall_darwin_386.s | 290 - .../x/sys/unix/zsyscall_darwin_amd64.1_13.go | 9 +- .../x/sys/unix/zsyscall_darwin_amd64.1_13.s | 19 +- .../x/sys/unix/zsyscall_darwin_amd64.go | 579 +- .../x/sys/unix/zsyscall_darwin_amd64.s | 853 +- .../x/sys/unix/zsyscall_darwin_arm.1_13.go | 39 - .../x/sys/unix/zsyscall_darwin_arm.1_13.s | 12 - .../x/sys/unix/zsyscall_darwin_arm.go | 2418 ---- 
.../x/sys/unix/zsyscall_darwin_arm.s | 288 - .../x/sys/unix/zsyscall_darwin_arm64.1_13.go | 9 +- .../x/sys/unix/zsyscall_darwin_arm64.1_13.s | 19 +- .../x/sys/unix/zsyscall_darwin_arm64.go | 579 +- .../x/sys/unix/zsyscall_darwin_arm64.s | 853 +- .../x/sys/unix/zsyscall_dragonfly_amd64.go | 7 +- .../x/sys/unix/zsyscall_freebsd_386.go | 1 + .../x/sys/unix/zsyscall_freebsd_amd64.go | 1 + .../x/sys/unix/zsyscall_freebsd_arm.go | 1 + .../x/sys/unix/zsyscall_freebsd_arm64.go | 1 + .../x/sys/unix/zsyscall_illumos_amd64.go | 24 +- .../golang.org/x/sys/unix/zsyscall_linux.go | 11 + .../x/sys/unix/zsyscall_linux_386.go | 1 + .../x/sys/unix/zsyscall_linux_amd64.go | 1 + .../x/sys/unix/zsyscall_linux_arm.go | 1 + .../x/sys/unix/zsyscall_linux_arm64.go | 1 + .../x/sys/unix/zsyscall_linux_mips.go | 1 + .../x/sys/unix/zsyscall_linux_mips64.go | 1 + .../x/sys/unix/zsyscall_linux_mips64le.go | 1 + .../x/sys/unix/zsyscall_linux_mipsle.go | 1 + .../x/sys/unix/zsyscall_linux_ppc.go | 762 ++ .../x/sys/unix/zsyscall_linux_ppc64.go | 1 + .../x/sys/unix/zsyscall_linux_ppc64le.go | 1 + .../x/sys/unix/zsyscall_linux_riscv64.go | 1 + .../x/sys/unix/zsyscall_linux_s390x.go | 1 + .../x/sys/unix/zsyscall_linux_sparc64.go | 1 + .../x/sys/unix/zsyscall_netbsd_386.go | 11 + .../x/sys/unix/zsyscall_netbsd_amd64.go | 11 + .../x/sys/unix/zsyscall_netbsd_arm.go | 11 + .../x/sys/unix/zsyscall_netbsd_arm64.go | 11 + .../x/sys/unix/zsyscall_openbsd_386.go | 1 + .../x/sys/unix/zsyscall_openbsd_amd64.go | 1 + .../x/sys/unix/zsyscall_openbsd_arm.go | 1 + .../x/sys/unix/zsyscall_openbsd_arm64.go | 1 + .../x/sys/unix/zsyscall_openbsd_mips64.go | 1 + .../x/sys/unix/zsyscall_solaris_amd64.go | 33 +- .../x/sys/unix/zsyscall_zos_s390x.go | 1255 ++ .../x/sys/unix/zsysctl_openbsd_386.go | 1 + .../x/sys/unix/zsysctl_openbsd_amd64.go | 1 + .../x/sys/unix/zsysctl_openbsd_arm.go | 1 + .../x/sys/unix/zsysctl_openbsd_arm64.go | 1 + .../x/sys/unix/zsysctl_openbsd_mips64.go | 1 + .../x/sys/unix/zsysnum_darwin_386.go | 437 - .../x/sys/unix/zsysnum_darwin_amd64.go | 1 + .../x/sys/unix/zsysnum_darwin_arm.go | 437 - .../x/sys/unix/zsysnum_darwin_arm64.go | 1 + .../x/sys/unix/zsysnum_dragonfly_amd64.go | 1 + .../x/sys/unix/zsysnum_freebsd_386.go | 1 + .../x/sys/unix/zsysnum_freebsd_amd64.go | 1 + .../x/sys/unix/zsysnum_freebsd_arm.go | 1 + .../x/sys/unix/zsysnum_freebsd_arm64.go | 1 + .../x/sys/unix/zsysnum_linux_386.go | 4 + .../x/sys/unix/zsysnum_linux_amd64.go | 4 + .../x/sys/unix/zsysnum_linux_arm.go | 4 + .../x/sys/unix/zsysnum_linux_arm64.go | 4 + .../x/sys/unix/zsysnum_linux_mips.go | 4 + .../x/sys/unix/zsysnum_linux_mips64.go | 4 + .../x/sys/unix/zsysnum_linux_mips64le.go | 4 + .../x/sys/unix/zsysnum_linux_mipsle.go | 4 + .../x/sys/unix/zsysnum_linux_ppc.go | 434 + .../x/sys/unix/zsysnum_linux_ppc64.go | 4 + .../x/sys/unix/zsysnum_linux_ppc64le.go | 4 + .../x/sys/unix/zsysnum_linux_riscv64.go | 4 + .../x/sys/unix/zsysnum_linux_s390x.go | 4 + .../x/sys/unix/zsysnum_linux_sparc64.go | 4 + .../x/sys/unix/zsysnum_netbsd_386.go | 1 + .../x/sys/unix/zsysnum_netbsd_amd64.go | 1 + .../x/sys/unix/zsysnum_netbsd_arm.go | 1 + .../x/sys/unix/zsysnum_netbsd_arm64.go | 1 + .../x/sys/unix/zsysnum_openbsd_386.go | 1 + .../x/sys/unix/zsysnum_openbsd_amd64.go | 1 + .../x/sys/unix/zsysnum_openbsd_arm.go | 1 + .../x/sys/unix/zsysnum_openbsd_arm64.go | 1 + .../x/sys/unix/zsysnum_openbsd_mips64.go | 1 + .../x/sys/unix/zsysnum_zos_s390x.go | 2670 ++++ .../golang.org/x/sys/unix/ztypes_aix_ppc.go | 2 + .../golang.org/x/sys/unix/ztypes_aix_ppc64.go | 2 + 
.../x/sys/unix/ztypes_darwin_386.go | 516 - .../x/sys/unix/ztypes_darwin_amd64.go | 120 + .../x/sys/unix/ztypes_darwin_arm.go | 516 - .../x/sys/unix/ztypes_darwin_arm64.go | 120 + .../x/sys/unix/ztypes_dragonfly_amd64.go | 5 + .../x/sys/unix/ztypes_freebsd_386.go | 16 +- .../x/sys/unix/ztypes_freebsd_amd64.go | 16 +- .../x/sys/unix/ztypes_freebsd_arm.go | 16 +- .../x/sys/unix/ztypes_freebsd_arm64.go | 16 +- .../x/sys/unix/ztypes_illumos_amd64.go | 40 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 1724 ++- .../golang.org/x/sys/unix/ztypes_linux_386.go | 21 +- .../x/sys/unix/ztypes_linux_amd64.go | 21 +- .../golang.org/x/sys/unix/ztypes_linux_arm.go | 21 +- .../x/sys/unix/ztypes_linux_arm64.go | 21 +- .../x/sys/unix/ztypes_linux_mips.go | 21 +- .../x/sys/unix/ztypes_linux_mips64.go | 21 +- .../x/sys/unix/ztypes_linux_mips64le.go | 21 +- .../x/sys/unix/ztypes_linux_mipsle.go | 21 +- .../golang.org/x/sys/unix/ztypes_linux_ppc.go | 639 + .../x/sys/unix/ztypes_linux_ppc64.go | 21 +- .../x/sys/unix/ztypes_linux_ppc64le.go | 21 +- .../x/sys/unix/ztypes_linux_riscv64.go | 21 +- .../x/sys/unix/ztypes_linux_s390x.go | 21 +- .../x/sys/unix/ztypes_linux_sparc64.go | 21 +- .../x/sys/unix/ztypes_netbsd_386.go | 6 +- .../x/sys/unix/ztypes_netbsd_amd64.go | 6 +- .../x/sys/unix/ztypes_netbsd_arm.go | 6 +- .../x/sys/unix/ztypes_netbsd_arm64.go | 6 +- .../x/sys/unix/ztypes_openbsd_386.go | 6 +- .../x/sys/unix/ztypes_openbsd_amd64.go | 6 +- .../x/sys/unix/ztypes_openbsd_arm.go | 6 +- .../x/sys/unix/ztypes_openbsd_arm64.go | 6 +- .../x/sys/unix/ztypes_openbsd_mips64.go | 6 +- .../x/sys/unix/ztypes_solaris_amd64.go | 2 + .../golang.org/x/sys/unix/ztypes_zos_s390x.go | 406 + vendor/golang.org/x/sys/windows/empty.s | 1 + .../golang.org/x/sys/windows/exec_windows.go | 98 + vendor/golang.org/x/sys/windows/mkerrors.bash | 7 + .../x/sys/windows/security_windows.go | 29 +- .../x/sys/windows/syscall_windows.go | 225 +- .../golang.org/x/sys/windows/types_windows.go | 993 +- .../x/sys/windows/types_windows_arm64.go | 34 + .../x/sys/windows/zerrors_windows.go | 2619 +++- .../x/sys/windows/zsyscall_windows.go | 487 +- vendor/golang.org/x/term/AUTHORS | 3 + vendor/golang.org/x/term/CONTRIBUTING.md | 26 + vendor/golang.org/x/term/CONTRIBUTORS | 3 + vendor/golang.org/x/term/LICENSE | 27 + vendor/golang.org/x/term/PATENTS | 22 + vendor/golang.org/x/term/README.md | 17 + vendor/golang.org/x/term/go.mod | 5 + vendor/golang.org/x/term/go.sum | 2 + .../terminal/util_plan9.go => term/term.go} | 36 +- vendor/golang.org/x/term/term_plan9.go | 42 + .../util_solaris.go => term/term_solaris.go} | 41 +- .../terminal/util.go => term/term_unix.go} | 47 +- .../util_linux.go => term/term_unix_aix.go} | 4 +- .../util_bsd.go => term/term_unix_bsd.go} | 2 +- .../util_aix.go => term/term_unix_linux.go} | 6 +- vendor/golang.org/x/term/term_unix_zos.go | 10 + vendor/golang.org/x/term/term_unsupported.go | 38 + .../util_windows.go => term/term_windows.go} | 48 +- vendor/golang.org/x/term/terminal.go | 987 ++ .../x/text/secure/bidirule/bidirule10.0.0.go | 1 + .../x/text/secure/bidirule/bidirule9.0.0.go | 1 + vendor/golang.org/x/text/unicode/bidi/bidi.go | 221 +- vendor/golang.org/x/text/unicode/bidi/core.go | 63 +- .../x/text/unicode/bidi/tables10.0.0.go | 1 + .../x/text/unicode/bidi/tables11.0.0.go | 1 + .../x/text/unicode/bidi/tables12.0.0.go | 3 +- .../x/text/unicode/bidi/tables13.0.0.go | 1956 +++ .../x/text/unicode/bidi/tables9.0.0.go | 1 + .../x/text/unicode/norm/tables10.0.0.go | 1 + .../x/text/unicode/norm/tables11.0.0.go | 1 + 
.../x/text/unicode/norm/tables12.0.0.go | 3 +- .../x/text/unicode/norm/tables13.0.0.go | 7761 ++++++++++++ .../x/text/unicode/norm/tables9.0.0.go | 1 + .../golang.org/x/text/width/tables10.0.0.go | 1 + .../golang.org/x/text/width/tables11.0.0.go | 1 + .../golang.org/x/text/width/tables12.0.0.go | 3 +- .../golang.org/x/text/width/tables13.0.0.go | 1352 ++ vendor/golang.org/x/text/width/tables9.0.0.go | 1 + .../googleapis/rpc/status/status.pb.go | 14 +- .../protobuf/encoding/prototext/decode.go | 95 +- .../protobuf/encoding/prototext/encode.go | 96 +- .../protobuf/internal/descfmt/stringer.go | 2 + .../protobuf/internal/detrand/rand.go | 8 + .../encoding/messageset/messageset.go | 33 +- .../protobuf/internal/encoding/tag/tag.go | 2 +- .../protobuf/internal/encoding/text/encode.go | 8 +- .../protobuf/internal/fieldnum/any_gen.go | 13 - .../protobuf/internal/fieldnum/api_gen.go | 35 - .../internal/fieldnum/descriptor_gen.go | 240 - .../protobuf/internal/fieldnum/doc.go | 7 - .../internal/fieldnum/duration_gen.go | 13 - .../protobuf/internal/fieldnum/empty_gen.go | 10 - .../internal/fieldnum/field_mask_gen.go | 12 - .../internal/fieldnum/source_context_gen.go | 12 - .../protobuf/internal/fieldnum/struct_gen.go | 33 - .../internal/fieldnum/timestamp_gen.go | 13 - .../protobuf/internal/fieldnum/type_gen.go | 53 - .../internal/fieldnum/wrappers_gen.go | 52 - .../protobuf/internal/fieldsort/fieldsort.go | 40 - .../protobuf/internal/filedesc/build.go | 19 +- .../protobuf/internal/filedesc/desc.go | 82 +- .../protobuf/internal/filedesc/desc_init.go | 62 +- .../protobuf/internal/filedesc/desc_lazy.go | 128 +- .../protobuf/internal/filedesc/desc_list.go | 176 +- .../internal/filedesc/desc_list_gen.go | 11 + .../protobuf/internal/genid/any_gen.go | 34 + .../protobuf/internal/genid/api_gen.go | 106 + .../protobuf/internal/genid/descriptor_gen.go | 829 ++ .../protobuf/internal/genid/doc.go | 11 + .../protobuf/internal/genid/duration_gen.go | 34 + .../protobuf/internal/genid/empty_gen.go | 19 + .../protobuf/internal/genid/field_mask_gen.go | 31 + .../protobuf/internal/genid/goname.go | 25 + .../protobuf/internal/genid/map_entry.go | 16 + .../internal/genid/source_context_gen.go | 31 + .../protobuf/internal/genid/struct_gen.go | 116 + .../protobuf/internal/genid/timestamp_gen.go | 34 + .../protobuf/internal/genid/type_gen.go | 184 + .../protobuf/internal/genid/wrappers.go | 13 + .../protobuf/internal/genid/wrappers_gen.go | 175 + .../protobuf/internal/genname/name.go | 25 - .../protobuf/internal/impl/api_export.go | 9 +- .../protobuf/internal/impl/codec_field.go | 18 +- .../protobuf/internal/impl/codec_gen.go | 974 +- .../protobuf/internal/impl/codec_map.go | 24 +- .../protobuf/internal/impl/codec_message.go | 68 +- .../internal/impl/codec_messageset.go | 21 +- .../protobuf/internal/impl/codec_reflect.go | 8 +- .../protobuf/internal/impl/convert.go | 29 + .../protobuf/internal/impl/decode.go | 16 +- .../protobuf/internal/impl/encode.go | 10 +- .../protobuf/internal/impl/legacy_export.go | 2 +- .../internal/impl/legacy_extension.go | 3 +- .../protobuf/internal/impl/legacy_message.go | 122 +- .../protobuf/internal/impl/merge.go | 6 +- .../protobuf/internal/impl/message.go | 79 +- .../protobuf/internal/impl/message_reflect.go | 125 +- .../internal/impl/message_reflect_field.go | 85 +- .../protobuf/internal/impl/pointer_reflect.go | 1 + .../protobuf/internal/impl/pointer_unsafe.go | 1 + .../protobuf/internal/impl/validate.go | 5 +- .../protobuf/internal/mapsort/mapsort.go | 43 - 
.../protobuf/internal/order/order.go | 89 + .../protobuf/internal/order/range.go | 115 + .../protobuf/internal/version/version.go | 2 +- .../protobuf/proto/decode.go | 23 +- .../protobuf/proto/decode_gen.go | 128 +- .../protobuf/proto/encode.go | 55 +- .../google.golang.org/protobuf/proto/equal.go | 25 +- .../protobuf/proto/messageset.go | 7 +- .../google.golang.org/protobuf/proto/proto.go | 9 + .../protobuf/reflect/protodesc/desc.go | 276 + .../protobuf/reflect/protodesc/desc_init.go | 248 + .../reflect/protodesc/desc_resolve.go | 286 + .../reflect/protodesc/desc_validate.go | 374 + .../protobuf/reflect/protodesc/proto.go | 252 + .../protobuf/reflect/protoreflect/proto.go | 50 +- .../protobuf/reflect/protoreflect/source.go | 84 +- .../reflect/protoreflect/source_gen.go | 461 + .../protobuf/reflect/protoreflect/type.go | 34 + .../reflect/protoregistry/registry.go | 157 +- .../types/descriptorpb/descriptor.pb.go | 4039 ++++++ .../protobuf/types/known/anypb/any.pb.go | 229 +- .../types/known/durationpb/duration.pb.go | 150 +- .../types/known/timestamppb/timestamp.pb.go | 139 +- vendor/gopkg.in/yaml.v2/.travis.yml | 1 + vendor/gopkg.in/yaml.v2/apic.go | 6 +- vendor/gopkg.in/yaml.v2/go.mod | 8 +- vendor/gopkg.in/yaml.v2/yaml.go | 14 +- vendor/gopkg.in/yaml.v3/.travis.yml | 16 - vendor/gopkg.in/yaml.v3/apic.go | 1 + vendor/gopkg.in/yaml.v3/decode.go | 65 +- vendor/gopkg.in/yaml.v3/emitterc.go | 58 +- vendor/gopkg.in/yaml.v3/encode.go | 30 +- vendor/gopkg.in/yaml.v3/parserc.go | 48 +- vendor/gopkg.in/yaml.v3/scannerc.go | 49 +- vendor/gopkg.in/yaml.v3/yaml.go | 40 +- vendor/gopkg.in/yaml.v3/yamlh.go | 2 + vendor/k8s.io/apimachinery/LICENSE | 202 + vendor/k8s.io/apimachinery/pkg/labels/doc.go | 19 + .../k8s.io/apimachinery/pkg/labels/labels.go | 172 + .../apimachinery/pkg/labels/selector.go | 956 ++ .../pkg/labels/zz_generated.deepcopy.go | 42 + .../apimachinery/pkg/selection/operator.go | 33 + .../apimachinery/pkg/util/errors/doc.go | 18 + .../apimachinery/pkg/util/errors/errors.go | 249 + .../k8s.io/apimachinery/pkg/util/sets/byte.go | 205 + .../k8s.io/apimachinery/pkg/util/sets/doc.go | 20 + .../apimachinery/pkg/util/sets/empty.go | 23 + .../k8s.io/apimachinery/pkg/util/sets/int.go | 205 + .../apimachinery/pkg/util/sets/int32.go | 205 + .../apimachinery/pkg/util/sets/int64.go | 205 + .../apimachinery/pkg/util/sets/string.go | 205 + .../pkg/util/validation/field/errors.go | 272 + .../pkg/util/validation/field/path.go | 117 + .../pkg/util/validation/validation.go | 503 + vendor/k8s.io/klog/v2/.gitignore | 17 + vendor/k8s.io/klog/v2/CONTRIBUTING.md | 22 + vendor/k8s.io/klog/v2/LICENSE | 191 + vendor/k8s.io/klog/v2/OWNERS | 19 + vendor/k8s.io/klog/v2/README.md | 103 + vendor/k8s.io/klog/v2/RELEASE.md | 9 + vendor/k8s.io/klog/v2/SECURITY.md | 22 + vendor/k8s.io/klog/v2/SECURITY_CONTACTS | 20 + vendor/k8s.io/klog/v2/code-of-conduct.md | 3 + vendor/k8s.io/klog/v2/go.mod | 5 + vendor/k8s.io/klog/v2/go.sum | 2 + vendor/k8s.io/klog/v2/klog.go | 1605 +++ vendor/k8s.io/klog/v2/klog_file.go | 164 + vendor/modules.txt | 86 +- vendor/sigs.k8s.io/yaml/.gitignore | 4 + vendor/sigs.k8s.io/yaml/.travis.yml | 7 +- vendor/sigs.k8s.io/yaml/README.md | 2 +- vendor/sigs.k8s.io/yaml/go.mod | 2 +- vendor/sigs.k8s.io/yaml/go.sum | 9 +- 805 files changed, 82756 insertions(+), 23555 deletions(-) create mode 100644 vendor/github.com/go-logr/logr/LICENSE create mode 100644 vendor/github.com/go-logr/logr/README.md create mode 100644 vendor/github.com/go-logr/logr/discard.go create mode 100644 
vendor/github.com/go-logr/logr/go.mod create mode 100644 vendor/github.com/go-logr/logr/logr.go create mode 100644 vendor/github.com/google/go-cmp/LICENSE create mode 100644 vendor/github.com/google/go-cmp/cmp/compare.go create mode 100644 vendor/github.com/google/go-cmp/cmp/export_panic.go create mode 100644 vendor/github.com/google/go-cmp/cmp/export_unsafe.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/function/func.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/name.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/sort.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/zero.go create mode 100644 vendor/github.com/google/go-cmp/cmp/options.go create mode 100644 vendor/github.com/google/go-cmp/cmp/path.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_compare.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_references.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_reflect.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_slices.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_text.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_value.go create mode 100644 vendor/github.com/google/go-jsonnet/update_cpp_jsonnet.sh create mode 100644 vendor/github.com/google/go-jsonnet/util.go create mode 100644 vendor/github.com/grafana/tanka/pkg/jsonnet/evalcache.go create mode 100644 vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/dirs.go create mode 100644 vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/errors.go create mode 100644 vendor/github.com/grafana/tanka/pkg/kustomize/build.go create mode 100644 vendor/github.com/grafana/tanka/pkg/kustomize/jsonnet.go create mode 100644 vendor/github.com/grafana/tanka/pkg/kustomize/kustomize.go rename vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/{config.go => environment.go} (64%) create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/errors.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/evaluators.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/export.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/find.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/inline.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/load.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/parallel.go delete mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/parse.go create mode 100644 vendor/github.com/grafana/tanka/pkg/tanka/static.go delete mode 100644 vendor/github.com/huandu/xstrings/stringbuilder.go delete mode 100644 vendor/github.com/huandu/xstrings/stringbuilder_go110.go create mode 100644 vendor/github.com/imdario/mergo/go.mod create mode 100644 
vendor/github.com/imdario/mergo/go.sum delete mode 100644 vendor/github.com/mattn/go-isatty/.travis.yml delete mode 100644 vendor/github.com/mattn/go-isatty/renovate.json create mode 100644 vendor/github.com/shopspring/decimal/.gitignore create mode 100644 vendor/github.com/shopspring/decimal/.travis.yml create mode 100644 vendor/github.com/shopspring/decimal/CHANGELOG.md create mode 100644 vendor/github.com/shopspring/decimal/LICENSE create mode 100644 vendor/github.com/shopspring/decimal/README.md create mode 100644 vendor/github.com/shopspring/decimal/decimal-go.go create mode 100644 vendor/github.com/shopspring/decimal/decimal.go create mode 100644 vendor/github.com/shopspring/decimal/go.mod create mode 100644 vendor/github.com/shopspring/decimal/rounding.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_order.go create mode 100644 vendor/github.com/thoas/go-funk/assign.go create mode 100644 vendor/github.com/thoas/go-funk/go.mod create mode 100644 vendor/github.com/thoas/go-funk/go.sum create mode 100644 vendor/github.com/thoas/go-funk/join.go create mode 100644 vendor/github.com/thoas/go-funk/join_primitives.go create mode 100644 vendor/github.com/thoas/go-funk/max.go create mode 100644 vendor/github.com/thoas/go-funk/min.go create mode 100644 vendor/github.com/thoas/go-funk/options.go create mode 100644 vendor/github.com/thoas/go-funk/permutation.go create mode 100644 vendor/github.com/thoas/go-funk/predicate.go create mode 100644 vendor/github.com/thoas/go-funk/short_if.go create mode 100644 vendor/github.com/thoas/go-funk/subset.go create mode 100644 vendor/github.com/thoas/go-funk/subtraction.go create mode 100644 vendor/github.com/thoas/go-funk/without.go delete mode 100644 vendor/golang.org/x/crypto/curve25519/curve25519_amd64.go delete mode 100644 vendor/golang.org/x/crypto/curve25519/curve25519_amd64.s delete mode 100644 vendor/golang.org/x/crypto/curve25519/curve25519_generic.go delete mode 100644 vendor/golang.org/x/crypto/curve25519/curve25519_noasm.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/README create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.s create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64_noasm.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.s create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64_noasm.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/fe_generic.go create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/sync.checkpoint create mode 100644 vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh rename vendor/golang.org/x/crypto/{ => internal}/poly1305/bits_compat.go (98%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/bits_go1.13.go (96%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/mac_noasm.go (66%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/poly1305.go (98%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/sum_amd64.go (95%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/sum_amd64.s (98%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/sum_generic.go (100%) rename vendor/golang.org/x/crypto/{ => 
internal}/poly1305/sum_ppc64le.go (95%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/sum_ppc64le.s (94%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/sum_s390x.go (97%) rename vendor/golang.org/x/crypto/{ => internal}/poly1305/sum_s390x.s (99%) rename vendor/golang.org/x/crypto/internal/subtle/{aliasing_appengine.go => aliasing_purego.go} (97%) create mode 100644 vendor/golang.org/x/net/http2/ascii.go create mode 100644 vendor/golang.org/x/net/http2/go115.go create mode 100644 vendor/golang.org/x/net/http2/not_go115.go rename vendor/golang.org/x/net/idna/{tables12.00.go => tables12.0.0.go} (99%) create mode 100644 vendor/golang.org/x/net/idna/tables13.0.0.go create mode 100644 vendor/golang.org/x/sys/plan9/asm.s rename vendor/golang.org/x/sys/{unix/asm_darwin_386.s => plan9/asm_plan9_386.s} (64%) rename vendor/golang.org/x/sys/{unix/asm_freebsd_amd64.s => plan9/asm_plan9_amd64.s} (69%) rename vendor/golang.org/x/sys/{unix/asm_netbsd_386.s => plan9/asm_plan9_arm.s} (66%) create mode 100644 vendor/golang.org/x/sys/plan9/const_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/dir_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/env_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/errors_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/mkall.sh create mode 100644 vendor/golang.org/x/sys/plan9/mkerrors.sh create mode 100644 vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh create mode 100644 vendor/golang.org/x/sys/plan9/pwd_go15_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/pwd_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/race.go create mode 100644 vendor/golang.org/x/sys/plan9/race0.go create mode 100644 vendor/golang.org/x/sys/plan9/str.go create mode 100644 vendor/golang.org/x/sys/plan9/syscall.go create mode 100644 vendor/golang.org/x/sys/plan9/syscall_plan9.go create mode 100644 vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go create mode 100644 vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go create mode 100644 vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go create mode 100644 vendor/golang.org/x/sys/plan9/zsysnum_plan9.go rename vendor/golang.org/x/sys/unix/{asm_freebsd_386.s => asm_bsd_386.s} (72%) rename vendor/golang.org/x/sys/unix/{asm_openbsd_amd64.s => asm_bsd_amd64.s} (72%) rename vendor/golang.org/x/sys/unix/{asm_freebsd_arm.s => asm_bsd_arm.s} (76%) rename vendor/golang.org/x/sys/unix/{asm_netbsd_amd64.s => asm_bsd_arm64.s} (75%) delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_darwin_arm64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_dragonfly_amd64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_freebsd_arm64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_netbsd_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_netbsd_arm64.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_386.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_arm.s delete mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_arm64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_zos_s390x.s create mode 100644 vendor/golang.org/x/sys/unix/dev_zos.go create mode 100644 vendor/golang.org/x/sys/unix/epoll_zos.go create mode 100644 vendor/golang.org/x/sys/unix/fstatfs_zos.go create mode 100644 vendor/golang.org/x/sys/unix/ioctl_linux.go create mode 100644 vendor/golang.org/x/sys/unix/ioctl_zos.go create mode 100644 
vendor/golang.org/x/sys/unix/ptrace_darwin.go create mode 100644 vendor/golang.org/x/sys/unix/ptrace_ios.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_zos_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_zos_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.1_13.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.1_13.s delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.s delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.1_13.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.1_13.s delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.go delete mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_zos_s390x.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_386.go delete mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_arm.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_illumos_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go create mode 100644 vendor/golang.org/x/sys/windows/types_windows_arm64.go create mode 100644 vendor/golang.org/x/term/AUTHORS create mode 100644 vendor/golang.org/x/term/CONTRIBUTING.md create mode 100644 vendor/golang.org/x/term/CONTRIBUTORS create mode 100644 vendor/golang.org/x/term/LICENSE create mode 100644 vendor/golang.org/x/term/PATENTS create mode 100644 vendor/golang.org/x/term/README.md create mode 100644 vendor/golang.org/x/term/go.mod create mode 100644 vendor/golang.org/x/term/go.sum rename vendor/golang.org/x/{crypto/ssh/terminal/util_plan9.go => term/term.go} (56%) create mode 100644 vendor/golang.org/x/term/term_plan9.go rename vendor/golang.org/x/{crypto/ssh/terminal/util_solaris.go => term/term_solaris.go} (61%) rename vendor/golang.org/x/{crypto/ssh/terminal/util.go => term/term_unix.go} (53%) rename vendor/golang.org/x/{crypto/ssh/terminal/util_linux.go => term/term_unix_aix.go} (74%) rename vendor/golang.org/x/{crypto/ssh/terminal/util_bsd.go => term/term_unix_bsd.go} (95%) rename vendor/golang.org/x/{crypto/ssh/terminal/util_aix.go => term/term_unix_linux.go} (71%) create mode 100644 vendor/golang.org/x/term/term_unix_zos.go create mode 100644 vendor/golang.org/x/term/term_unsupported.go rename vendor/golang.org/x/{crypto/ssh/terminal/util_windows.go => term/term_windows.go} (53%) create mode 100644 vendor/golang.org/x/term/terminal.go create mode 100644 vendor/golang.org/x/text/unicode/bidi/tables13.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables13.0.0.go create mode 
100644 vendor/golang.org/x/text/width/tables13.0.0.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/any_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/api_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/descriptor_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/doc.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/duration_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/empty_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/field_mask_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/source_context_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/struct_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/timestamp_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/type_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/wrappers_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldsort/fieldsort.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/any_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/api_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/doc.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/duration_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/empty_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/goname.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/map_entry.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/struct_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/type_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/wrappers.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/genname/name.go delete mode 100644 vendor/google.golang.org/protobuf/internal/mapsort/mapsort.go create mode 100644 vendor/google.golang.org/protobuf/internal/order/order.go create mode 100644 vendor/google.golang.org/protobuf/internal/order/range.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc_resolve.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/proto.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go create mode 100644 vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go delete mode 100644 vendor/gopkg.in/yaml.v3/.travis.yml create mode 100644 vendor/k8s.io/apimachinery/LICENSE create mode 100644 vendor/k8s.io/apimachinery/pkg/labels/doc.go create mode 100644 vendor/k8s.io/apimachinery/pkg/labels/labels.go create 
mode 100644 vendor/k8s.io/apimachinery/pkg/labels/selector.go create mode 100644 vendor/k8s.io/apimachinery/pkg/labels/zz_generated.deepcopy.go create mode 100644 vendor/k8s.io/apimachinery/pkg/selection/operator.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/errors/doc.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/errors/errors.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/byte.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/doc.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/empty.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/int.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/int32.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/int64.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/sets/string.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/validation/field/errors.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/validation/field/path.go create mode 100644 vendor/k8s.io/apimachinery/pkg/util/validation/validation.go create mode 100644 vendor/k8s.io/klog/v2/.gitignore create mode 100644 vendor/k8s.io/klog/v2/CONTRIBUTING.md create mode 100644 vendor/k8s.io/klog/v2/LICENSE create mode 100644 vendor/k8s.io/klog/v2/OWNERS create mode 100644 vendor/k8s.io/klog/v2/README.md create mode 100644 vendor/k8s.io/klog/v2/RELEASE.md create mode 100644 vendor/k8s.io/klog/v2/SECURITY.md create mode 100644 vendor/k8s.io/klog/v2/SECURITY_CONTACTS create mode 100644 vendor/k8s.io/klog/v2/code-of-conduct.md create mode 100644 vendor/k8s.io/klog/v2/go.mod create mode 100644 vendor/k8s.io/klog/v2/go.sum create mode 100644 vendor/k8s.io/klog/v2/klog.go create mode 100644 vendor/k8s.io/klog/v2/klog_file.go

diff --git a/cmd/helmConcept.go b/cmd/helmConcept.go
index 7a94522..0abe71c 100644
--- a/cmd/helmConcept.go
+++ b/cmd/helmConcept.go
@@ -24,14 +24,15 @@ import (
 )
 
 var chartVersion string
-var chartRepo string
+var chartRepoName string
+var chartRepoURL string
 var dir string
 // helmImportCmd represents the import command
 var helmConceptCmd = &cobra.Command{
     Use: "concept",
     Short: "Create a concept, wrapping a helm chart from a git repo",
-    Example: "kable helm concept --directory sentry sentry --repo stable --version 4.3.0",
+    Example: "kable helm concept --directory sentry sentry --repoName stable --version 4.3.0",
     Args: func(cmd *cobra.Command, args []string) error {
         if len(args) != 1 {
             return errors.New("requires exactly ONE argument")
         }
@@ -40,7 +41,7 @@ var helmConceptCmd = &cobra.Command{
     },
     Run: func(cmd *cobra.Command, args []string) {
         PrintMsg("Creating concept from helm chart '%s'...", args[0])
-        if err := helm.InitHelmConcept(helm.HelmChart{Name: args[0], Version: chartVersion, Repo: chartRepo}, dir); err != nil {
+        if err := helm.InitHelmConcept(helm.HelmChart{Name: args[0], Version: chartVersion, Repo: helm.HelmRepo{Name: chartRepoName, URL: chartRepoURL}}, dir); err != nil {
             PrintError("unable to import helm chart: %s", err)
         }
     },
@@ -58,6 +59,7 @@ func init() {
     // Cobra supports local flags which will only run when this command
     // is called directly, e.g.:
     helmConceptCmd.Flags().StringVarP(&chartVersion, "version", "v", "", "The version of the helm chart.")
-    helmConceptCmd.Flags().StringVarP(&chartRepo, "repo", "r", "stable", "The repo where the helm chart resides.")
+    helmConceptCmd.Flags().StringVar(&chartRepoName, "repoName", "stable", "The name of the repository where the helm chart resides. (stable: https://charts.helm.sh/stable)")
+    helmConceptCmd.Flags().StringVar(&chartRepoURL, "repoURL", "", "The URL of the repository where the helm chart resides.")
     helmConceptCmd.Flags().StringVarP(&dir, "directory", "d", ".", "The directory to create the concept in.")
 }
diff --git a/cmd/helmImport.go b/cmd/helmImport.go
index dca7822..8d647f4 100644
--- a/cmd/helmImport.go
+++ b/cmd/helmImport.go
@@ -31,7 +31,7 @@ var importSubdir string
 var helmImportCmd = &cobra.Command{
     Use: "import",
     Short: "Import a helm chart from a helm repo into the concept",
-    Example: "kable helm import sentry --repo stable --version 4.3.0",
+    Example: "kable helm import sentry --repoName stable --version 4.3.0",
     Args: func(cmd *cobra.Command, args []string) error {
         if len(args) != 1 {
             return errors.New("requires exactly ONE argument")
         }
@@ -43,7 +43,7 @@ var helmImportCmd = &cobra.Command{
             PrintError("current directory is not a concept directory: %s", err)
         }
         PrintMsg("Importing helm chart '%s' into current concept...", args[0])
-        if err := helm.ImportHelmChart(helm.HelmChart{Name: args[0], Version: chartVersion, Repo: chartRepo}, dir); err != nil {
+        if err := helm.ImportHelmChart(helm.HelmChart{Name: args[0], Version: chartVersion, Repo: helm.HelmRepo{Name: chartRepoName, URL: chartRepoURL}}, dir); err != nil {
             PrintError("unable to import helm chart: %s", err)
         }
     },
@@ -61,6 +61,7 @@ func init() {
     // Cobra supports local flags which will only run when this command
     // is called directly, e.g.:
     helmImportCmd.Flags().StringVarP(&chartVersion, "version", "v", "", "The version of the helm chart.")
-    helmImportCmd.Flags().StringVarP(&chartRepo, "repo", "r", ".", "The repo where the helm chart resides.")
+    helmImportCmd.Flags().StringVar(&chartRepoName, "repoName", "stable", "The name of the repository where the helm chart resides. (stable: https://charts.helm.sh/stable)")
+    helmImportCmd.Flags().StringVar(&chartRepoURL, "repoURL", "", "The URL of the repository where the helm chart resides.")
     helmImportCmd.Flags().StringVarP(&dir, "directory", "d", ".", "The directory of the concept.")
 }
diff --git a/go.mod b/go.mod
index 549b742..ed458ae 100644
--- a/go.mod
+++ b/go.mod
@@ -6,14 +6,14 @@ require (
     github.com/AlecAivazis/survey/v2 v2.1.1
     github.com/coreos/etcd v3.3.13+incompatible
     github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f
-    github.com/fatih/color v1.9.0
+    github.com/fatih/color v1.13.0
     github.com/fatih/structs v1.1.0
     github.com/go-git/go-git/v5 v5.1.0
     github.com/gofiber/fiber/v2 v2.3.0
     github.com/gofiber/template v1.6.6
     github.com/google/go-querystring v1.0.0
     github.com/google/logger v1.1.0
-    github.com/grafana/tanka v0.12.0
+    github.com/grafana/tanka v0.18.2
     github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4 // indirect
     github.com/grpc-ecosystem/grpc-gateway v1.9.5 // indirect
     github.com/jsonnet-bundler/jsonnet-bundler v0.4.0
@@ -29,8 +29,10 @@ require (
     github.com/spf13/afero v1.2.2 // indirect
     github.com/spf13/cobra v1.0.0
     github.com/spf13/viper v1.7.0
-    github.com/stretchr/testify v1.6.1
+    github.com/stretchr/testify v1.7.0
     go.etcd.io/bbolt v1.3.3 // indirect
 )
 
 replace github.com/Joker/jade v1.0.0 => github.com/Joker/jade v1.0.1-0.20200506134858-ee26e3c533bb
+
+replace github.com/grafana/tanka v0.18.2 => github.com/redradrat/tanka v0.18.2-fix639
diff --git a/go.sum b/go.sum
index 6932651..a69064c 100644
--- a/go.sum
+++ b/go.sum
@@ -19,30 +19,25 @@ github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno= github.com/CloudyKit/jet/v3 v3.0.1/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo= github.com/Djarvur/go-err113 v0.0.0-20200511133814-5174e21577d5/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= -github.com/Joker/hpp v0.0.0-20180418125244-6893e659854a/go.mod h1:MzD2WMdSxvbHw5fM/OXOFily/lipJWRc9C1px0Mt0ZE= github.com/Joker/hpp v1.0.0 h1:65+iuJYdRXv/XyN62C1uEmmOx3432rNG/rKlX6V7Kkc= github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= -github.com/Joker/jade v1.0.0 h1:lOCEPvTAtWfLpSZYMOv/g44MGQFAolbKh2khHHGu0Kc= -github.com/Joker/jade v1.0.0/go.mod h1:efZIdO0py/LtcJRSa/j2WEklMSAw84WV0zZVMxNToB8= github.com/Joker/jade v1.0.1-0.20200506134858-ee26e3c533bb h1:keFglnNEP/KZr+ZjUE2YKn+xylNtOozoNZJgf6POjDk= github.com/Joker/jade v1.0.1-0.20200506134858-ee26e3c533bb/go.mod h1:C5O3w7HbsWdb9ik1puKS81QsllcBd+CXRVCbXFwSdsE= -github.com/Masterminds/goutils v1.1.0 h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg= -github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.4.2 h1:WBLTQ37jOCzSLtXNdoo8bNM8876KhNqOKvrlGITgsTc= -github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/semver/v3 v3.1.0 h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk= -github.com/Masterminds/semver/v3 v3.1.0/go.mod 
h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/sprig/v3 v3.1.0 h1:j7GpgZ7PdFqNsmncycTHsLmVPf5/3wJtlgW9TNDYD9Y= -github.com/Masterminds/sprig/v3 v3.1.0/go.mod h1:ONGMf7UfYGAbMXCZmQLy8x3lCDIPrEZE/rU8pmrbihA= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/sprig/v3 v3.2.2 h1:17jRggJu518dr3QaafizSXOjKYp94wKfABxUmyxvxX8= +github.com/Masterminds/sprig/v3 v3.2.2/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8 h1:xzYJEypr/85nBpB11F9br+3HUrpgb+fcm5iADzXXYEw= github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8/go.mod h1:oX5x61PbNXchhh0oikYAH+4Pcfw5LKv21+Jnpr6r6Pc= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= -github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= @@ -58,6 +53,7 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/aymerick/raymond v2.0.2+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0 h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0= @@ -72,11 +68,9 @@ github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghf github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/coreos/bbolt v1.3.2 h1:wZwiHHUieZCquLkDL0B8UhzreNWsPHooDAG3q34zk0s= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= -github.com/coreos/etcd v3.3.10+incompatible h1:jFneRYjIvLMLhDLCzuTuU4rSJUjRplcJQ7pD7MnhC04= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible h1:8F3hqu9fGYLBifCmRCJsicFqDx/D68Rt3q1JMazcgBQ= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-semver v0.2.0 
h1:3Jm3tLmsgAYcjC+4Up7hJrFBPr+n7rAqYeSw/SZazuY= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -84,7 +78,6 @@ github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e h1:Wf6HqHfScWJN9 github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -94,7 +87,6 @@ github.com/denis-tingajkin/go-header v0.3.1/go.mod h1:sq/2IxMhaZX+RRcgHfCRx/m0M5 github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= @@ -103,26 +95,24 @@ github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/evanphx/json-patch v4.9.0+incompatible h1:kLcOMZeuLAJvL2BPWLMIj5oaZQobrkAqrL+WFZwQses= -github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v4.11.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/flosch/pongo2/v4 v4.0.1/go.mod h1:B5ObFANs/36VwxxlgKpdchIJHMvHB562PW+BWPhwZD8= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ= github.com/flynn/go-shlex 
v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= -github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= -github.com/go-clix/cli v0.1.1/go.mod h1:dYJevXraB9mXZFhz5clyQestG0qGcmT5rRC/P9etoRQ= +github.com/go-clix/cli v0.2.0/go.mod h1:yWI9abpv187r47lDjz8Z9TWev93aUTWaW2seSb5JmPQ= github.com/go-critic/go-critic v0.5.0/go.mod h1:4jeRh3ZAVnRYhuWdOEvwzVqLUpxMSoAT0xZ74JsTPlo= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= @@ -138,13 +128,12 @@ github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTD github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= -github.com/go-logr/logr v0.2.0 h1:QvGt2nLcHH0WK9orKa+ppBPAxREcH364nPUedEpK0TY= -github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-logr/logr v0.4.0 h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc= +github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8= -github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= -github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= -github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= -github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= @@ -172,29 +161,28 @@ github.com/gofiber/template v1.6.6/go.mod h1:WmzkzTh6QWwrGysWFU8bwNsmHmqRbxHmWZc github.com/gofrs/flock v0.7.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/gogo/protobuf v1.3.1 
h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls= -github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7 h1:5ZkaAPbicIKTF2I64qf5Fh8Aa83Q/dnOafMYV0OMwjA= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0= @@ -214,19 +202,17 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp 
v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2-0.20200818193711-d2fcc899bdc2 h1:CZtx9gNen+kr3PuC/JQff3n1pJbgpy7Wr3hzjnupqdw= -github.com/google/go-cmp v0.5.2-0.20200818193711-d2fcc899bdc2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-jsonnet v0.16.1-0.20200908152747-b70cbd441a39 h1:noLRnY1ESguFGDPxXvIcESe2rG63f+ZSbSGYfVa6iHo= -github.com/google/go-jsonnet v0.16.1-0.20200908152747-b70cbd441a39/go.mod h1:sOcuej3UW1vpPTZOr8L7RQimqai1a57bt5j22LzGZCw= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-jsonnet v0.17.0 h1:/9NIEfhK1NQRKl3sP2536b2+x5HnZMdql7x3yK/l8JY= +github.com/google/go-jsonnet v0.17.0/go.mod h1:sOcuej3UW1vpPTZOr8L7RQimqai1a57bt5j22LzGZCw= github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/logger v1.1.0 h1:saB74Etb4EAJNH3z74CVbCKk75hld/8T0CsXKetWCwM= github.com/google/logger v1.1.0/go.mod h1:w7O8nrRr0xufejBlQMI83MXqRusvREoJdaAxV+CoAB4= @@ -235,30 +221,26 @@ github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OI github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gnostic v0.4.1 h1:DLJCy1n/vrD4HPjOvYcT8aYQXpPIzoRZONaYwyycI+I= -github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= +github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= +github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= github.com/gookit/color v1.2.4/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q= github.com/gorilla/websocket v1.4.0/go.mod 
h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= -github.com/grafana/tanka v0.12.0 h1:dT6hPdM3lJws5lHAYz1AidvJUsVli29wqM9Yj1o04x8= -github.com/grafana/tanka v0.12.0/go.mod h1:Zd1n8C4QiXsmj4QoIb492BexopI3ALDkcv9AegIZgfg= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.0 h1:Iju5GlWwrvL6UBg4zJJt3btmonfrMlCDdsejg4CZE7c= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4 h1:z53tR0945TRRQO/fLEVPI6SMv7ZflF0TEaTAoU7tOzg= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.9.0 h1:bM6ZAFZmc/wPFaRDi0d5L7hGEZEx/2u+Tmr2evNHDiI= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5 h1:UImYN5qQ8tuGpGE16ZmjvcTtTw24zw1QAp/SlnNrZhI= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= @@ -286,17 +268,15 @@ github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/J github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ= github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174/go.mod h1:DqJ97dSdRW1W22yXSB90986pcOyQ7r45iio1KN2ez1A= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.3.1 h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs= github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jingyugao/rowserrcheck 
v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s= github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= @@ -305,41 +285,37 @@ github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xl github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jsonnet-bundler/jsonnet-bundler v0.4.0 h1:4BKZ6LDqPc2wJDmaKnmYD/vDjUptJtnUpai802MibFc= github.com/jsonnet-bundler/jsonnet-bundler v0.4.0/go.mod h1:/by7P/OoohkI3q4CgSFqcoFsVY+IaNbzOVDknEsKDeU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/karrick/godirwalk v1.15.5 h1:ErdAEFW/cKxQ5+9Gm/hopxB8ki21/di+vyNb9mHnHrA= -github.com/karrick/godirwalk v1.15.5/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= +github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw= +github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.10.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.10.5/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.10.7 h1:7rix8v8GpI3ZBb0nSozFRgbtXKv+hOe+qfEpZqybrAg= github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod 
h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.4 h1:5Myjjh3JY/NaAi4IsUbHADytDyl1VE1Y9PXDlL+P/VQ= github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5 h1:hyz3dwM5QLc1Rfoz4FuWJQG5BN7tc6K1MndAUnGpQr4= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= @@ -350,27 +326,28 @@ github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0 github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.7 h1:bQGKb3vps/j0E9GfJQ03JyhRuxsvdAanXlT9BTw3mdw= github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.6/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod 
h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= @@ -397,6 +374,7 @@ github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQz github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -412,24 +390,25 @@ github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96d github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nishanths/exhaustive v0.0.0-20200525081945-8e46705b6132/go.mod h1:wBEpHwM2OdmeNpdCvRPUlkEbBuaFmcK4Wv8Q7FuGW3c= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8= github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= -github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pborman/uuid v1.2.0/go.mod 
h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -438,23 +417,19 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3 h1:9iH4JKXLzFbOAdtqv/a+j8aewx2Y8lAjAydhbaScPF8= github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_golang v1.0.0 h1:vrDKnkGzuGvhNAL56c7DBz29ZL+KxnoR0x7enabFceM= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.4.0 h1:7etb9YClo3a6HjLzfl6rIQaU+FDfi0VSX39io3aQ+DM= github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.1 h1:K0MGApIoQvMw27RTdJkPbr3JZ7DNbtxQNyi5STVM6Kw= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084 h1:sofwID9zm4tzrgykg80hfFph1mryUeLRsUfoocVVmRY= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.11 h1:DhHlBtkHWPYi8O2y31JkK0TF+DGM+51OopZjH/Ia5qI= @@ -463,10 +438,11 @@ github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40T github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= github.com/quasilyte/go-ruleguard v0.1.2-0.20200318202121-b00d7a75d3d8/go.mod h1:CGFX09Ci3pq9QZdj86B+VGIdNj4VyCo2iPOGS9esB/k= 
github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/redradrat/tanka v0.18.2-fix639 h1:qHEDkTl5c9m9kuPPh5C5+bNRai2jEa7IfgR3YfhkzoI= +github.com/redradrat/tanka v0.18.2-fix639/go.mod h1:Ayl82UPX6lUJuZlWmG9E6U3sYVRxKP/qUOksFAcJiks= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryancurrah/gomodguard v1.1.0/go.mod h1:4O8tr7hBODaGE6VIhfJDHcwzh5GUccKSJBU0UMXJFVM= github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= @@ -477,11 +453,11 @@ github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc= github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= -github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.2.0 h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= @@ -494,11 +470,9 @@ github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4k github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= github.com/sourcegraph/go-diff v0.5.3/go.mod h1:v9JDtjCE4HHHCZGId75rg8gkKKa98RVjBcBGsVmMmak= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= @@ -507,31 +481,31 @@ github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHN 
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM= github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.3.0 h1:NGXK3lHquSN08v5vWalVI/L8XU9hdzE/G6xsrze47As= +github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= github.com/tetafro/godot v0.4.2/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0= -github.com/thoas/go-funk v0.4.0 h1:KBaa5NL7NMtsFlQaD8nQMbDt1wuM+OOaNQyYNYQFhVo= -github.com/thoas/go-funk v0.4.0/go.mod h1:mlR+dHGb+4YgXkf13rkQTuzrneeHANxOm6+ZnEV9HsA= +github.com/thoas/go-funk v0.9.1 h1:O549iLZqPpTUQ10ykd26sZhzD+rmR5pWhuElrhbC20M= +github.com/thoas/go-funk v0.9.1/go.mod h1:+IWnUfUmFO1+WVYQWQtIJHeRRdaIyyYglZN7xzUPe4Q= github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5 h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= @@ -560,7 +534,7 @@ github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1: 
github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.etcd.io/bbolt v1.3.2 h1:Z/90sZLPOeCy2PwprqkFa25PdkusRzaj9P8zm/KNyvk= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3 h1:MUGmc65QhB3pIlaQ5bB4LwqSj6GIonVJXpZiaKNyaKk= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= @@ -580,13 +554,12 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d h1:1ZiEyfaQIg3Qh0EoqpwAakHVhecoE5wlSg5GjnafJGw= golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -625,15 +598,16 @@ golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b h1:0mm1VjtFUOIlE1SbDlwjYaDxZVDP2S5ou6y0gSgXHu8= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200707034311-ab3426394381 h1:VXak5I6aEWmAXeQjA+QSZzlgNrpq9mjcfDemuexIKsU= -golang.org/x/net 
v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201016165138-7b1cca2348c0 h1:5kGOVHlq0euqwzgTC9Vu15p6fV1Wi0ArVi8da2urnVg= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201016165138-7b1cca2348c0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210520170846-37e1c6afe023 h1:ADo5wSpq2gqaCGQWzk7S5vd//0iyyLeAratkEoG5dLE= +golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -643,6 +617,7 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -663,36 +638,38 @@ golang.org/x/sys v0.0.0-20190530182044-ad28b68e88f1/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4 h1:5/PjkGUjvEU5Gl6BxmvKRPpqo2uNMv4rcHBMwzk/st8= -golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201210223839-7e3030f88018 h1:XKi8B/gRBuTZN1vU9gFsLMm6zVz5FSCDzm8JYACnjy8= golang.org/x/sys v0.0.0-20201210223839-7e3030f88018/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -729,12 +706,15 @@ golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools 
v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200428185508-e9a00ec82136/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200519015757-0d0afa43d58a/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200702044944-0cc1aa72b347/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -744,7 +724,6 @@ google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9Ywl google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -754,11 +733,11 @@ google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98 google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154 h1:bFFRpT+e8JJVY7lMMfvezL1ZIwqiwmPl2bsE2yx4HqM= +google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod 
h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.0 h1:G+97AoqBnmZIT91cLG/EkCoK9NSelj64P8bOHHNmGn0= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= @@ -772,61 +751,59 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0 h1:UhZDfRO8JRQru4/+LlLE0BRKGF8L+PICnvYZmx/fEGA= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U= gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v3 v3.0.0-20191010095647-fc94e3f71652/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -k8s.io/apimachinery v0.19.2 h1:5Gy9vQpAGTKHPVOh5c4plE274X8D/6cuEiTO2zve7tc= -k8s.io/apimachinery v0.19.2/go.mod h1:DnPGDnARWFvYa3pMHgSxtbZb7gpzzAZ1pTfaUNDVlmA= +k8s.io/apimachinery v0.22.2 h1:ejz6y/zNma8clPVfNDLnPbleBo6MpoFy/HBiBqCouVk= +k8s.io/apimachinery v0.22.2/go.mod h1:O3oNtNadZdeOMxHFVxOreoznohCpy0z6mocxbZr7oJ0= k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= -k8s.io/klog/v2 v2.2.0 h1:XRvcwJozkgZ1UQJmfMGpvRthQHOvihEhYtDfAaxMz/A= -k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= -k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6 h1:+WnxoVtG8TMiudHBSEtrVL1egv36TkkJm+bA8AxicmQ= -k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o= +k8s.io/klog/v2 v2.9.0 h1:D7HV+n1V57XeZ0m6tdRkfknthUaM06VFbWldOFh8kzM= +k8s.io/klog/v2 v2.9.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= +k8s.io/kube-openapi v0.0.0-20210421082810-95288971da7e/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= mvdan.cc/gofumpt v0.0.0-20200513141252-abc0db2c416a/go.mod h1:4q/PlrZKQLU5MowSvCKM3U4xJUPtJ8vKWx7vsWFJ3MI= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -sigs.k8s.io/structured-merge-diff/v4 v4.0.1 h1:YXTMot5Qz/X1iBRJhAt+vI+HVttY0WkSqqhKxQ0xVbA= -sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= -sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= -sigs.k8s.io/yaml v1.2.0 
h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= +sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.1.2/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= diff --git a/pkg/helm/import.go b/pkg/helm/import.go index 3e8cda2..26686be 100644 --- a/pkg/helm/import.go +++ b/pkg/helm/import.go @@ -52,14 +52,16 @@ local values = { lib + { _values+: values } ` +type HelmRepo helm.Repo + type HelmChart struct { - Repo string + Repo HelmRepo Name string Version string } func (hc HelmChart) Requirement() string { - return fmt.Sprintf("%s/%s@%s", hc.Repo, hc.Name, hc.Version) + return fmt.Sprintf("%s/%s@%s", hc.Repo.Name, hc.Name, hc.Version) } func InitHelmConcept(chart HelmChart, out string) error { @@ -122,6 +124,16 @@ func ImportHelmChart(helmChart HelmChart, out string) error { return err } + // If repo name is not stable, we add the repo to the chartfile. + if helmChart.Repo.Name != "stable" { + if err := cf.AddRepos(helm.Repo{ + Name: helmChart.Repo.Name, + URL: helmChart.Repo.URL, + }); err != nil { + return err + } + } + if err := cf.Add([]string{helmChart.Requirement()}); err != nil { return err } diff --git a/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go b/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go index 177dd86..8dbd924 100644 --- a/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go +++ b/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go @@ -21,7 +21,6 @@ import ( "fmt" "math" "math/big" - "regexp" "unicode" ) @@ -99,27 +98,7 @@ Returns: error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...) 
*/ func CryptoRandomAlphaNumeric(count int) (string, error) { - if count == 0 { - return "", nil - } - RandomString, err := CryptoRandom(count, 0, 0, true, true) - if err != nil { - return "", fmt.Errorf("Error: %s", err) - } - match, err := regexp.MatchString("([0-9]+)", RandomString) - if err != nil { - panic(err) - } - - if !match { - //Get the position between 0 and the length of the string-1 to insert a random number - position := getCryptoRandomInt(count) - //Insert a random number between [0-9] in the position - RandomString = RandomString[:position] + string('0' + getCryptoRandomInt(10)) + RandomString[position + 1:] - return RandomString, err - } - return RandomString, err - + return CryptoRandom(count, 0, 0, true, true) } /* @@ -204,7 +183,7 @@ func CryptoRandom(count int, start int, end int, letters bool, numbers bool, cha if chars == nil { ch = rune(getCryptoRandomInt(gap) + int64(start)) } else { - ch = chars[getCryptoRandomInt(gap) + int64(start)] + ch = chars[getCryptoRandomInt(gap)+int64(start)] } if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers { diff --git a/vendor/github.com/Masterminds/goutils/randomstringutils.go b/vendor/github.com/Masterminds/goutils/randomstringutils.go index 1364e0c..2726702 100644 --- a/vendor/github.com/Masterminds/goutils/randomstringutils.go +++ b/vendor/github.com/Masterminds/goutils/randomstringutils.go @@ -20,7 +20,6 @@ import ( "fmt" "math" "math/rand" - "regexp" "time" "unicode" ) @@ -75,12 +74,10 @@ func RandomNumeric(count int) (string, error) { /* RandomAlphabetic creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. +Characters will be chosen from the set of alphabetic characters. Parameters: count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters Returns: string - the random string @@ -102,24 +99,7 @@ Returns: error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) */ func RandomAlphaNumeric(count int) (string, error) { - RandomString, err := Random(count, 0, 0, true, true) - if err != nil { - return "", fmt.Errorf("Error: %s", err) - } - match, err := regexp.MatchString("([0-9]+)", RandomString) - if err != nil { - panic(err) - } - - if !match { - //Get the position between 0 and the length of the string-1 to insert a random number - position := rand.Intn(count) - //Insert a random number between [0-9] in the position - RandomString = RandomString[:position] + string('0'+rand.Intn(10)) + RandomString[position+1:] - return RandomString, err - } - return RandomString, err - + return Random(count, 0, 0, true, true) } /* diff --git a/vendor/github.com/Masterminds/goutils/stringutils.go b/vendor/github.com/Masterminds/goutils/stringutils.go index 5037c45..741bb53 100644 --- a/vendor/github.com/Masterminds/goutils/stringutils.go +++ b/vendor/github.com/Masterminds/goutils/stringutils.go @@ -222,3 +222,19 @@ func IndexOf(str string, sub string, start int) int { func IsEmpty(str string) bool { return len(str) == 0 } + +// Returns either the passed in string, or if the string is empty, the value of defaultStr. 
+func DefaultString(str string, defaultStr string) string { + if IsEmpty(str) { + return defaultStr + } + return str +} + +// Returns either the passed in string, or if the string is whitespace, empty (""), the value of defaultStr. +func DefaultIfBlank(str string, defaultStr string) string { + if IsBlank(str) { + return defaultStr + } + return str +} diff --git a/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md index 947210d..1f90c38 100644 --- a/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md +++ b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 3.1.1 (2020-11-23) + +### Fixed + +- #158: Fixed issue with generated regex operation order that could cause problem + ## 3.1.0 (2020-04-15) ### Added diff --git a/vendor/github.com/Masterminds/semver/v3/constraints.go b/vendor/github.com/Masterminds/semver/v3/constraints.go index 7420823..547613f 100644 --- a/vendor/github.com/Masterminds/semver/v3/constraints.go +++ b/vendor/github.com/Masterminds/semver/v3/constraints.go @@ -164,14 +164,11 @@ func init() { "^": constraintCaret, } - ops := make([]string, 0, len(constraintOps)) - for k := range constraintOps { - ops = append(ops, regexp.QuoteMeta(k)) - } + ops := `=||!=|>|<|>=|=>|<=|=<|~|~>|\^` constraintRegex = regexp.MustCompile(fmt.Sprintf( `^\s*(%s)\s*(%s)\s*$`, - strings.Join(ops, "|"), + ops, cvRegex)) constraintRangeRegex = regexp.MustCompile(fmt.Sprintf( @@ -180,12 +177,12 @@ func init() { findConstraintRegex = regexp.MustCompile(fmt.Sprintf( `(%s)\s*(%s)`, - strings.Join(ops, "|"), + ops, cvRegex)) validConstraintRegex = regexp.MustCompile(fmt.Sprintf( `^(\s*(%s)\s*(%s)\s*\,?)+$`, - strings.Join(ops, "|"), + ops, cvRegex)) } diff --git a/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md b/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md index 83b8e03..fcdd4e8 100644 --- a/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md +++ b/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md @@ -1,5 +1,41 @@ # Changelog +## Release 3.2.1 (2021-02-04) + +### Changed + +- Upgraded `Masterminds/goutils` to `v1.1.1`. see the [Security Advisory](https://github.com/Masterminds/goutils/security/advisories/GHSA-xg2h-wx96-xgxr) + +## Release 3.2.0 (2020-12-14) + +### Added + +- #211: Added randInt function (thanks @kochurovro) +- #223: Added fromJson and mustFromJson functions (thanks @mholt) +- #242: Added a bcrypt function (thanks @robbiet480) +- #253: Added randBytes function (thanks @MikaelSmith) +- #254: Added dig function for dicts (thanks @nyarly) +- #257: Added regexQuoteMeta for quoting regex metadata (thanks @rheaton) +- #261: Added filepath functions osBase, osDir, osExt, osClean, osIsAbs (thanks @zugl) +- #268: Added and and all functions for testing conditions (thanks @phuslu) +- #181: Added float64 arithmetic addf, add1f, subf, divf, mulf, maxf, and minf + (thanks @andrewmostello) +- #265: Added chunk function to split array into smaller arrays (thanks @karelbilek) +- #270: Extend certificate functions to handle non-RSA keys + add support for + ed25519 keys (thanks @misberner) + +### Changed + +- Removed testing and support for Go 1.12. ed25519 support requires Go 1.13 or newer +- Using semver 3.1.1 and mergo 0.3.11 + +### Fixed + +- #249: Fix htmlDateInZone example (thanks @spawnia) + +NOTE: The dependency github.com/imdario/mergo reverted the breaking change in +0.3.9 via 0.3.10 release. 
+ ## Release 3.1.0 (2020-04-16) NOTE: The dependency github.com/imdario/mergo made a behavior change in 0.3.9 diff --git a/vendor/github.com/Masterminds/sprig/v3/crypto.go b/vendor/github.com/Masterminds/sprig/v3/crypto.go index 7ae3991..13a5cd5 100644 --- a/vendor/github.com/Masterminds/sprig/v3/crypto.go +++ b/vendor/github.com/Masterminds/sprig/v3/crypto.go @@ -2,10 +2,12 @@ package sprig import ( "bytes" + "crypto" "crypto/aes" "crypto/cipher" "crypto/dsa" "crypto/ecdsa" + "crypto/ed25519" "crypto/elliptic" "crypto/hmac" "crypto/rand" @@ -30,7 +32,7 @@ import ( "strings" "github.com/google/uuid" - "golang.org/x/crypto/bcrypt" + bcrypt_lib "golang.org/x/crypto/bcrypt" "golang.org/x/crypto/scrypt" ) @@ -49,15 +51,28 @@ func adler32sum(input string) string { return fmt.Sprintf("%d", hash) } +func bcrypt(input string) string { + hash, err := bcrypt_lib.GenerateFromPassword([]byte(input), bcrypt_lib.DefaultCost) + if err != nil { + return fmt.Sprintf("failed to encrypt string with bcrypt: %s", err) + } + + return string(hash) +} + func htpasswd(username string, password string) string { if strings.Contains(username, ":") { return fmt.Sprintf("invalid username: %s", username) } - hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) - if err != nil { - return fmt.Sprintf("failed to create htpasswd: %s", err) + return fmt.Sprintf("%s:%s", username, bcrypt(password)) +} + +func randBytes(count int) (string, error) { + buf := make([]byte, count) + if _, err := rand.Read(buf); err != nil { + return "", err } - return fmt.Sprintf("%s:%s", username, hash) + return base64.StdEncoding.EncodeToString(buf), nil } // uuidv4 provides a safe and secure UUID v4 implementation @@ -147,6 +162,8 @@ func generatePrivateKey(typ string) string { case "ecdsa": // again, good enough for government work priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + case "ed25519": + _, priv, err = ed25519.GenerateKey(rand.Reader) default: return "Unknown type " + typ } @@ -179,7 +196,73 @@ func pemBlockForKey(priv interface{}) *pem.Block { b, _ := x509.MarshalECPrivateKey(k) return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b} default: - return nil + // attempt PKCS#8 format for all other keys + b, err := x509.MarshalPKCS8PrivateKey(k) + if err != nil { + return nil + } + return &pem.Block{Type: "PRIVATE KEY", Bytes: b} + } +} + +func parsePrivateKeyPEM(pemBlock string) (crypto.PrivateKey, error) { + block, _ := pem.Decode([]byte(pemBlock)) + if block == nil { + return nil, errors.New("no PEM data in input") + } + + if block.Type == "PRIVATE KEY" { + priv, err := x509.ParsePKCS8PrivateKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("decoding PEM as PKCS#8: %s", err) + } + return priv, nil + } else if !strings.HasSuffix(block.Type, " PRIVATE KEY") { + return nil, fmt.Errorf("no private key data in PEM block of type %s", block.Type) + } + + switch block.Type[:len(block.Type)-12] { // strip " PRIVATE KEY" + case "RSA": + priv, err := x509.ParsePKCS1PrivateKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("parsing RSA private key from PEM: %s", err) + } + return priv, nil + case "EC": + priv, err := x509.ParseECPrivateKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("parsing EC private key from PEM: %s", err) + } + return priv, nil + case "DSA": + var k DSAKeyFormat + _, err := asn1.Unmarshal(block.Bytes, &k) + if err != nil { + return nil, fmt.Errorf("parsing DSA private key from PEM: %s", err) + } + priv := &dsa.PrivateKey{ + PublicKey: dsa.PublicKey{ + 
Parameters: dsa.Parameters{ + P: k.P, Q: k.Q, G: k.G, + }, + Y: k.Y, + }, + X: k.X, + } + return priv, nil + default: + return nil, fmt.Errorf("invalid private key type %s", block.Type) + } +} + +func getPublicKey(priv crypto.PrivateKey) (crypto.PublicKey, error) { + switch k := priv.(type) { + case interface{ Public() crypto.PublicKey }: + return k.Public(), nil + case *dsa.PrivateKey: + return &k.PublicKey, nil + default: + return nil, fmt.Errorf("unable to get public key for type %T", priv) } } @@ -213,14 +296,10 @@ func buildCustomCertificate(b64cert string, b64key string) (certificate, error) ) } - decodedKey, _ := pem.Decode(key) - if decodedKey == nil { - return crt, errors.New("unable to decode key") - } - _, err = x509.ParsePKCS1PrivateKey(decodedKey.Bytes) + _, err = parsePrivateKeyPEM(string(key)) if err != nil { return crt, fmt.Errorf( - "error parsing prive key: decodedKey.Bytes: %s", + "error parsing private key: %s", err, ) } @@ -234,6 +313,31 @@ func buildCustomCertificate(b64cert string, b64key string) (certificate, error) func generateCertificateAuthority( cn string, daysValid int, +) (certificate, error) { + priv, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return certificate{}, fmt.Errorf("error generating rsa key: %s", err) + } + + return generateCertificateAuthorityWithKeyInternal(cn, daysValid, priv) +} + +func generateCertificateAuthorityWithPEMKey( + cn string, + daysValid int, + privPEM string, +) (certificate, error) { + priv, err := parsePrivateKeyPEM(privPEM) + if err != nil { + return certificate{}, fmt.Errorf("parsing private key: %s", err) + } + return generateCertificateAuthorityWithKeyInternal(cn, daysValid, priv) +} + +func generateCertificateAuthorityWithKeyInternal( + cn string, + daysValid int, + priv crypto.PrivateKey, ) (certificate, error) { ca := certificate{} @@ -247,11 +351,6 @@ func generateCertificateAuthority( x509.KeyUsageCertSign template.IsCA = true - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return ca, fmt.Errorf("error generating rsa key: %s", err) - } - ca.Cert, ca.Key, err = getCertAndKey(template, priv, template, priv) return ca, err @@ -263,16 +362,39 @@ func generateSelfSignedCertificate( alternateDNS []interface{}, daysValid int, ) (certificate, error) { - cert := certificate{} + priv, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return certificate{}, fmt.Errorf("error generating rsa key: %s", err) + } + return generateSelfSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, priv) +} - template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid) +func generateSelfSignedCertificateWithPEMKey( + cn string, + ips []interface{}, + alternateDNS []interface{}, + daysValid int, + privPEM string, +) (certificate, error) { + priv, err := parsePrivateKeyPEM(privPEM) if err != nil { - return cert, err + return certificate{}, fmt.Errorf("parsing private key: %s", err) } + return generateSelfSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, priv) +} - priv, err := rsa.GenerateKey(rand.Reader, 2048) +func generateSelfSignedCertificateWithKeyInternal( + cn string, + ips []interface{}, + alternateDNS []interface{}, + daysValid int, + priv crypto.PrivateKey, +) (certificate, error) { + cert := certificate{} + + template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid) if err != nil { - return cert, fmt.Errorf("error generating rsa key: %s", err) + return cert, err } cert.Cert, cert.Key, err = getCertAndKey(template, priv, template, 
priv) @@ -286,6 +408,36 @@ func generateSignedCertificate( alternateDNS []interface{}, daysValid int, ca certificate, +) (certificate, error) { + priv, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + return certificate{}, fmt.Errorf("error generating rsa key: %s", err) + } + return generateSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, ca, priv) +} + +func generateSignedCertificateWithPEMKey( + cn string, + ips []interface{}, + alternateDNS []interface{}, + daysValid int, + ca certificate, + privPEM string, +) (certificate, error) { + priv, err := parsePrivateKeyPEM(privPEM) + if err != nil { + return certificate{}, fmt.Errorf("parsing private key: %s", err) + } + return generateSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, ca, priv) +} + +func generateSignedCertificateWithKeyInternal( + cn string, + ips []interface{}, + alternateDNS []interface{}, + daysValid int, + ca certificate, + priv crypto.PrivateKey, ) (certificate, error) { cert := certificate{} @@ -300,14 +452,10 @@ func generateSignedCertificate( err, ) } - decodedSignerKey, _ := pem.Decode([]byte(ca.Key)) - if decodedSignerKey == nil { - return cert, errors.New("unable to decode key") - } - signerKey, err := x509.ParsePKCS1PrivateKey(decodedSignerKey.Bytes) + signerKey, err := parsePrivateKeyPEM(ca.Key) if err != nil { return cert, fmt.Errorf( - "error parsing prive key: decodedSignerKey.Bytes: %s", + "error parsing private key: %s", err, ) } @@ -317,11 +465,6 @@ func generateSignedCertificate( return cert, err } - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return cert, fmt.Errorf("error generating rsa key: %s", err) - } - cert.Cert, cert.Key, err = getCertAndKey( template, priv, @@ -334,15 +477,19 @@ func generateSignedCertificate( func getCertAndKey( template *x509.Certificate, - signeeKey *rsa.PrivateKey, + signeeKey crypto.PrivateKey, parent *x509.Certificate, - signingKey *rsa.PrivateKey, + signingKey crypto.PrivateKey, ) (string, string, error) { + signeePubKey, err := getPublicKey(signeeKey) + if err != nil { + return "", "", fmt.Errorf("error retrieving public key from signee key: %s", err) + } derBytes, err := x509.CreateCertificate( rand.Reader, template, parent, - &signeeKey.PublicKey, + signeePubKey, signingKey, ) if err != nil { @@ -360,10 +507,7 @@ func getCertAndKey( keyBuffer := bytes.Buffer{} if err := pem.Encode( &keyBuffer, - &pem.Block{ - Type: "RSA PRIVATE KEY", - Bytes: x509.MarshalPKCS1PrivateKey(signeeKey), - }, + pemBlockForKey(signeeKey), ); err != nil { return "", "", fmt.Errorf("error pem-encoding key: %s", err) } diff --git a/vendor/github.com/Masterminds/sprig/v3/defaults.go b/vendor/github.com/Masterminds/sprig/v3/defaults.go index 97d7d6e..b9f9796 100644 --- a/vendor/github.com/Masterminds/sprig/v3/defaults.go +++ b/vendor/github.com/Masterminds/sprig/v3/defaults.go @@ -3,10 +3,16 @@ package sprig import ( "bytes" "encoding/json" + "math/rand" "reflect" "strings" + "time" ) +func init() { + rand.Seed(time.Now().UnixNano()) +} + // dfault checks whether `given` is set, and returns default if not set. // // This returns `d` if `given` appears not to be set, and `given` otherwise. @@ -63,6 +69,41 @@ func coalesce(v ...interface{}) interface{} { return nil } +// all returns true if empty(x) is false for all values x in the list. +// If the list is empty, return true. 
+func all(v ...interface{}) bool { + for _, val := range v { + if empty(val) { + return false + } + } + return true +} + +// any returns true if empty(x) is false for any x in the list. +// If the list is empty, return false. +func any(v ...interface{}) bool { + for _, val := range v { + if !empty(val) { + return true + } + } + return false +} + +// fromJson decodes JSON into a structured value, ignoring errors. +func fromJson(v string) interface{} { + output, _ := mustFromJson(v) + return output +} + +// mustFromJson decodes JSON into a structured value, returning errors. +func mustFromJson(v string) (interface{}, error) { + var output interface{} + err := json.Unmarshal([]byte(v), &output) + return output, err +} + // toJson encodes an item into a JSON string func toJson(v interface{}) string { output, _ := json.Marshal(v) diff --git a/vendor/github.com/Masterminds/sprig/v3/dict.go b/vendor/github.com/Masterminds/sprig/v3/dict.go index 11d943f..ade8896 100644 --- a/vendor/github.com/Masterminds/sprig/v3/dict.go +++ b/vendor/github.com/Masterminds/sprig/v3/dict.go @@ -146,3 +146,29 @@ func deepCopy(i interface{}) interface{} { func mustDeepCopy(i interface{}) (interface{}, error) { return copystructure.Copy(i) } + +func dig(ps ...interface{}) (interface{}, error) { + if len(ps) < 3 { + panic("dig needs at least three arguments") + } + dict := ps[len(ps)-1].(map[string]interface{}) + def := ps[len(ps)-2] + ks := make([]string, len(ps)-2) + for i := 0; i < len(ks); i++ { + ks[i] = ps[i].(string) + } + + return digFromDict(dict, def, ks) +} + +func digFromDict(dict map[string]interface{}, d interface{}, ks []string) (interface{}, error) { + k, ns := ks[0], ks[1:len(ks)] + step, has := dict[k] + if !has { + return d, nil + } + if len(ns) == 0 { + return step, nil + } + return digFromDict(step.(map[string]interface{}), d, ns) +} diff --git a/vendor/github.com/Masterminds/sprig/v3/functions.go b/vendor/github.com/Masterminds/sprig/v3/functions.go index c16e9c3..57fcec1 100644 --- a/vendor/github.com/Masterminds/sprig/v3/functions.go +++ b/vendor/github.com/Masterminds/sprig/v3/functions.go @@ -3,8 +3,10 @@ package sprig import ( "errors" "html/template" + "math/rand" "os" "path" + "path/filepath" "reflect" "strconv" "strings" @@ -13,6 +15,7 @@ import ( util "github.com/Masterminds/goutils" "github.com/huandu/xstrings" + "github.com/shopspring/decimal" ) // FuncMap produces the function map. 
@@ -80,6 +83,7 @@ var nonhermeticFunctions = []string{ "randAlpha", "randAscii", "randNumeric", + "randBytes", "uuidv4", // OS @@ -200,9 +204,28 @@ var genericMap = map[string]interface{}{ } return val }, + "randInt": func(min, max int) int { return rand.Intn(max-min) + min }, + "add1f": func(i interface{}) float64 { + return execDecimalOp(i, []interface{}{1}, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Add(d2) }) + }, + "addf": func(i ...interface{}) float64 { + a := interface{}(float64(0)) + return execDecimalOp(a, i, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Add(d2) }) + }, + "subf": func(a interface{}, v ...interface{}) float64 { + return execDecimalOp(a, v, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Sub(d2) }) + }, + "divf": func(a interface{}, v ...interface{}) float64 { + return execDecimalOp(a, v, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Div(d2) }) + }, + "mulf": func(a interface{}, v ...interface{}) float64 { + return execDecimalOp(a, v, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Mul(d2) }) + }, "biggest": max, "max": max, "min": min, + "maxf": maxf, + "minf": minf, "ceil": ceil, "floor": floor, "round": round, @@ -216,11 +239,15 @@ var genericMap = map[string]interface{}{ "default": dfault, "empty": empty, "coalesce": coalesce, + "all": all, + "any": any, "compact": compact, "mustCompact": mustCompact, + "fromJson": fromJson, "toJson": toJson, "toPrettyJson": toPrettyJson, "toRawJson": toRawJson, + "mustFromJson": mustFromJson, "mustToJson": mustToJson, "mustToPrettyJson": mustToPrettyJson, "mustToRawJson": mustToRawJson, @@ -243,13 +270,20 @@ var genericMap = map[string]interface{}{ // Network: "getHostByName": getHostByName, - // File Paths: + // Paths: "base": path.Base, "dir": path.Dir, "clean": path.Clean, "ext": path.Ext, "isAbs": path.IsAbs, + // Filepaths: + "osBase": filepath.Base, + "osClean": filepath.Clean, + "osDir": filepath.Dir, + "osExt": filepath.Ext, + "osIsAbs": filepath.IsAbs, + // Encoding: "b64enc": base64encode, "b64dec": base64decode, @@ -297,17 +331,25 @@ var genericMap = map[string]interface{}{ "slice": slice, "mustSlice": mustSlice, "concat": concat, + "dig": dig, + "chunk": chunk, + "mustChunk": mustChunk, // Crypto: + "bcrypt": bcrypt, "htpasswd": htpasswd, "genPrivateKey": generatePrivateKey, "derivePassword": derivePassword, "buildCustomCert": buildCustomCertificate, "genCA": generateCertificateAuthority, + "genCAWithKey": generateCertificateAuthorityWithPEMKey, "genSelfSignedCert": generateSelfSignedCertificate, + "genSelfSignedCertWithKey": generateSelfSignedCertificateWithPEMKey, "genSignedCert": generateSignedCertificate, + "genSignedCertWithKey": generateSignedCertificateWithPEMKey, "encryptAES": encryptAES, "decryptAES": decryptAES, + "randBytes": randBytes, // UUIDs: "uuidv4": uuidv4, @@ -332,6 +374,7 @@ var genericMap = map[string]interface{}{ "mustRegexReplaceAllLiteral": mustRegexReplaceAllLiteral, "regexSplit": regexSplit, "mustRegexSplit": mustRegexSplit, + "regexQuoteMeta": regexQuoteMeta, // URLs: "urlParse": urlParse, diff --git a/vendor/github.com/Masterminds/sprig/v3/go.mod b/vendor/github.com/Masterminds/sprig/v3/go.mod index e064b8d..c259709 100644 --- a/vendor/github.com/Masterminds/sprig/v3/go.mod +++ b/vendor/github.com/Masterminds/sprig/v3/go.mod @@ -3,12 +3,13 @@ module github.com/Masterminds/sprig/v3 go 1.13 require ( - github.com/Masterminds/goutils v1.1.0 - github.com/Masterminds/semver/v3 v3.1.0 + github.com/Masterminds/goutils v1.1.1 + 
github.com/Masterminds/semver/v3 v3.1.1 github.com/google/uuid v1.1.1 github.com/huandu/xstrings v1.3.1 - github.com/imdario/mergo v0.3.8 + github.com/imdario/mergo v0.3.11 github.com/mitchellh/copystructure v1.0.0 + github.com/shopspring/decimal v1.2.0 github.com/spf13/cast v1.3.1 github.com/stretchr/testify v1.5.1 golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 diff --git a/vendor/github.com/Masterminds/sprig/v3/go.sum b/vendor/github.com/Masterminds/sprig/v3/go.sum index 47157a6..b0e7f01 100644 --- a/vendor/github.com/Masterminds/sprig/v3/go.sum +++ b/vendor/github.com/Masterminds/sprig/v3/go.sum @@ -1,49 +1,43 @@ github.com/Masterminds/goutils v1.1.0 h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver/v3 v3.0.1 h1:2kKm5lb7dKVrt5TYUiAavE6oFc1cFT0057UVGT+JqLk= -github.com/Masterminds/semver/v3 v3.0.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/semver/v3 v3.0.2 h1:tRi7ENs+AaOUCH+j6qwNQgPYfV26dX3JNonq+V4mhqc= -github.com/Masterminds/semver/v3 v3.0.2/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/semver/v3 v3.0.3 h1:znjIyLfpXEDQjOIEWh+ehwpTU14UzUPub3c3sm36u14= -github.com/Masterminds/semver/v3 v3.0.3/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver/v3 v3.1.0 h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk= github.com/Masterminds/semver/v3 v3.1.0/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/huandu/xstrings v1.2.0 h1:yPeWdRnmynF7p+lLYz0H2tthW9lqhMJrQV/U7yy4wX0= -github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= github.com/huandu/xstrings v1.3.1 h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs= github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/imdario/mergo v0.3.7 h1:Y+UAYTZ7gDEuOfhxKWy+dvb5dRQ6rJjFSdX2HZY1/gI= -github.com/imdario/mergo v0.3.7/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.8 h1:CGgOkSJeqMRmt0D9XLWExdT4m4F1vd3FV3VPt+0VxkQ= github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.10 h1:6q5mVkdH/vYmqngx7kZQTjJ5HRsx+ImorDIEQ+beJgc= +github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= +github.com/imdario/mergo 
v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7 h1:0hQKqeLdqlt5iIwVOBErRisrHJAN57yOiPRQItI20fU= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8= golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -54,3 +48,5 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/Masterminds/sprig/v3/list.go b/vendor/github.com/Masterminds/sprig/v3/list.go index 063fe4f..ca0fbb7 100644 --- a/vendor/github.com/Masterminds/sprig/v3/list.go +++ b/vendor/github.com/Masterminds/sprig/v3/list.go @@ -2,6 +2,7 @@ package sprig import ( "fmt" + "math" "reflect" "sort" ) @@ -72,6 +73,50 @@ func mustPrepend(list interface{}, v interface{}) ([]interface{}, error) { } } +func chunk(size int, list interface{}) [][]interface{} { + l, err := mustChunk(size, list) + if err != nil { + panic(err) + } + + return l +} + +func mustChunk(size int, list interface{}) ([][]interface{}, error) { + tp := reflect.TypeOf(list).Kind() + switch tp 
{ + case reflect.Slice, reflect.Array: + l2 := reflect.ValueOf(list) + + l := l2.Len() + + cs := int(math.Floor(float64(l-1)/float64(size)) + 1) + nl := make([][]interface{}, cs) + + for i := 0; i < cs; i++ { + clen := size + if i == cs-1 { + clen = int(math.Floor(math.Mod(float64(l), float64(size)))) + if clen == 0 { + clen = size + } + } + + nl[i] = make([]interface{}, clen) + + for j := 0; j < clen; j++ { + ix := i*size + j + nl[i][j] = l2.Index(ix).Interface() + } + } + + return nl, nil + + default: + return nil, fmt.Errorf("Cannot chunk type %s", tp) + } +} + func last(list interface{}) interface{} { l, err := mustLast(list) if err != nil { diff --git a/vendor/github.com/Masterminds/sprig/v3/network.go b/vendor/github.com/Masterminds/sprig/v3/network.go index d786cc7..108d78a 100644 --- a/vendor/github.com/Masterminds/sprig/v3/network.go +++ b/vendor/github.com/Masterminds/sprig/v3/network.go @@ -7,6 +7,6 @@ import ( func getHostByName(name string) string { addrs, _ := net.LookupHost(name) - //TODO: add error handing when release v3 cames out + //TODO: add error handing when release v3 comes out return addrs[rand.Intn(len(addrs))] } diff --git a/vendor/github.com/Masterminds/sprig/v3/numeric.go b/vendor/github.com/Masterminds/sprig/v3/numeric.go index 638ef27..f68e418 100644 --- a/vendor/github.com/Masterminds/sprig/v3/numeric.go +++ b/vendor/github.com/Masterminds/sprig/v3/numeric.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/spf13/cast" + "github.com/shopspring/decimal" ) // toFloat64 converts 64-bit floats @@ -34,6 +35,15 @@ func max(a interface{}, i ...interface{}) int64 { return aa } +func maxf(a interface{}, i ...interface{}) float64 { + aa := toFloat64(a) + for _, b := range i { + bb := toFloat64(b) + aa = math.Max(aa, bb) + } + return aa +} + func min(a interface{}, i ...interface{}) int64 { aa := toInt64(a) for _, b := range i { @@ -45,6 +55,15 @@ func min(a interface{}, i ...interface{}) int64 { return aa } +func minf(a interface{}, i ...interface{}) float64 { + aa := toFloat64(a) + for _, b := range i { + bb := toFloat64(b) + aa = math.Min(aa, bb) + } + return aa +} + func until(count int) []int { step := 1 if count < 0 { @@ -153,3 +172,15 @@ func seq(params ...int) string { func intArrayToString(slice []int, delimeter string) string { return strings.Trim(strings.Join(strings.Fields(fmt.Sprint(slice)), delimeter), "[]") } + +// performs a float and subsequent decimal.Decimal conversion on inputs, +// and iterates through a and b executing the mathmetical operation f +func execDecimalOp(a interface{}, b []interface{}, f func(d1, d2 decimal.Decimal) decimal.Decimal) float64 { + prt := decimal.NewFromFloat(toFloat64(a)) + for _, x := range b { + dx := decimal.NewFromFloat(toFloat64(x)) + prt = f(prt, dx) + } + rslt, _ := prt.Float64() + return rslt +} diff --git a/vendor/github.com/Masterminds/sprig/v3/regex.go b/vendor/github.com/Masterminds/sprig/v3/regex.go index 2370878..fab5510 100644 --- a/vendor/github.com/Masterminds/sprig/v3/regex.go +++ b/vendor/github.com/Masterminds/sprig/v3/regex.go @@ -77,3 +77,7 @@ func mustRegexSplit(regex string, s string, n int) ([]string, error) { } return r.Split(s, n), nil } + +func regexQuoteMeta(s string) string { + return regexp.QuoteMeta(s) +} diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md index 42d9abc..5152bf5 100644 --- a/vendor/github.com/fatih/color/README.md +++ b/vendor/github.com/fatih/color/README.md @@ -1,20 +1,11 @@ -# Archived project. No maintenance. 
- -This project is not maintained anymore and is archived. Feel free to fork and -make your own changes if needed. For more detail read my blog post: [Taking an indefinite sabbatical from my projects](https://arslan.io/2018/10/09/taking-an-indefinite-sabbatical-from-my-projects/) - -Thanks to everyone for their valuable feedback and contributions. - - -# Color [![GoDoc](https://godoc.org/github.com/fatih/color?status.svg)](https://godoc.org/github.com/fatih/color) +# color [![](https://github.com/fatih/color/workflows/build/badge.svg)](https://github.com/fatih/color/actions) [![PkgGoDev](https://pkg.go.dev/badge/github.com/fatih/color)](https://pkg.go.dev/github.com/fatih/color) Color lets you use colorized outputs in terms of [ANSI Escape Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It has support for Windows too! The API can be used in several ways, pick one that suits you. - -![Color](https://i.imgur.com/c1JI0lA.png) +![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) ## Install @@ -87,7 +78,7 @@ notice("Don't forget this...") ### Custom fprint functions (FprintFunc) ```go -blue := color.New(FgBlue).FprintfFunc() +blue := color.New(color.FgBlue).FprintfFunc() blue(myWriter, "important notice: %s", stars) // Mix up with multiple attributes @@ -136,14 +127,16 @@ fmt.Println("All text will now be bold magenta.") There might be a case where you want to explicitly disable/enable color output. the `go-isatty` package will automatically disable color output for non-tty output streams -(for example if the output were piped directly to `less`) +(for example if the output were piped directly to `less`). -`Color` has support to disable/enable colors both globally and for single color -definitions. For example suppose you have a CLI app and a `--no-color` bool flag. You -can easily disable the color output with: +The `color` package also disables color output if the [`NO_COLOR`](https://no-color.org) environment +variable is set (regardless of its value). -```go +`Color` has support to disable/enable colors programatically both globally and +for single color definitions. For example suppose you have a CLI app and a +`--no-color` bool flag. You can easily disable the color output with: +```go var flagNoColor = flag.Bool("no-color", false, "Disable color output") if *flagNoColor { @@ -165,6 +158,10 @@ c.EnableColor() c.Println("This prints again cyan...") ``` +## GitHub Actions + +To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams. + ## Todo * Save/Return previous values @@ -179,4 +176,3 @@ c.Println("This prints again cyan...") ## License The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details - diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go index 91c8e9f..98a60f3 100644 --- a/vendor/github.com/fatih/color/color.go +++ b/vendor/github.com/fatih/color/color.go @@ -15,9 +15,11 @@ import ( var ( // NoColor defines if the output is colorized or not. It's dynamically set to // false or true based on the stdout's file descriptor referring to a terminal - // or not. This is a global option and affects all colors. For more control - // over each color block use the methods DisableColor() individually. - NoColor = os.Getenv("TERM") == "dumb" || + // or not. 
It's also set to true if the NO_COLOR environment variable is + // set (regardless of its value). This is a global option and affects all + // colors. For more control over each color block use the methods + // DisableColor() individually. + NoColor = noColorExists() || os.Getenv("TERM") == "dumb" || (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) // Output defines the standard output of the print functions. By default @@ -33,6 +35,12 @@ var ( colorsCacheMu sync.Mutex // protects colorsCache ) +// noColorExists returns true if the environment variable NO_COLOR exists. +func noColorExists() bool { + _, exists := os.LookupEnv("NO_COLOR") + return exists +} + // Color defines a custom color object which is defined by SGR parameters. type Color struct { params []Attribute @@ -108,7 +116,14 @@ const ( // New returns a newly created color object. func New(value ...Attribute) *Color { - c := &Color{params: make([]Attribute, 0)} + c := &Color{ + params: make([]Attribute, 0), + } + + if noColorExists() { + c.noColor = boolPtr(true) + } + c.Add(value...) return c } @@ -387,7 +402,7 @@ func (c *Color) EnableColor() { } func (c *Color) isNoColorSet() bool { - // check first if we have user setted action + // check first if we have user set action if c.noColor != nil { return *c.noColor } diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go index cf1e965..04541de 100644 --- a/vendor/github.com/fatih/color/doc.go +++ b/vendor/github.com/fatih/color/doc.go @@ -118,6 +118,8 @@ the color output with: color.NoColor = true // disables colorized output } +You can also disable the color by setting the NO_COLOR environment variable to any value. + It also has support for single color definitions (local). 
You can disable/enable color output on the fly: diff --git a/vendor/github.com/fatih/color/go.mod b/vendor/github.com/fatih/color/go.mod index bc0df75..c9b3cd5 100644 --- a/vendor/github.com/fatih/color/go.mod +++ b/vendor/github.com/fatih/color/go.mod @@ -3,6 +3,6 @@ module github.com/fatih/color go 1.13 require ( - github.com/mattn/go-colorable v0.1.4 - github.com/mattn/go-isatty v0.0.11 + github.com/mattn/go-colorable v0.1.9 + github.com/mattn/go-isatty v0.0.14 ) diff --git a/vendor/github.com/fatih/color/go.sum b/vendor/github.com/fatih/color/go.sum index 44328a8..cbbcfb6 100644 --- a/vendor/github.com/fatih/color/go.sum +++ b/vendor/github.com/fatih/color/go.sum @@ -1,8 +1,9 @@ -github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/vendor/github.com/go-logr/logr/LICENSE b/vendor/github.com/go-logr/logr/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/vendor/github.com/go-logr/logr/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-logr/logr/README.md b/vendor/github.com/go-logr/logr/README.md new file mode 100644 index 0000000..e9b5520 --- /dev/null +++ b/vendor/github.com/go-logr/logr/README.md @@ -0,0 +1,183 @@ +# A more minimal logging API for Go + +Before you consider this package, please read [this blog post by the +inimitable Dave Cheney][warning-makes-no-sense]. 
I really appreciate what +he has to say, and it largely aligns with my own experiences. Too many +choices of levels means inconsistent logs. + +This package offers a purely abstract interface, based on these ideas but with +a few twists. Code can depend on just this interface and have the actual +logging implementation be injected from callers. Ideally only `main()` knows +what logging implementation is being used. + +# Differences from Dave's ideas + +The main differences are: + +1) Dave basically proposes doing away with the notion of a logging API in favor +of `fmt.Printf()`. I disagree, especially when you consider things like output +locations, timestamps, file and line decorations, and structured logging. I +restrict the API to just 2 types of logs: info and error. + +Info logs are things you want to tell the user which are not errors. Error +logs are, well, errors. If your code receives an `error` from a subordinate +function call and is logging that `error` *and not returning it*, use error +logs. + +2) Verbosity-levels on info logs. This gives developers a chance to indicate +arbitrary grades of importance for info logs, without assigning names with +semantic meaning such as "warning", "trace", and "debug". Superficially this +may feel very similar, but the primary difference is the lack of semantics. +Because verbosity is a numerical value, it's safe to assume that an app running +with higher verbosity means more (and less important) logs will be generated. + +This is a BETA grade API. + +There are implementations for the following logging libraries: + +- **github.com/google/glog**: [glogr](https://github.com/go-logr/glogr) +- **k8s.io/klog**: [klogr](https://git.k8s.io/klog/klogr) +- **go.uber.org/zap**: [zapr](https://github.com/go-logr/zapr) +- **log** (the Go standard library logger): + [stdr](https://github.com/go-logr/stdr) +- **github.com/sirupsen/logrus**: [logrusr](https://github.com/bombsimon/logrusr) +- **github.com/wojas/genericr**: [genericr](https://github.com/wojas/genericr) (makes it easy to implement your own backend) +- **logfmt** (Heroku style [logging](https://www.brandur.org/logfmt)): [logfmtr](https://github.com/iand/logfmtr) + +# FAQ + +## Conceptual + +## Why structured logging? + +- **Structured logs are more easily queriable**: Since you've got + key-value pairs, it's much easier to query your structured logs for + particular values by filtering on the contents of a particular key -- + think searching request logs for error codes, Kubernetes reconcilers for + the name and namespace of the reconciled object, etc + +- **Structured logging makes it easier to have cross-referencable logs**: + Similarly to searchability, if you maintain conventions around your + keys, it becomes easy to gather all log lines related to a particular + concept. + +- **Structured logs allow better dimensions of filtering**: if you have + structure to your logs, you've got more precise control over how much + information is logged -- you might choose in a particular configuration + to log certain keys but not others, only log lines where a certain key + matches a certain value, etc, instead of just having v-levels and names + to key off of. + +- **Structured logs better represent structured data**: sometimes, the + data that you want to log is inherently structured (think tuple-link + objects). Structured logs allow you to preserve that structure when + outputting. + +## Why V-levels? + +**V-levels give operators an easy way to control the chattiness of log +operations**. 
V-levels provide a way for a given package to distinguish +the relative importance or verbosity of a given log message. Then, if +a particular logger or package is logging too many messages, the user +of the package can simply change the v-levels for that library. + +## Why not more named levels, like Warning? + +Read [Dave Cheney's post][warning-makes-no-sense]. Then read [Differences +from Dave's ideas](#differences-from-daves-ideas). + +## Why not allow format strings, too? + +**Format strings negate many of the benefits of structured logs**: + +- They're not easily searchable without resorting to fuzzy searching, + regular expressions, etc + +- They don't store structured data well, since contents are flattened into + a string + +- They're not cross-referencable + +- They don't compress easily, since the message is not constant + +(unless you turn positional parameters into key-value pairs with numerical +keys, at which point you've gotten key-value logging with meaningless +keys) + +## Practical + +## Why key-value pairs, and not a map? + +Key-value pairs are *much* easier to optimize, especially around +allocations. Zap (a structured logger that inspired logr's interface) has +[performance measurements](https://github.com/uber-go/zap#performance) +that show this quite nicely. + +While the interface ends up being a little less obvious, you get +potentially better performance, plus avoid making users type +`map[string]string{}` every time they want to log. + +## What if my V-levels differ between libraries? + +That's fine. Control your V-levels on a per-logger basis, and use the +`WithName` function to pass different loggers to different libraries. + +Generally, you should take care to ensure that you have relatively +consistent V-levels within a given logger, however, as this makes deciding +on what verbosity of logs to request easier. + +## But I *really* want to use a format string! + +That's not actually a question. Assuming your question is "how do +I convert my mental model of logging with format strings to logging with +constant messages": + +1. figure out what the error actually is, as you'd write in a TL;DR style, + and use that as a message + +2. For every place you'd write a format specifier, look to the word before + it, and add that as a key value pair + +For instance, consider the following examples (all taken from spots in the +Kubernetes codebase): + +- `klog.V(4).Infof("Client is returning errors: code %v, error %v", + responseCode, err)` becomes `logger.Error(err, "client returned an + error", "code", responseCode)` + +- `klog.V(4).Infof("Got a Retry-After %ds response for attempt %d to %v", + seconds, retries, url)` becomes `logger.V(4).Info("got a retry-after + response when requesting url", "attempt", retries, "after + seconds", seconds, "url", url)` + +If you *really* must use a format string, place it as a key value, and +call `fmt.Sprintf` yourself -- for instance, `log.Printf("unable to +reflect over type %T")` becomes `logger.Info("unable to reflect over +type", "type", fmt.Sprintf("%T"))`. In general though, the cases where +this is necessary should be few and far between. + +## How do I choose my V-levels? + +This is basically the only hard constraint: increase V-levels to denote +more verbose or more debug-y logs. + +Otherwise, you can start out with `0` as "you always want to see this", +`1` as "common logging that you might *possibly* want to turn off", and +`10` as "I would like to performance-test your log collection stack". 
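(Editorial aside, not part of the vendored README: the V-level and key/value guidance above might translate into code roughly as follows. This is a minimal sketch only; logr.Discard(), which this patch also vendors, stands in for a real backend such as zapr or stdr, and the object names, values, and error are invented for illustration.)

package main

import (
	"errors"

	"github.com/go-logr/logr"
)

func main() {
	// Discard() is a no-op Logger; a real program would inject zapr, stdr, etc. in main().
	logger := logr.Discard().WithName("reconcilers").WithValues("target-type", "Foo")

	// Level 0: information you always want to see.
	logger.Info("setting foo on object", "value", 42, "object", "default/foo")

	// Level 1: common logging you might possibly want to turn off.
	logger.V(1).Info("requeueing object", "object", "default/foo", "afterSeconds", 30)

	// Errors go through the dedicated Error method, with a constant message and key/value context.
	logger.Error(errors.New("boom"), "unable to reconcile object", "object", "default/foo")
}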
+ +Then gradually choose levels in between as you need them, working your way +down from 10 (for debug and trace style logs) and up from 1 (for chattier +info-type logs). + +## How do I choose my keys + +- make your keys human-readable +- constant keys are generally a good idea +- be consistent across your codebase +- keys should naturally match parts of the message string + +While key names are mostly unrestricted (and spaces are acceptable), +it's generally a good idea to stick to printable ascii characters, or at +least match the general character set of your log lines. + +[warning-makes-no-sense]: http://dave.cheney.net/2015/11/05/lets-talk-about-logging diff --git a/vendor/github.com/go-logr/logr/discard.go b/vendor/github.com/go-logr/logr/discard.go new file mode 100644 index 0000000..2bafb13 --- /dev/null +++ b/vendor/github.com/go-logr/logr/discard.go @@ -0,0 +1,51 @@ +/* +Copyright 2020 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +// Discard returns a valid Logger that discards all messages logged to it. +// It can be used whenever the caller is not interested in the logs. +func Discard() Logger { + return DiscardLogger{} +} + +// DiscardLogger is a Logger that discards all messages. +type DiscardLogger struct{} + +func (l DiscardLogger) Enabled() bool { + return false +} + +func (l DiscardLogger) Info(msg string, keysAndValues ...interface{}) { +} + +func (l DiscardLogger) Error(err error, msg string, keysAndValues ...interface{}) { +} + +func (l DiscardLogger) V(level int) Logger { + return l +} + +func (l DiscardLogger) WithValues(keysAndValues ...interface{}) Logger { + return l +} + +func (l DiscardLogger) WithName(name string) Logger { + return l +} + +// Verify that it actually implements the interface +var _ Logger = DiscardLogger{} diff --git a/vendor/github.com/go-logr/logr/go.mod b/vendor/github.com/go-logr/logr/go.mod new file mode 100644 index 0000000..591884e --- /dev/null +++ b/vendor/github.com/go-logr/logr/go.mod @@ -0,0 +1,3 @@ +module github.com/go-logr/logr + +go 1.14 diff --git a/vendor/github.com/go-logr/logr/logr.go b/vendor/github.com/go-logr/logr/logr.go new file mode 100644 index 0000000..842428b --- /dev/null +++ b/vendor/github.com/go-logr/logr/logr.go @@ -0,0 +1,266 @@ +/* +Copyright 2019 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// This design derives from Dave Cheney's blog: +// http://dave.cheney.net/2015/11/05/lets-talk-about-logging +// +// This is a BETA grade API. 
Until there is a significant 2nd implementation, +// I don't really know how it will change. + +// Package logr defines abstract interfaces for logging. Packages can depend on +// these interfaces and callers can implement logging in whatever way is +// appropriate. +// +// Usage +// +// Logging is done using a Logger. Loggers can have name prefixes and named +// values attached, so that all log messages logged with that Logger have some +// base context associated. +// +// The term "key" is used to refer to the name associated with a particular +// value, to disambiguate it from the general Logger name. +// +// For instance, suppose we're trying to reconcile the state of an object, and +// we want to log that we've made some decision. +// +// With the traditional log package, we might write: +// log.Printf("decided to set field foo to value %q for object %s/%s", +// targetValue, object.Namespace, object.Name) +// +// With logr's structured logging, we'd write: +// // elsewhere in the file, set up the logger to log with the prefix of +// // "reconcilers", and the named value target-type=Foo, for extra context. +// log := mainLogger.WithName("reconcilers").WithValues("target-type", "Foo") +// +// // later on... +// log.Info("setting foo on object", "value", targetValue, "object", object) +// +// Depending on our logging implementation, we could then make logging decisions +// based on field values (like only logging such events for objects in a certain +// namespace), or copy the structured information into a structured log store. +// +// For logging errors, Logger has a method called Error. Suppose we wanted to +// log an error while reconciling. With the traditional log package, we might +// write: +// log.Errorf("unable to reconcile object %s/%s: %v", object.Namespace, object.Name, err) +// +// With logr, we'd instead write: +// // assuming the above setup for log +// log.Error(err, "unable to reconcile object", "object", object) +// +// This functions similarly to: +// log.Info("unable to reconcile object", "error", err, "object", object) +// +// However, it ensures that a standard key for the error value ("error") is used +// across all error logging. Furthermore, certain implementations may choose to +// attach additional information (such as stack traces) on calls to Error, so +// it's preferred to use Error to log errors. +// +// Parts of a log line +// +// Each log message from a Logger has four types of context: +// logger name, log verbosity, log message, and the named values. +// +// The Logger name consists of a series of name "segments" added by successive +// calls to WithName. These name segments will be joined in some way by the +// underlying implementation. It is strongly recommended that name segments +// contain simple identifiers (letters, digits, and hyphen), and do not contain +// characters that could muddle the log output or confuse the joining operation +// (e.g. whitespace, commas, periods, slashes, brackets, quotes, etc). +// +// Log verbosity represents how little a log matters. Level zero, the default, +// matters most. Increasing levels matter less and less. Try to avoid lots of +// different verbosity levels, and instead provide useful keys, logger names, +// and log messages for users to filter on. It's illegal to pass a log level +// below zero. +// +// The log message consists of a constant message attached to the log line. +// This should generally be a simple description of what's occurring, and should +// never be a format string. 
+// +// Variable information can then be attached using named values (key/value +// pairs). Keys are arbitrary strings, while values may be any Go value. +// +// Key Naming Conventions +// +// Keys are not strictly required to conform to any specification or regex, but +// it is recommended that they: +// * be human-readable and meaningful (not auto-generated or simple ordinals) +// * be constant (not dependent on input data) +// * contain only printable characters +// * not contain whitespace or punctuation +// +// These guidelines help ensure that log data is processed properly regardless +// of the log implementation. For example, log implementations will try to +// output JSON data or will store data for later database (e.g. SQL) queries. +// +// While users are generally free to use key names of their choice, it's +// generally best to avoid using the following keys, as they're frequently used +// by implementations: +// +// * `"caller"`: the calling information (file/line) of a particular log line. +// * `"error"`: the underlying error value in the `Error` method. +// * `"level"`: the log level. +// * `"logger"`: the name of the associated logger. +// * `"msg"`: the log message. +// * `"stacktrace"`: the stack trace associated with a particular log line or +// error (often from the `Error` message). +// * `"ts"`: the timestamp for a log line. +// +// Implementations are encouraged to make use of these keys to represent the +// above concepts, when necessary (for example, in a pure-JSON output form, it +// would be necessary to represent at least message and timestamp as ordinary +// named values). +// +// Implementations may choose to give callers access to the underlying +// logging implementation. The recommended pattern for this is: +// // Underlier exposes access to the underlying logging implementation. +// // Since callers only have a logr.Logger, they have to know which +// // implementation is in use, so this interface is less of an abstraction +// // and more of way to test type conversion. +// type Underlier interface { +// GetUnderlying() +// } +package logr + +import ( + "context" +) + +// TODO: consider adding back in format strings if they're really needed +// TODO: consider other bits of zap/zapcore functionality like ObjectMarshaller (for arbitrary objects) +// TODO: consider other bits of glog functionality like Flush, OutputStats + +// Logger represents the ability to log messages, both errors and not. +type Logger interface { + // Enabled tests whether this Logger is enabled. For example, commandline + // flags might be used to set the logging verbosity and disable some info + // logs. + Enabled() bool + + // Info logs a non-error message with the given key/value pairs as context. + // + // The msg argument should be used to add some constant description to + // the log line. The key/value pairs can then be used to add additional + // variable information. The key/value pairs should alternate string + // keys and arbitrary values. + Info(msg string, keysAndValues ...interface{}) + + // Error logs an error, with the given message and key/value pairs as context. + // It functions similarly to calling Info with the "error" named value, but may + // have unique behavior, and should be preferred for logging errors (see the + // package documentations for more information). + // + // The msg field should be used to add context to any underlying error, + // while the err field should be used to attach the actual error that + // triggered this log line, if present. 
+ Error(err error, msg string, keysAndValues ...interface{}) + + // V returns an Logger value for a specific verbosity level, relative to + // this Logger. In other words, V values are additive. V higher verbosity + // level means a log message is less important. It's illegal to pass a log + // level less than zero. + V(level int) Logger + + // WithValues adds some key-value pairs of context to a logger. + // See Info for documentation on how key/value pairs work. + WithValues(keysAndValues ...interface{}) Logger + + // WithName adds a new element to the logger's name. + // Successive calls with WithName continue to append + // suffixes to the logger's name. It's strongly recommended + // that name segments contain only letters, digits, and hyphens + // (see the package documentation for more information). + WithName(name string) Logger +} + +// InfoLogger provides compatibility with code that relies on the v0.1.0 +// interface. +// +// Deprecated: InfoLogger is an artifact of early versions of this API. New +// users should never use it and existing users should use Logger instead. This +// will be removed in a future release. +type InfoLogger = Logger + +type contextKey struct{} + +// FromContext returns a Logger constructed from ctx or nil if no +// logger details are found. +func FromContext(ctx context.Context) Logger { + if v, ok := ctx.Value(contextKey{}).(Logger); ok { + return v + } + + return nil +} + +// FromContextOrDiscard returns a Logger constructed from ctx or a Logger +// that discards all messages if no logger details are found. +func FromContextOrDiscard(ctx context.Context) Logger { + if v, ok := ctx.Value(contextKey{}).(Logger); ok { + return v + } + + return Discard() +} + +// NewContext returns a new context derived from ctx that embeds the Logger. +func NewContext(ctx context.Context, l Logger) context.Context { + return context.WithValue(ctx, contextKey{}, l) +} + +// CallDepthLogger represents a Logger that knows how to climb the call stack +// to identify the original call site and can offset the depth by a specified +// number of frames. This is useful for users who have helper functions +// between the "real" call site and the actual calls to Logger methods. +// Implementations that log information about the call site (such as file, +// function, or line) would otherwise log information about the intermediate +// helper functions. +// +// This is an optional interface and implementations are not required to +// support it. +type CallDepthLogger interface { + Logger + + // WithCallDepth returns a Logger that will offset the call stack by the + // specified number of frames when logging call site information. If depth + // is 0 the attribution should be to the direct caller of this method. If + // depth is 1 the attribution should skip 1 call frame, and so on. + // Successive calls to this are additive. + WithCallDepth(depth int) Logger +} + +// WithCallDepth returns a Logger that will offset the call stack by the +// specified number of frames when logging call site information, if possible. +// This is useful for users who have helper functions between the "real" call +// site and the actual calls to Logger methods. If depth is 0 the attribution +// should be to the direct caller of this function. If depth is 1 the +// attribution should skip 1 call frame, and so on. Successive calls to this +// are additive. +// +// If the underlying log implementation supports the CallDepthLogger interface, +// the WithCallDepth method will be called and the result returned. 
If the +// implementation does not support CallDepthLogger, the original Logger will be +// returned. +// +// Callers which care about whether this was supported or not should test for +// CallDepthLogger support themselves. +func WithCallDepth(logger Logger, depth int) Logger { + if decorator, ok := logger.(CallDepthLogger); ok { + return decorator.WithCallDepth(depth) + } + return logger +} diff --git a/vendor/github.com/gogo/protobuf/proto/text_parser.go b/vendor/github.com/gogo/protobuf/proto/text_parser.go index 1ce0be2..f85c0cc 100644 --- a/vendor/github.com/gogo/protobuf/proto/text_parser.go +++ b/vendor/github.com/gogo/protobuf/proto/text_parser.go @@ -318,7 +318,7 @@ func unescape(s string) (ch string, tail string, err error) { if i > utf8.MaxRune { return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) } - return string(i), s, nil + return string(rune(i)), s, nil } return "", "", fmt.Errorf(`unknown escape \%c`, r) } diff --git a/vendor/github.com/golang/protobuf/proto/registry.go b/vendor/github.com/golang/protobuf/proto/registry.go index 1e7ff64..066b432 100644 --- a/vendor/github.com/golang/protobuf/proto/registry.go +++ b/vendor/github.com/golang/protobuf/proto/registry.go @@ -13,6 +13,7 @@ import ( "strings" "sync" + "google.golang.org/protobuf/reflect/protodesc" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" "google.golang.org/protobuf/runtime/protoimpl" @@ -62,14 +63,7 @@ func FileDescriptor(s filePath) fileDescGZIP { // Find the descriptor in the v2 registry. var b []byte if fd, _ := protoregistry.GlobalFiles.FindFileByPath(s); fd != nil { - if fd, ok := fd.(interface{ ProtoLegacyRawDesc() []byte }); ok { - b = fd.ProtoLegacyRawDesc() - } else { - // TODO: Use protodesc.ToFileDescriptorProto to construct - // a descriptorpb.FileDescriptorProto and marshal it. - // However, doing so causes the proto package to have a dependency - // on descriptorpb, leading to cyclic dependency issues. - } + b, _ = Marshal(protodesc.ToFileDescriptorProto(fd)) } // Locally cache the raw descriptor form for the file. diff --git a/vendor/github.com/golang/protobuf/proto/text_decode.go b/vendor/github.com/golang/protobuf/proto/text_decode.go index 4a59310..47eb3e4 100644 --- a/vendor/github.com/golang/protobuf/proto/text_decode.go +++ b/vendor/github.com/golang/protobuf/proto/text_decode.go @@ -765,7 +765,7 @@ func unescape(s string) (ch string, tail string, err error) { if i > utf8.MaxRune { return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) } - return string(i), s, nil + return string(rune(i)), s, nil } return "", "", fmt.Errorf(`unknown escape \%c`, r) } diff --git a/vendor/github.com/golang/protobuf/ptypes/any.go b/vendor/github.com/golang/protobuf/ptypes/any.go index e729dcf..85f9f57 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any.go +++ b/vendor/github.com/golang/protobuf/ptypes/any.go @@ -19,6 +19,8 @@ const urlPrefix = "type.googleapis.com/" // AnyMessageName returns the message name contained in an anypb.Any message. // Most type assertions should use the Is function instead. +// +// Deprecated: Call the any.MessageName method instead. func AnyMessageName(any *anypb.Any) (string, error) { name, err := anyMessageName(any) return string(name), err @@ -38,6 +40,8 @@ func anyMessageName(any *anypb.Any) (protoreflect.FullName, error) { } // MarshalAny marshals the given message m into an anypb.Any message. +// +// Deprecated: Call the anypb.New function instead. 
func MarshalAny(m proto.Message) (*anypb.Any, error) { switch dm := m.(type) { case DynamicAny: @@ -58,6 +62,9 @@ func MarshalAny(m proto.Message) (*anypb.Any, error) { // Empty returns a new message of the type specified in an anypb.Any message. // It returns protoregistry.NotFound if the corresponding message type could not // be resolved in the global registry. +// +// Deprecated: Use protoregistry.GlobalTypes.FindMessageByName instead +// to resolve the message name and create a new instance of it. func Empty(any *anypb.Any) (proto.Message, error) { name, err := anyMessageName(any) if err != nil { @@ -76,6 +83,8 @@ func Empty(any *anypb.Any) (proto.Message, error) { // // The target message m may be a *DynamicAny message. If the underlying message // type could not be resolved, then this returns protoregistry.NotFound. +// +// Deprecated: Call the any.UnmarshalTo method instead. func UnmarshalAny(any *anypb.Any, m proto.Message) error { if dm, ok := m.(*DynamicAny); ok { if dm.Message == nil { @@ -100,6 +109,8 @@ func UnmarshalAny(any *anypb.Any, m proto.Message) error { } // Is reports whether the Any message contains a message of the specified type. +// +// Deprecated: Call the any.MessageIs method instead. func Is(any *anypb.Any, m proto.Message) bool { if any == nil || m == nil { return false @@ -119,6 +130,9 @@ func Is(any *anypb.Any, m proto.Message) bool { // var x ptypes.DynamicAny // if err := ptypes.UnmarshalAny(a, &x); err != nil { ... } // fmt.Printf("unmarshaled message: %v", x.Message) +// +// Deprecated: Use the any.UnmarshalNew method instead to unmarshal +// the any message contents into a new instance of the underlying message. type DynamicAny struct{ proto.Message } func (m DynamicAny) String() string { diff --git a/vendor/github.com/golang/protobuf/ptypes/doc.go b/vendor/github.com/golang/protobuf/ptypes/doc.go index fb9edd5..d3c3325 100644 --- a/vendor/github.com/golang/protobuf/ptypes/doc.go +++ b/vendor/github.com/golang/protobuf/ptypes/doc.go @@ -3,4 +3,8 @@ // license that can be found in the LICENSE file. // Package ptypes provides functionality for interacting with well-known types. +// +// Deprecated: Well-known types have specialized functionality directly +// injected into the generated packages for each message type. +// See the deprecation notice for each function for the suggested alternative. package ptypes diff --git a/vendor/github.com/golang/protobuf/ptypes/duration.go b/vendor/github.com/golang/protobuf/ptypes/duration.go index 6110ae8..b2b55dd 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration.go @@ -21,6 +21,8 @@ const ( // Duration converts a durationpb.Duration to a time.Duration. // Duration returns an error if dur is invalid or overflows a time.Duration. +// +// Deprecated: Call the dur.AsDuration and dur.CheckValid methods instead. func Duration(dur *durationpb.Duration) (time.Duration, error) { if err := validateDuration(dur); err != nil { return 0, err @@ -39,6 +41,8 @@ func Duration(dur *durationpb.Duration) (time.Duration, error) { } // DurationProto converts a time.Duration to a durationpb.Duration. +// +// Deprecated: Call the durationpb.New function instead. 
func DurationProto(d time.Duration) *durationpb.Duration { nanos := d.Nanoseconds() secs := nanos / 1e9 diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp.go b/vendor/github.com/golang/protobuf/ptypes/timestamp.go index 026d0d4..8368a3f 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp.go @@ -33,6 +33,8 @@ const ( // // A nil Timestamp returns an error. The first return value in that case is // undefined. +// +// Deprecated: Call the ts.AsTime and ts.CheckValid methods instead. func Timestamp(ts *timestamppb.Timestamp) (time.Time, error) { // Don't return the zero value on error, because corresponds to a valid // timestamp. Instead return whatever time.Unix gives us. @@ -46,6 +48,8 @@ func Timestamp(ts *timestamppb.Timestamp) (time.Time, error) { } // TimestampNow returns a google.protobuf.Timestamp for the current time. +// +// Deprecated: Call the timestamppb.Now function instead. func TimestampNow() *timestamppb.Timestamp { ts, err := TimestampProto(time.Now()) if err != nil { @@ -56,6 +60,8 @@ func TimestampNow() *timestamppb.Timestamp { // TimestampProto converts the time.Time to a google.protobuf.Timestamp proto. // It returns an error if the resulting Timestamp is invalid. +// +// Deprecated: Call the timestamppb.New function instead. func TimestampProto(t time.Time) (*timestamppb.Timestamp, error) { ts := &timestamppb.Timestamp{ Seconds: t.Unix(), @@ -69,6 +75,9 @@ func TimestampProto(t time.Time) (*timestamppb.Timestamp, error) { // TimestampString returns the RFC 3339 string for valid Timestamps. // For invalid Timestamps, it returns an error message in parentheses. +// +// Deprecated: Call the ts.AsTime method instead, +// followed by a call to the Format method on the time.Time value. func TimestampString(ts *timestamppb.Timestamp) string { t, err := Timestamp(ts) if err != nil { diff --git a/vendor/github.com/google/go-cmp/LICENSE b/vendor/github.com/google/go-cmp/LICENSE new file mode 100644 index 0000000..32017f8 --- /dev/null +++ b/vendor/github.com/google/go-cmp/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2017 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/google/go-cmp/cmp/compare.go b/vendor/github.com/google/go-cmp/cmp/compare.go new file mode 100644 index 0000000..86d0903 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/compare.go @@ -0,0 +1,682 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cmp determines equality of values. +// +// This package is intended to be a more powerful and safer alternative to +// reflect.DeepEqual for comparing whether two values are semantically equal. +// It is intended to only be used in tests, as performance is not a goal and +// it may panic if it cannot compare the values. Its propensity towards +// panicking means that its unsuitable for production environments where a +// spurious panic may be fatal. +// +// The primary features of cmp are: +// +// • When the default behavior of equality does not suit the needs of the test, +// custom equality functions can override the equality operation. +// For example, an equality function may report floats as equal so long as they +// are within some tolerance of each other. +// +// • Types that have an Equal method may use that method to determine equality. +// This allows package authors to determine the equality operation for the types +// that they define. +// +// • If no custom equality functions are used and no Equal method is defined, +// equality is determined by recursively comparing the primitive kinds on both +// values, much like reflect.DeepEqual. Unlike reflect.DeepEqual, unexported +// fields are not compared by default; they result in panics unless suppressed +// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly +// compared using the Exporter option. +package cmp + +import ( + "fmt" + "reflect" + "strings" + + "github.com/google/go-cmp/cmp/internal/diff" + "github.com/google/go-cmp/cmp/internal/flags" + "github.com/google/go-cmp/cmp/internal/function" + "github.com/google/go-cmp/cmp/internal/value" +) + +// Equal reports whether x and y are equal by recursively applying the +// following rules in the given order to x and y and all of their sub-values: +// +// • Let S be the set of all Ignore, Transformer, and Comparer options that +// remain after applying all path filters, value filters, and type filters. +// If at least one Ignore exists in S, then the comparison is ignored. +// If the number of Transformer and Comparer options in S is greater than one, +// then Equal panics because it is ambiguous which option to use. +// If S contains a single Transformer, then use that to transform the current +// values and recursively call Equal on the output values. +// If S contains a single Comparer, then use that to compare the current values. +// Otherwise, evaluation proceeds to the next rule. 
+// +// • If the values have an Equal method of the form "(T) Equal(T) bool" or +// "(T) Equal(I) bool" where T is assignable to I, then use the result of +// x.Equal(y) even if x or y is nil. Otherwise, no such method exists and +// evaluation proceeds to the next rule. +// +// • Lastly, try to compare x and y based on their basic kinds. +// Simple kinds like booleans, integers, floats, complex numbers, strings, and +// channels are compared using the equivalent of the == operator in Go. +// Functions are only equal if they are both nil, otherwise they are unequal. +// +// Structs are equal if recursively calling Equal on all fields report equal. +// If a struct contains unexported fields, Equal panics unless an Ignore option +// (e.g., cmpopts.IgnoreUnexported) ignores that field or the Exporter option +// explicitly permits comparing the unexported field. +// +// Slices are equal if they are both nil or both non-nil, where recursively +// calling Equal on all non-ignored slice or array elements report equal. +// Empty non-nil slices and nil slices are not equal; to equate empty slices, +// consider using cmpopts.EquateEmpty. +// +// Maps are equal if they are both nil or both non-nil, where recursively +// calling Equal on all non-ignored map entries report equal. +// Map keys are equal according to the == operator. +// To use custom comparisons for map keys, consider using cmpopts.SortMaps. +// Empty non-nil maps and nil maps are not equal; to equate empty maps, +// consider using cmpopts.EquateEmpty. +// +// Pointers and interfaces are equal if they are both nil or both non-nil, +// where they have the same underlying concrete type and recursively +// calling Equal on the underlying values reports equal. +// +// Before recursing into a pointer, slice element, or map, the current path +// is checked to detect whether the address has already been visited. +// If there is a cycle, then the pointed at values are considered equal +// only if both addresses were previously visited in the same path step. +func Equal(x, y interface{}, opts ...Option) bool { + s := newState(opts) + s.compareAny(rootStep(x, y)) + return s.result.Equal() +} + +// Diff returns a human-readable report of the differences between two values: +// y - x. It returns an empty string if and only if Equal returns true for the +// same input values and options. +// +// The output is displayed as a literal in pseudo-Go syntax. +// At the start of each line, a "-" prefix indicates an element removed from x, +// a "+" prefix to indicates an element added from y, and the lack of a prefix +// indicates an element common to both x and y. If possible, the output +// uses fmt.Stringer.String or error.Error methods to produce more humanly +// readable outputs. In such cases, the string is prefixed with either an +// 's' or 'e' character, respectively, to indicate that the method was called. +// +// Do not depend on this output being stable. If you need the ability to +// programmatically interpret the difference, consider using a custom Reporter. +func Diff(x, y interface{}, opts ...Option) string { + s := newState(opts) + + // Optimization: If there are no other reporters, we can optimize for the + // common case where the result is equal (and thus no reported difference). + // This avoids the expensive construction of a difference tree. 
+ if len(s.reporters) == 0 { + s.compareAny(rootStep(x, y)) + if s.result.Equal() { + return "" + } + s.result = diff.Result{} // Reset results + } + + r := new(defaultReporter) + s.reporters = append(s.reporters, reporter{r}) + s.compareAny(rootStep(x, y)) + d := r.String() + if (d == "") != s.result.Equal() { + panic("inconsistent difference and equality results") + } + return d +} + +// rootStep constructs the first path step. If x and y have differing types, +// then they are stored within an empty interface type. +func rootStep(x, y interface{}) PathStep { + vx := reflect.ValueOf(x) + vy := reflect.ValueOf(y) + + // If the inputs are different types, auto-wrap them in an empty interface + // so that they have the same parent type. + var t reflect.Type + if !vx.IsValid() || !vy.IsValid() || vx.Type() != vy.Type() { + t = reflect.TypeOf((*interface{})(nil)).Elem() + if vx.IsValid() { + vvx := reflect.New(t).Elem() + vvx.Set(vx) + vx = vvx + } + if vy.IsValid() { + vvy := reflect.New(t).Elem() + vvy.Set(vy) + vy = vvy + } + } else { + t = vx.Type() + } + + return &pathStep{t, vx, vy} +} + +type state struct { + // These fields represent the "comparison state". + // Calling statelessCompare must not result in observable changes to these. + result diff.Result // The current result of comparison + curPath Path // The current path in the value tree + curPtrs pointerPath // The current set of visited pointers + reporters []reporter // Optional reporters + + // recChecker checks for infinite cycles applying the same set of + // transformers upon the output of itself. + recChecker recChecker + + // dynChecker triggers pseudo-random checks for option correctness. + // It is safe for statelessCompare to mutate this value. + dynChecker dynChecker + + // These fields, once set by processOption, will not change. + exporters []exporter // List of exporters for structs with unexported fields + opts Options // List of all fundamental and filter options +} + +func newState(opts []Option) *state { + // Always ensure a validator option exists to validate the inputs. + s := &state{opts: Options{validator{}}} + s.curPtrs.Init() + s.processOption(Options(opts)) + return s +} + +func (s *state) processOption(opt Option) { + switch opt := opt.(type) { + case nil: + case Options: + for _, o := range opt { + s.processOption(o) + } + case coreOption: + type filtered interface { + isFiltered() bool + } + if fopt, ok := opt.(filtered); ok && !fopt.isFiltered() { + panic(fmt.Sprintf("cannot use an unfiltered option: %v", opt)) + } + s.opts = append(s.opts, opt) + case exporter: + s.exporters = append(s.exporters, opt) + case reporter: + s.reporters = append(s.reporters, opt) + default: + panic(fmt.Sprintf("unknown option %T", opt)) + } +} + +// statelessCompare compares two values and returns the result. +// This function is stateless in that it does not alter the current result, +// or output to any registered reporters. +func (s *state) statelessCompare(step PathStep) diff.Result { + // We do not save and restore curPath and curPtrs because all of the + // compareX methods should properly push and pop from them. + // It is an implementation bug if the contents of the paths differ from + // when calling this function to when returning from it. 
+ + oldResult, oldReporters := s.result, s.reporters + s.result = diff.Result{} // Reset result + s.reporters = nil // Remove reporters to avoid spurious printouts + s.compareAny(step) + res := s.result + s.result, s.reporters = oldResult, oldReporters + return res +} + +func (s *state) compareAny(step PathStep) { + // Update the path stack. + s.curPath.push(step) + defer s.curPath.pop() + for _, r := range s.reporters { + r.PushStep(step) + defer r.PopStep() + } + s.recChecker.Check(s.curPath) + + // Cycle-detection for slice elements (see NOTE in compareSlice). + t := step.Type() + vx, vy := step.Values() + if si, ok := step.(SliceIndex); ok && si.isSlice && vx.IsValid() && vy.IsValid() { + px, py := vx.Addr(), vy.Addr() + if eq, visited := s.curPtrs.Push(px, py); visited { + s.report(eq, reportByCycle) + return + } + defer s.curPtrs.Pop(px, py) + } + + // Rule 1: Check whether an option applies on this node in the value tree. + if s.tryOptions(t, vx, vy) { + return + } + + // Rule 2: Check whether the type has a valid Equal method. + if s.tryMethod(t, vx, vy) { + return + } + + // Rule 3: Compare based on the underlying kind. + switch t.Kind() { + case reflect.Bool: + s.report(vx.Bool() == vy.Bool(), 0) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + s.report(vx.Int() == vy.Int(), 0) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + s.report(vx.Uint() == vy.Uint(), 0) + case reflect.Float32, reflect.Float64: + s.report(vx.Float() == vy.Float(), 0) + case reflect.Complex64, reflect.Complex128: + s.report(vx.Complex() == vy.Complex(), 0) + case reflect.String: + s.report(vx.String() == vy.String(), 0) + case reflect.Chan, reflect.UnsafePointer: + s.report(vx.Pointer() == vy.Pointer(), 0) + case reflect.Func: + s.report(vx.IsNil() && vy.IsNil(), 0) + case reflect.Struct: + s.compareStruct(t, vx, vy) + case reflect.Slice, reflect.Array: + s.compareSlice(t, vx, vy) + case reflect.Map: + s.compareMap(t, vx, vy) + case reflect.Ptr: + s.comparePtr(t, vx, vy) + case reflect.Interface: + s.compareInterface(t, vx, vy) + default: + panic(fmt.Sprintf("%v kind not handled", t.Kind())) + } +} + +func (s *state) tryOptions(t reflect.Type, vx, vy reflect.Value) bool { + // Evaluate all filters and apply the remaining options. + if opt := s.opts.filter(s, t, vx, vy); opt != nil { + opt.apply(s, vx, vy) + return true + } + return false +} + +func (s *state) tryMethod(t reflect.Type, vx, vy reflect.Value) bool { + // Check if this type even has an Equal method. + m, ok := t.MethodByName("Equal") + if !ok || !function.IsType(m.Type, function.EqualAssignable) { + return false + } + + eq := s.callTTBFunc(m.Func, vx, vy) + s.report(eq, reportByMethod) + return true +} + +func (s *state) callTRFunc(f, v reflect.Value, step Transform) reflect.Value { + v = sanitizeValue(v, f.Type().In(0)) + if !s.dynChecker.Next() { + return f.Call([]reflect.Value{v})[0] + } + + // Run the function twice and ensure that we get the same results back. + // We run in goroutines so that the race detector (if enabled) can detect + // unsafe mutations to the input. + c := make(chan reflect.Value) + go detectRaces(c, f, v) + got := <-c + want := f.Call([]reflect.Value{v})[0] + if step.vx, step.vy = got, want; !s.statelessCompare(step).Equal() { + // To avoid false-positives with non-reflexive equality operations, + // we sanity check whether a value is equal to itself. 
+ if step.vx, step.vy = want, want; !s.statelessCompare(step).Equal() { + return want + } + panic(fmt.Sprintf("non-deterministic function detected: %s", function.NameOf(f))) + } + return want +} + +func (s *state) callTTBFunc(f, x, y reflect.Value) bool { + x = sanitizeValue(x, f.Type().In(0)) + y = sanitizeValue(y, f.Type().In(1)) + if !s.dynChecker.Next() { + return f.Call([]reflect.Value{x, y})[0].Bool() + } + + // Swapping the input arguments is sufficient to check that + // f is symmetric and deterministic. + // We run in goroutines so that the race detector (if enabled) can detect + // unsafe mutations to the input. + c := make(chan reflect.Value) + go detectRaces(c, f, y, x) + got := <-c + want := f.Call([]reflect.Value{x, y})[0].Bool() + if !got.IsValid() || got.Bool() != want { + panic(fmt.Sprintf("non-deterministic or non-symmetric function detected: %s", function.NameOf(f))) + } + return want +} + +func detectRaces(c chan<- reflect.Value, f reflect.Value, vs ...reflect.Value) { + var ret reflect.Value + defer func() { + recover() // Ignore panics, let the other call to f panic instead + c <- ret + }() + ret = f.Call(vs)[0] +} + +// sanitizeValue converts nil interfaces of type T to those of type R, +// assuming that T is assignable to R. +// Otherwise, it returns the input value as is. +func sanitizeValue(v reflect.Value, t reflect.Type) reflect.Value { + // TODO(≥go1.10): Workaround for reflect bug (https://golang.org/issue/22143). + if !flags.AtLeastGo110 { + if v.Kind() == reflect.Interface && v.IsNil() && v.Type() != t { + return reflect.New(t).Elem() + } + } + return v +} + +func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) { + var addr bool + var vax, vay reflect.Value // Addressable versions of vx and vy + + var mayForce, mayForceInit bool + step := StructField{&structField{}} + for i := 0; i < t.NumField(); i++ { + step.typ = t.Field(i).Type + step.vx = vx.Field(i) + step.vy = vy.Field(i) + step.name = t.Field(i).Name + step.idx = i + step.unexported = !isExported(step.name) + if step.unexported { + if step.name == "_" { + continue + } + // Defer checking of unexported fields until later to give an + // Ignore a chance to ignore the field. + if !vax.IsValid() || !vay.IsValid() { + // For retrieveUnexportedField to work, the parent struct must + // be addressable. Create a new copy of the values if + // necessary to make them addressable. + addr = vx.CanAddr() || vy.CanAddr() + vax = makeAddressable(vx) + vay = makeAddressable(vy) + } + if !mayForceInit { + for _, xf := range s.exporters { + mayForce = mayForce || xf(t) + } + mayForceInit = true + } + step.mayForce = mayForce + step.paddr = addr + step.pvx = vax + step.pvy = vay + step.field = t.Field(i) + } + s.compareAny(step) + } +} + +func (s *state) compareSlice(t reflect.Type, vx, vy reflect.Value) { + isSlice := t.Kind() == reflect.Slice + if isSlice && (vx.IsNil() || vy.IsNil()) { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + + // NOTE: It is incorrect to call curPtrs.Push on the slice header pointer + // since slices represents a list of pointers, rather than a single pointer. + // The pointer checking logic must be handled on a per-element basis + // in compareAny. + // + // A slice header (see reflect.SliceHeader) in Go is a tuple of a starting + // pointer P, a length N, and a capacity C. 
Supposing each slice element has + // a memory size of M, then the slice is equivalent to the list of pointers: + // [P+i*M for i in range(N)] + // + // For example, v[:0] and v[:1] are slices with the same starting pointer, + // but they are clearly different values. Using the slice pointer alone + // violates the assumption that equal pointers implies equal values. + + step := SliceIndex{&sliceIndex{pathStep: pathStep{typ: t.Elem()}, isSlice: isSlice}} + withIndexes := func(ix, iy int) SliceIndex { + if ix >= 0 { + step.vx, step.xkey = vx.Index(ix), ix + } else { + step.vx, step.xkey = reflect.Value{}, -1 + } + if iy >= 0 { + step.vy, step.ykey = vy.Index(iy), iy + } else { + step.vy, step.ykey = reflect.Value{}, -1 + } + return step + } + + // Ignore options are able to ignore missing elements in a slice. + // However, detecting these reliably requires an optimal differencing + // algorithm, for which diff.Difference is not. + // + // Instead, we first iterate through both slices to detect which elements + // would be ignored if standing alone. The index of non-discarded elements + // are stored in a separate slice, which diffing is then performed on. + var indexesX, indexesY []int + var ignoredX, ignoredY []bool + for ix := 0; ix < vx.Len(); ix++ { + ignored := s.statelessCompare(withIndexes(ix, -1)).NumDiff == 0 + if !ignored { + indexesX = append(indexesX, ix) + } + ignoredX = append(ignoredX, ignored) + } + for iy := 0; iy < vy.Len(); iy++ { + ignored := s.statelessCompare(withIndexes(-1, iy)).NumDiff == 0 + if !ignored { + indexesY = append(indexesY, iy) + } + ignoredY = append(ignoredY, ignored) + } + + // Compute an edit-script for slices vx and vy (excluding ignored elements). + edits := diff.Difference(len(indexesX), len(indexesY), func(ix, iy int) diff.Result { + return s.statelessCompare(withIndexes(indexesX[ix], indexesY[iy])) + }) + + // Replay the ignore-scripts and the edit-script. + var ix, iy int + for ix < vx.Len() || iy < vy.Len() { + var e diff.EditType + switch { + case ix < len(ignoredX) && ignoredX[ix]: + e = diff.UniqueX + case iy < len(ignoredY) && ignoredY[iy]: + e = diff.UniqueY + default: + e, edits = edits[0], edits[1:] + } + switch e { + case diff.UniqueX: + s.compareAny(withIndexes(ix, -1)) + ix++ + case diff.UniqueY: + s.compareAny(withIndexes(-1, iy)) + iy++ + default: + s.compareAny(withIndexes(ix, iy)) + ix++ + iy++ + } + } +} + +func (s *state) compareMap(t reflect.Type, vx, vy reflect.Value) { + if vx.IsNil() || vy.IsNil() { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + + // Cycle-detection for maps. + if eq, visited := s.curPtrs.Push(vx, vy); visited { + s.report(eq, reportByCycle) + return + } + defer s.curPtrs.Pop(vx, vy) + + // We combine and sort the two map keys so that we can perform the + // comparisons in a deterministic order. + step := MapIndex{&mapIndex{pathStep: pathStep{typ: t.Elem()}}} + for _, k := range value.SortKeys(append(vx.MapKeys(), vy.MapKeys()...)) { + step.vx = vx.MapIndex(k) + step.vy = vy.MapIndex(k) + step.key = k + if !step.vx.IsValid() && !step.vy.IsValid() { + // It is possible for both vx and vy to be invalid if the + // key contained a NaN value in it. + // + // Even with the ability to retrieve NaN keys in Go 1.12, + // there still isn't a sensible way to compare the values since + // a NaN key may map to multiple unordered values. + // The most reasonable way to compare NaNs would be to compare the + // set of values. 
However, this is impossible to do efficiently + // since set equality is provably an O(n^2) operation given only + // an Equal function. If we had a Less function or Hash function, + // this could be done in O(n*log(n)) or O(n), respectively. + // + // Rather than adding complex logic to deal with NaNs, make it + // the user's responsibility to compare such obscure maps. + const help = "consider providing a Comparer to compare the map" + panic(fmt.Sprintf("%#v has map key with NaNs\n%s", s.curPath, help)) + } + s.compareAny(step) + } +} + +func (s *state) comparePtr(t reflect.Type, vx, vy reflect.Value) { + if vx.IsNil() || vy.IsNil() { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + + // Cycle-detection for pointers. + if eq, visited := s.curPtrs.Push(vx, vy); visited { + s.report(eq, reportByCycle) + return + } + defer s.curPtrs.Pop(vx, vy) + + vx, vy = vx.Elem(), vy.Elem() + s.compareAny(Indirect{&indirect{pathStep{t.Elem(), vx, vy}}}) +} + +func (s *state) compareInterface(t reflect.Type, vx, vy reflect.Value) { + if vx.IsNil() || vy.IsNil() { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + vx, vy = vx.Elem(), vy.Elem() + if vx.Type() != vy.Type() { + s.report(false, 0) + return + } + s.compareAny(TypeAssertion{&typeAssertion{pathStep{vx.Type(), vx, vy}}}) +} + +func (s *state) report(eq bool, rf resultFlags) { + if rf&reportByIgnore == 0 { + if eq { + s.result.NumSame++ + rf |= reportEqual + } else { + s.result.NumDiff++ + rf |= reportUnequal + } + } + for _, r := range s.reporters { + r.Report(Result{flags: rf}) + } +} + +// recChecker tracks the state needed to periodically perform checks that +// user provided transformers are not stuck in an infinitely recursive cycle. +type recChecker struct{ next int } + +// Check scans the Path for any recursive transformers and panics when any +// recursive transformers are detected. Note that the presence of a +// recursive Transformer does not necessarily imply an infinite cycle. +// As such, this check only activates after some minimal number of path steps. +func (rc *recChecker) Check(p Path) { + const minLen = 1 << 16 + if rc.next == 0 { + rc.next = minLen + } + if len(p) < rc.next { + return + } + rc.next <<= 1 + + // Check whether the same transformer has appeared at least twice. + var ss []string + m := map[Option]int{} + for _, ps := range p { + if t, ok := ps.(Transform); ok { + t := t.Option() + if m[t] == 1 { // Transformer was used exactly once before + tf := t.(*transformer).fnc.Type() + ss = append(ss, fmt.Sprintf("%v: %v => %v", t, tf.In(0), tf.Out(0))) + } + m[t]++ + } + } + if len(ss) > 0 { + const warning = "recursive set of Transformers detected" + const help = "consider using cmpopts.AcyclicTransformer" + set := strings.Join(ss, "\n\t") + panic(fmt.Sprintf("%s:\n\t%s\n%s", warning, set, help)) + } +} + +// dynChecker tracks the state needed to periodically perform checks that +// user provided functions are symmetric and deterministic. +// The zero value is safe for immediate use. +type dynChecker struct{ curr, next int } + +// Next increments the state and reports whether a check should be performed. +// +// Checks occur every Nth function call, where N is a triangular number: +// 0 1 3 6 10 15 21 28 36 45 55 66 78 91 105 120 136 153 171 190 ... +// See https://en.wikipedia.org/wiki/Triangular_number +// +// This sequence ensures that the cost of checks drops significantly as +// the number of functions calls grows larger. 
+func (dc *dynChecker) Next() bool { + ok := dc.curr == dc.next + if ok { + dc.curr = 0 + dc.next++ + } + dc.curr++ + return ok +} + +// makeAddressable returns a value that is always addressable. +// It returns the input verbatim if it is already addressable, +// otherwise it creates a new value and returns an addressable copy. +func makeAddressable(v reflect.Value) reflect.Value { + if v.CanAddr() { + return v + } + vc := reflect.New(v.Type()).Elem() + vc.Set(v) + return vc +} diff --git a/vendor/github.com/google/go-cmp/cmp/export_panic.go b/vendor/github.com/google/go-cmp/cmp/export_panic.go new file mode 100644 index 0000000..5ff0b42 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/export_panic.go @@ -0,0 +1,15 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build purego + +package cmp + +import "reflect" + +const supportExporters = false + +func retrieveUnexportedField(reflect.Value, reflect.StructField, bool) reflect.Value { + panic("no support for forcibly accessing unexported fields") +} diff --git a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go new file mode 100644 index 0000000..21eb548 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go @@ -0,0 +1,35 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego + +package cmp + +import ( + "reflect" + "unsafe" +) + +const supportExporters = true + +// retrieveUnexportedField uses unsafe to forcibly retrieve any field from +// a struct such that the value has read-write permissions. +// +// The parent struct, v, must be addressable, while f must be a StructField +// describing the field to retrieve. If addr is false, +// then the returned value will be shallowed copied to be non-addressable. +func retrieveUnexportedField(v reflect.Value, f reflect.StructField, addr bool) reflect.Value { + ve := reflect.NewAt(f.Type, unsafe.Pointer(uintptr(unsafe.Pointer(v.UnsafeAddr()))+f.Offset)).Elem() + if !addr { + // A field is addressable if and only if the struct is addressable. + // If the original parent value was not addressable, shallow copy the + // value to make it non-addressable to avoid leaking an implementation + // detail of how forcibly exporting a field works. + if ve.Kind() == reflect.Interface && ve.IsNil() { + return reflect.Zero(f.Type) + } + return reflect.ValueOf(ve.Interface()).Convert(f.Type) + } + return ve +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go new file mode 100644 index 0000000..1daaaac --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go @@ -0,0 +1,17 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !cmp_debug + +package diff + +var debug debugger + +type debugger struct{} + +func (debugger) Begin(_, _ int, f EqualFunc, _, _ *EditScript) EqualFunc { + return f +} +func (debugger) Update() {} +func (debugger) Finish() {} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go new file mode 100644 index 0000000..4b91dbc --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go @@ -0,0 +1,122 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build cmp_debug + +package diff + +import ( + "fmt" + "strings" + "sync" + "time" +) + +// The algorithm can be seen running in real-time by enabling debugging: +// go test -tags=cmp_debug -v +// +// Example output: +// === RUN TestDifference/#34 +// ┌───────────────────────────────┐ +// │ \ · · · · · · · · · · · · · · │ +// │ · # · · · · · · · · · · · · · │ +// │ · \ · · · · · · · · · · · · · │ +// │ · · \ · · · · · · · · · · · · │ +// │ · · · X # · · · · · · · · · · │ +// │ · · · # \ · · · · · · · · · · │ +// │ · · · · · # # · · · · · · · · │ +// │ · · · · · # \ · · · · · · · · │ +// │ · · · · · · · \ · · · · · · · │ +// │ · · · · · · · · \ · · · · · · │ +// │ · · · · · · · · · \ · · · · · │ +// │ · · · · · · · · · · \ · · # · │ +// │ · · · · · · · · · · · \ # # · │ +// │ · · · · · · · · · · · # # # · │ +// │ · · · · · · · · · · # # # # · │ +// │ · · · · · · · · · # # # # # · │ +// │ · · · · · · · · · · · · · · \ │ +// └───────────────────────────────┘ +// [.Y..M.XY......YXYXY.|] +// +// The grid represents the edit-graph where the horizontal axis represents +// list X and the vertical axis represents list Y. The start of the two lists +// is the top-left, while the ends are the bottom-right. The '·' represents +// an unexplored node in the graph. The '\' indicates that the two symbols +// from list X and Y are equal. The 'X' indicates that two symbols are similar +// (but not exactly equal) to each other. The '#' indicates that the two symbols +// are different (and not similar). The algorithm traverses this graph trying to +// make the paths starting in the top-left and the bottom-right connect. +// +// The series of '.', 'X', 'Y', and 'M' characters at the bottom represents +// the currently established path from the forward and reverse searches, +// separated by a '|' character. + +const ( + updateDelay = 100 * time.Millisecond + finishDelay = 500 * time.Millisecond + ansiTerminal = true // ANSI escape codes used to move terminal cursor +) + +var debug debugger + +type debugger struct { + sync.Mutex + p1, p2 EditScript + fwdPath, revPath *EditScript + grid []byte + lines int +} + +func (dbg *debugger) Begin(nx, ny int, f EqualFunc, p1, p2 *EditScript) EqualFunc { + dbg.Lock() + dbg.fwdPath, dbg.revPath = p1, p2 + top := "┌─" + strings.Repeat("──", nx) + "┐\n" + row := "│ " + strings.Repeat("· ", nx) + "│\n" + btm := "└─" + strings.Repeat("──", nx) + "┘\n" + dbg.grid = []byte(top + strings.Repeat(row, ny) + btm) + dbg.lines = strings.Count(dbg.String(), "\n") + fmt.Print(dbg) + + // Wrap the EqualFunc so that we can intercept each result. 
+ return func(ix, iy int) (r Result) { + cell := dbg.grid[len(top)+iy*len(row):][len("│ ")+len("· ")*ix:][:len("·")] + for i := range cell { + cell[i] = 0 // Zero out the multiple bytes of UTF-8 middle-dot + } + switch r = f(ix, iy); { + case r.Equal(): + cell[0] = '\\' + case r.Similar(): + cell[0] = 'X' + default: + cell[0] = '#' + } + return + } +} + +func (dbg *debugger) Update() { + dbg.print(updateDelay) +} + +func (dbg *debugger) Finish() { + dbg.print(finishDelay) + dbg.Unlock() +} + +func (dbg *debugger) String() string { + dbg.p1, dbg.p2 = *dbg.fwdPath, dbg.p2[:0] + for i := len(*dbg.revPath) - 1; i >= 0; i-- { + dbg.p2 = append(dbg.p2, (*dbg.revPath)[i]) + } + return fmt.Sprintf("%s[%v|%v]\n\n", dbg.grid, dbg.p1, dbg.p2) +} + +func (dbg *debugger) print(d time.Duration) { + if ansiTerminal { + fmt.Printf("\x1b[%dA", dbg.lines) // Reset terminal cursor + } + fmt.Print(dbg) + time.Sleep(d) +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go new file mode 100644 index 0000000..bc196b1 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go @@ -0,0 +1,398 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package diff implements an algorithm for producing edit-scripts. +// The edit-script is a sequence of operations needed to transform one list +// of symbols into another (or vice-versa). The edits allowed are insertions, +// deletions, and modifications. The summation of all edits is called the +// Levenshtein distance as this problem is well-known in computer science. +// +// This package prioritizes performance over accuracy. That is, the run time +// is more important than obtaining a minimal Levenshtein distance. +package diff + +import ( + "math/rand" + "time" + + "github.com/google/go-cmp/cmp/internal/flags" +) + +// EditType represents a single operation within an edit-script. +type EditType uint8 + +const ( + // Identity indicates that a symbol pair is identical in both list X and Y. + Identity EditType = iota + // UniqueX indicates that a symbol only exists in X and not Y. + UniqueX + // UniqueY indicates that a symbol only exists in Y and not X. + UniqueY + // Modified indicates that a symbol pair is a modification of each other. + Modified +) + +// EditScript represents the series of differences between two lists. +type EditScript []EditType + +// String returns a human-readable string representing the edit-script where +// Identity, UniqueX, UniqueY, and Modified are represented by the +// '.', 'X', 'Y', and 'M' characters, respectively. +func (es EditScript) String() string { + b := make([]byte, len(es)) + for i, e := range es { + switch e { + case Identity: + b[i] = '.' + case UniqueX: + b[i] = 'X' + case UniqueY: + b[i] = 'Y' + case Modified: + b[i] = 'M' + default: + panic("invalid edit-type") + } + } + return string(b) +} + +// stats returns a histogram of the number of each type of edit operation. +func (es EditScript) stats() (s struct{ NI, NX, NY, NM int }) { + for _, e := range es { + switch e { + case Identity: + s.NI++ + case UniqueX: + s.NX++ + case UniqueY: + s.NY++ + case Modified: + s.NM++ + default: + panic("invalid edit-type") + } + } + return +} + +// Dist is the Levenshtein distance and is guaranteed to be 0 if and only if +// lists X and Y are equal. 
+func (es EditScript) Dist() int { return len(es) - es.stats().NI } + +// LenX is the length of the X list. +func (es EditScript) LenX() int { return len(es) - es.stats().NY } + +// LenY is the length of the Y list. +func (es EditScript) LenY() int { return len(es) - es.stats().NX } + +// EqualFunc reports whether the symbols at indexes ix and iy are equal. +// When called by Difference, the index is guaranteed to be within nx and ny. +type EqualFunc func(ix int, iy int) Result + +// Result is the result of comparison. +// NumSame is the number of sub-elements that are equal. +// NumDiff is the number of sub-elements that are not equal. +type Result struct{ NumSame, NumDiff int } + +// BoolResult returns a Result that is either Equal or not Equal. +func BoolResult(b bool) Result { + if b { + return Result{NumSame: 1} // Equal, Similar + } else { + return Result{NumDiff: 2} // Not Equal, not Similar + } +} + +// Equal indicates whether the symbols are equal. Two symbols are equal +// if and only if NumDiff == 0. If Equal, then they are also Similar. +func (r Result) Equal() bool { return r.NumDiff == 0 } + +// Similar indicates whether two symbols are similar and may be represented +// by using the Modified type. As a special case, we consider binary comparisons +// (i.e., those that return Result{1, 0} or Result{0, 1}) to be similar. +// +// The exact ratio of NumSame to NumDiff to determine similarity may change. +func (r Result) Similar() bool { + // Use NumSame+1 to offset NumSame so that binary comparisons are similar. + return r.NumSame+1 >= r.NumDiff +} + +var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0 + +// Difference reports whether two lists of lengths nx and ny are equal +// given the definition of equality provided as f. +// +// This function returns an edit-script, which is a sequence of operations +// needed to convert one list into the other. The following invariants for +// the edit-script are maintained: +// • eq == (es.Dist()==0) +// • nx == es.LenX() +// • ny == es.LenY() +// +// This algorithm is not guaranteed to be an optimal solution (i.e., one that +// produces an edit-script with a minimal Levenshtein distance). This algorithm +// favors performance over optimality. The exact output is not guaranteed to +// be stable and may change over time. +func Difference(nx, ny int, f EqualFunc) (es EditScript) { + // This algorithm is based on traversing what is known as an "edit-graph". + // See Figure 1 from "An O(ND) Difference Algorithm and Its Variations" + // by Eugene W. Myers. Since D can be as large as N itself, this is + // effectively O(N^2). Unlike the algorithm from that paper, we are not + // interested in the optimal path, but at least some "decent" path. + // + // For example, let X and Y be lists of symbols: + // X = [A B C A B B A] + // Y = [C B A B A C] + // + // The edit-graph can be drawn as the following: + // A B C A B B A + // ┌─────────────┐ + // C │_|_|\|_|_|_|_│ 0 + // B │_|\|_|_|\|\|_│ 1 + // A │\|_|_|\|_|_|\│ 2 + // B │_|\|_|_|\|\|_│ 3 + // A │\|_|_|\|_|_|\│ 4 + // C │ | |\| | | | │ 5 + // └─────────────┘ 6 + // 0 1 2 3 4 5 6 7 + // + // List X is written along the horizontal axis, while list Y is written + // along the vertical axis. At any point on this grid, if the symbol in + // list X matches the corresponding symbol in list Y, then a '\' is drawn. 
+ // The goal of any minimal edit-script algorithm is to find a path from the + // top-left corner to the bottom-right corner, while traveling through the + // fewest horizontal or vertical edges. + // A horizontal edge is equivalent to inserting a symbol from list X. + // A vertical edge is equivalent to inserting a symbol from list Y. + // A diagonal edge is equivalent to a matching symbol between both X and Y. + + // Invariants: + // • 0 ≤ fwdPath.X ≤ (fwdFrontier.X, revFrontier.X) ≤ revPath.X ≤ nx + // • 0 ≤ fwdPath.Y ≤ (fwdFrontier.Y, revFrontier.Y) ≤ revPath.Y ≤ ny + // + // In general: + // • fwdFrontier.X < revFrontier.X + // • fwdFrontier.Y < revFrontier.Y + // Unless, it is time for the algorithm to terminate. + fwdPath := path{+1, point{0, 0}, make(EditScript, 0, (nx+ny)/2)} + revPath := path{-1, point{nx, ny}, make(EditScript, 0)} + fwdFrontier := fwdPath.point // Forward search frontier + revFrontier := revPath.point // Reverse search frontier + + // Search budget bounds the cost of searching for better paths. + // The longest sequence of non-matching symbols that can be tolerated is + // approximately the square-root of the search budget. + searchBudget := 4 * (nx + ny) // O(n) + + // Running the tests with the "cmp_debug" build tag prints a visualization + // of the algorithm running in real-time. This is educational for + // understanding how the algorithm works. See debug_enable.go. + f = debug.Begin(nx, ny, f, &fwdPath.es, &revPath.es) + + // The algorithm below is a greedy, meet-in-the-middle algorithm for + // computing sub-optimal edit-scripts between two lists. + // + // The algorithm is approximately as follows: + // • Searching for differences switches back-and-forth between + // a search that starts at the beginning (the top-left corner), and + // a search that starts at the end (the bottom-right corner). The goal of + // the search is connect with the search from the opposite corner. + // • As we search, we build a path in a greedy manner, where the first + // match seen is added to the path (this is sub-optimal, but provides a + // decent result in practice). When matches are found, we try the next pair + // of symbols in the lists and follow all matches as far as possible. + // • When searching for matches, we search along a diagonal going through + // through the "frontier" point. If no matches are found, we advance the + // frontier towards the opposite corner. + // • This algorithm terminates when either the X coordinates or the + // Y coordinates of the forward and reverse frontier points ever intersect. + + // This algorithm is correct even if searching only in the forward direction + // or in the reverse direction. We do both because it is commonly observed + // that two lists commonly differ because elements were added to the front + // or end of the other list. + // + // Non-deterministically start with either the forward or reverse direction + // to introduce some deliberate instability so that we have the flexibility + // to change this algorithm in the future. + if flags.Deterministic || randBool { + goto forwardSearch + } else { + goto reverseSearch + } + +forwardSearch: + { + // Forward search from the beginning. + if fwdFrontier.X >= revFrontier.X || fwdFrontier.Y >= revFrontier.Y || searchBudget == 0 { + goto finishSearch + } + for stop1, stop2, i := false, false, 0; !(stop1 && stop2) && searchBudget > 0; i++ { + // Search in a diagonal pattern for a match. 
+ z := zigzag(i) + p := point{fwdFrontier.X + z, fwdFrontier.Y - z} + switch { + case p.X >= revPath.X || p.Y < fwdPath.Y: + stop1 = true // Hit top-right corner + case p.Y >= revPath.Y || p.X < fwdPath.X: + stop2 = true // Hit bottom-left corner + case f(p.X, p.Y).Equal(): + // Match found, so connect the path to this point. + fwdPath.connect(p, f) + fwdPath.append(Identity) + // Follow sequence of matches as far as possible. + for fwdPath.X < revPath.X && fwdPath.Y < revPath.Y { + if !f(fwdPath.X, fwdPath.Y).Equal() { + break + } + fwdPath.append(Identity) + } + fwdFrontier = fwdPath.point + stop1, stop2 = true, true + default: + searchBudget-- // Match not found + } + debug.Update() + } + // Advance the frontier towards reverse point. + if revPath.X-fwdFrontier.X >= revPath.Y-fwdFrontier.Y { + fwdFrontier.X++ + } else { + fwdFrontier.Y++ + } + goto reverseSearch + } + +reverseSearch: + { + // Reverse search from the end. + if fwdFrontier.X >= revFrontier.X || fwdFrontier.Y >= revFrontier.Y || searchBudget == 0 { + goto finishSearch + } + for stop1, stop2, i := false, false, 0; !(stop1 && stop2) && searchBudget > 0; i++ { + // Search in a diagonal pattern for a match. + z := zigzag(i) + p := point{revFrontier.X - z, revFrontier.Y + z} + switch { + case fwdPath.X >= p.X || revPath.Y < p.Y: + stop1 = true // Hit bottom-left corner + case fwdPath.Y >= p.Y || revPath.X < p.X: + stop2 = true // Hit top-right corner + case f(p.X-1, p.Y-1).Equal(): + // Match found, so connect the path to this point. + revPath.connect(p, f) + revPath.append(Identity) + // Follow sequence of matches as far as possible. + for fwdPath.X < revPath.X && fwdPath.Y < revPath.Y { + if !f(revPath.X-1, revPath.Y-1).Equal() { + break + } + revPath.append(Identity) + } + revFrontier = revPath.point + stop1, stop2 = true, true + default: + searchBudget-- // Match not found + } + debug.Update() + } + // Advance the frontier towards forward point. + if revFrontier.X-fwdPath.X >= revFrontier.Y-fwdPath.Y { + revFrontier.X-- + } else { + revFrontier.Y-- + } + goto forwardSearch + } + +finishSearch: + // Join the forward and reverse paths and then append the reverse path. + fwdPath.connect(revPath.point, f) + for i := len(revPath.es) - 1; i >= 0; i-- { + t := revPath.es[i] + revPath.es = revPath.es[:i] + fwdPath.append(t) + } + debug.Finish() + return fwdPath.es +} + +type path struct { + dir int // +1 if forward, -1 if reverse + point // Leading point of the EditScript path + es EditScript +} + +// connect appends any necessary Identity, Modified, UniqueX, or UniqueY types +// to the edit-script to connect p.point to dst. +func (p *path) connect(dst point, f EqualFunc) { + if p.dir > 0 { + // Connect in forward direction. + for dst.X > p.X && dst.Y > p.Y { + switch r := f(p.X, p.Y); { + case r.Equal(): + p.append(Identity) + case r.Similar(): + p.append(Modified) + case dst.X-p.X >= dst.Y-p.Y: + p.append(UniqueX) + default: + p.append(UniqueY) + } + } + for dst.X > p.X { + p.append(UniqueX) + } + for dst.Y > p.Y { + p.append(UniqueY) + } + } else { + // Connect in reverse direction. 
+ for p.X > dst.X && p.Y > dst.Y { + switch r := f(p.X-1, p.Y-1); { + case r.Equal(): + p.append(Identity) + case r.Similar(): + p.append(Modified) + case p.Y-dst.Y >= p.X-dst.X: + p.append(UniqueY) + default: + p.append(UniqueX) + } + } + for p.X > dst.X { + p.append(UniqueX) + } + for p.Y > dst.Y { + p.append(UniqueY) + } + } +} + +func (p *path) append(t EditType) { + p.es = append(p.es, t) + switch t { + case Identity, Modified: + p.add(p.dir, p.dir) + case UniqueX: + p.add(p.dir, 0) + case UniqueY: + p.add(0, p.dir) + } + debug.Update() +} + +type point struct{ X, Y int } + +func (p *point) add(dx, dy int) { p.X += dx; p.Y += dy } + +// zigzag maps a consecutive sequence of integers to a zig-zag sequence. +// [0 1 2 3 4 5 ...] => [0 -1 +1 -2 +2 ...] +func zigzag(x int) int { + if x&1 != 0 { + x = ^x + } + return x >> 1 +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go new file mode 100644 index 0000000..d8e459c --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go @@ -0,0 +1,9 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package flags + +// Deterministic controls whether the output of Diff should be deterministic. +// This is only used for testing. +var Deterministic bool diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go new file mode 100644 index 0000000..82d1d7f --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go @@ -0,0 +1,10 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.10 + +package flags + +// AtLeastGo110 reports whether the Go toolchain is at least Go 1.10. +const AtLeastGo110 = false diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go new file mode 100644 index 0000000..8646f05 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go @@ -0,0 +1,10 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.10 + +package flags + +// AtLeastGo110 reports whether the Go toolchain is at least Go 1.10. +const AtLeastGo110 = true diff --git a/vendor/github.com/google/go-cmp/cmp/internal/function/func.go b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go new file mode 100644 index 0000000..d127d43 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go @@ -0,0 +1,99 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package function provides functionality for identifying function types. 
+package function + +import ( + "reflect" + "regexp" + "runtime" + "strings" +) + +type funcType int + +const ( + _ funcType = iota + + tbFunc // func(T) bool + ttbFunc // func(T, T) bool + trbFunc // func(T, R) bool + tibFunc // func(T, I) bool + trFunc // func(T) R + + Equal = ttbFunc // func(T, T) bool + EqualAssignable = tibFunc // func(T, I) bool; encapsulates func(T, T) bool + Transformer = trFunc // func(T) R + ValueFilter = ttbFunc // func(T, T) bool + Less = ttbFunc // func(T, T) bool + ValuePredicate = tbFunc // func(T) bool + KeyValuePredicate = trbFunc // func(T, R) bool +) + +var boolType = reflect.TypeOf(true) + +// IsType reports whether the reflect.Type is of the specified function type. +func IsType(t reflect.Type, ft funcType) bool { + if t == nil || t.Kind() != reflect.Func || t.IsVariadic() { + return false + } + ni, no := t.NumIn(), t.NumOut() + switch ft { + case tbFunc: // func(T) bool + if ni == 1 && no == 1 && t.Out(0) == boolType { + return true + } + case ttbFunc: // func(T, T) bool + if ni == 2 && no == 1 && t.In(0) == t.In(1) && t.Out(0) == boolType { + return true + } + case trbFunc: // func(T, R) bool + if ni == 2 && no == 1 && t.Out(0) == boolType { + return true + } + case tibFunc: // func(T, I) bool + if ni == 2 && no == 1 && t.In(0).AssignableTo(t.In(1)) && t.Out(0) == boolType { + return true + } + case trFunc: // func(T) R + if ni == 1 && no == 1 { + return true + } + } + return false +} + +var lastIdentRx = regexp.MustCompile(`[_\p{L}][_\p{L}\p{N}]*$`) + +// NameOf returns the name of the function value. +func NameOf(v reflect.Value) string { + fnc := runtime.FuncForPC(v.Pointer()) + if fnc == nil { + return "" + } + fullName := fnc.Name() // e.g., "long/path/name/mypkg.(*MyType).(long/path/name/mypkg.myMethod)-fm" + + // Method closures have a "-fm" suffix. + fullName = strings.TrimSuffix(fullName, "-fm") + + var name string + for len(fullName) > 0 { + inParen := strings.HasSuffix(fullName, ")") + fullName = strings.TrimSuffix(fullName, ")") + + s := lastIdentRx.FindString(fullName) + if s == "" { + break + } + name = s + "." + name + fullName = strings.TrimSuffix(fullName, s) + + if i := strings.LastIndexByte(fullName, '('); inParen && i >= 0 { + fullName = fullName[:i] + } + fullName = strings.TrimSuffix(fullName, ".") + } + return strings.TrimSuffix(name, ".") +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/name.go b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go new file mode 100644 index 0000000..b6c12ce --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go @@ -0,0 +1,157 @@ +// Copyright 2020, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package value + +import ( + "reflect" + "strconv" +) + +// TypeString is nearly identical to reflect.Type.String, +// but has an additional option to specify that full type names be used. +func TypeString(t reflect.Type, qualified bool) string { + return string(appendTypeName(nil, t, qualified, false)) +} + +func appendTypeName(b []byte, t reflect.Type, qualified, elideFunc bool) []byte { + // BUG: Go reflection provides no way to disambiguate two named types + // of the same name and within the same package, + // but declared within the namespace of different functions. + + // Named type. + if t.Name() != "" { + if qualified && t.PkgPath() != "" { + b = append(b, '"') + b = append(b, t.PkgPath()...) 
+ b = append(b, '"') + b = append(b, '.') + b = append(b, t.Name()...) + } else { + b = append(b, t.String()...) + } + return b + } + + // Unnamed type. + switch k := t.Kind(); k { + case reflect.Bool, reflect.String, reflect.UnsafePointer, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, + reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: + b = append(b, k.String()...) + case reflect.Chan: + if t.ChanDir() == reflect.RecvDir { + b = append(b, "<-"...) + } + b = append(b, "chan"...) + if t.ChanDir() == reflect.SendDir { + b = append(b, "<-"...) + } + b = append(b, ' ') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Func: + if !elideFunc { + b = append(b, "func"...) + } + b = append(b, '(') + for i := 0; i < t.NumIn(); i++ { + if i > 0 { + b = append(b, ", "...) + } + if i == t.NumIn()-1 && t.IsVariadic() { + b = append(b, "..."...) + b = appendTypeName(b, t.In(i).Elem(), qualified, false) + } else { + b = appendTypeName(b, t.In(i), qualified, false) + } + } + b = append(b, ')') + switch t.NumOut() { + case 0: + // Do nothing + case 1: + b = append(b, ' ') + b = appendTypeName(b, t.Out(0), qualified, false) + default: + b = append(b, " ("...) + for i := 0; i < t.NumOut(); i++ { + if i > 0 { + b = append(b, ", "...) + } + b = appendTypeName(b, t.Out(i), qualified, false) + } + b = append(b, ')') + } + case reflect.Struct: + b = append(b, "struct{ "...) + for i := 0; i < t.NumField(); i++ { + if i > 0 { + b = append(b, "; "...) + } + sf := t.Field(i) + if !sf.Anonymous { + if qualified && sf.PkgPath != "" { + b = append(b, '"') + b = append(b, sf.PkgPath...) + b = append(b, '"') + b = append(b, '.') + } + b = append(b, sf.Name...) + b = append(b, ' ') + } + b = appendTypeName(b, sf.Type, qualified, false) + if sf.Tag != "" { + b = append(b, ' ') + b = strconv.AppendQuote(b, string(sf.Tag)) + } + } + if b[len(b)-1] == ' ' { + b = b[:len(b)-1] + } else { + b = append(b, ' ') + } + b = append(b, '}') + case reflect.Slice, reflect.Array: + b = append(b, '[') + if k == reflect.Array { + b = strconv.AppendUint(b, uint64(t.Len()), 10) + } + b = append(b, ']') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Map: + b = append(b, "map["...) + b = appendTypeName(b, t.Key(), qualified, false) + b = append(b, ']') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Ptr: + b = append(b, '*') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Interface: + b = append(b, "interface{ "...) + for i := 0; i < t.NumMethod(); i++ { + if i > 0 { + b = append(b, "; "...) + } + m := t.Method(i) + if qualified && m.PkgPath != "" { + b = append(b, '"') + b = append(b, m.PkgPath...) + b = append(b, '"') + b = append(b, '.') + } + b = append(b, m.Name...) + b = appendTypeName(b, m.Type, qualified, true) + } + if b[len(b)-1] == ' ' { + b = b[:len(b)-1] + } else { + b = append(b, ' ') + } + b = append(b, '}') + default: + panic("invalid kind: " + k.String()) + } + return b +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go new file mode 100644 index 0000000..44f4a5a --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go @@ -0,0 +1,33 @@ +// Copyright 2018, The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build purego + +package value + +import "reflect" + +// Pointer is an opaque typed pointer and is guaranteed to be comparable. +type Pointer struct { + p uintptr + t reflect.Type +} + +// PointerOf returns a Pointer from v, which must be a +// reflect.Ptr, reflect.Slice, or reflect.Map. +func PointerOf(v reflect.Value) Pointer { + // NOTE: Storing a pointer as an uintptr is technically incorrect as it + // assumes that the GC implementation does not use a moving collector. + return Pointer{v.Pointer(), v.Type()} +} + +// IsNil reports whether the pointer is nil. +func (p Pointer) IsNil() bool { + return p.p == 0 +} + +// Uintptr returns the pointer as a uintptr. +func (p Pointer) Uintptr() uintptr { + return p.p +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go new file mode 100644 index 0000000..a605953 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go @@ -0,0 +1,36 @@ +// Copyright 2018, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego + +package value + +import ( + "reflect" + "unsafe" +) + +// Pointer is an opaque typed pointer and is guaranteed to be comparable. +type Pointer struct { + p unsafe.Pointer + t reflect.Type +} + +// PointerOf returns a Pointer from v, which must be a +// reflect.Ptr, reflect.Slice, or reflect.Map. +func PointerOf(v reflect.Value) Pointer { + // The proper representation of a pointer is unsafe.Pointer, + // which is necessary if the GC ever uses a moving collector. + return Pointer{unsafe.Pointer(v.Pointer()), v.Type()} +} + +// IsNil reports whether the pointer is nil. +func (p Pointer) IsNil() bool { + return p.p == nil +} + +// Uintptr returns the pointer as a uintptr. +func (p Pointer) Uintptr() uintptr { + return uintptr(p.p) +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go new file mode 100644 index 0000000..98533b0 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go @@ -0,0 +1,106 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package value + +import ( + "fmt" + "math" + "reflect" + "sort" +) + +// SortKeys sorts a list of map keys, deduplicating keys if necessary. +// The type of each value must be comparable. +func SortKeys(vs []reflect.Value) []reflect.Value { + if len(vs) == 0 { + return vs + } + + // Sort the map keys. + sort.SliceStable(vs, func(i, j int) bool { return isLess(vs[i], vs[j]) }) + + // Deduplicate keys (fails for NaNs). + vs2 := vs[:1] + for _, v := range vs[1:] { + if isLess(vs2[len(vs2)-1], v) { + vs2 = append(vs2, v) + } + } + return vs2 +} + +// isLess is a generic function for sorting arbitrary map keys. +// The inputs must be of the same type and must be comparable. 
+func isLess(x, y reflect.Value) bool { + switch x.Type().Kind() { + case reflect.Bool: + return !x.Bool() && y.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return x.Int() < y.Int() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return x.Uint() < y.Uint() + case reflect.Float32, reflect.Float64: + // NOTE: This does not sort -0 as less than +0 + // since Go maps treat -0 and +0 as equal keys. + fx, fy := x.Float(), y.Float() + return fx < fy || math.IsNaN(fx) && !math.IsNaN(fy) + case reflect.Complex64, reflect.Complex128: + cx, cy := x.Complex(), y.Complex() + rx, ix, ry, iy := real(cx), imag(cx), real(cy), imag(cy) + if rx == ry || (math.IsNaN(rx) && math.IsNaN(ry)) { + return ix < iy || math.IsNaN(ix) && !math.IsNaN(iy) + } + return rx < ry || math.IsNaN(rx) && !math.IsNaN(ry) + case reflect.Ptr, reflect.UnsafePointer, reflect.Chan: + return x.Pointer() < y.Pointer() + case reflect.String: + return x.String() < y.String() + case reflect.Array: + for i := 0; i < x.Len(); i++ { + if isLess(x.Index(i), y.Index(i)) { + return true + } + if isLess(y.Index(i), x.Index(i)) { + return false + } + } + return false + case reflect.Struct: + for i := 0; i < x.NumField(); i++ { + if isLess(x.Field(i), y.Field(i)) { + return true + } + if isLess(y.Field(i), x.Field(i)) { + return false + } + } + return false + case reflect.Interface: + vx, vy := x.Elem(), y.Elem() + if !vx.IsValid() || !vy.IsValid() { + return !vx.IsValid() && vy.IsValid() + } + tx, ty := vx.Type(), vy.Type() + if tx == ty { + return isLess(x.Elem(), y.Elem()) + } + if tx.Kind() != ty.Kind() { + return vx.Kind() < vy.Kind() + } + if tx.String() != ty.String() { + return tx.String() < ty.String() + } + if tx.PkgPath() != ty.PkgPath() { + return tx.PkgPath() < ty.PkgPath() + } + // This can happen in rare situations, so we fallback to just comparing + // the unique pointer for a reflect.Type. This guarantees deterministic + // ordering within a program, but it is obviously not stable. + return reflect.ValueOf(vx.Type()).Pointer() < reflect.ValueOf(vy.Type()).Pointer() + default: + // Must be Func, Map, or Slice; which are not comparable. + panic(fmt.Sprintf("%T is not comparable", x.Type())) + } +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go new file mode 100644 index 0000000..9147a29 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go @@ -0,0 +1,48 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package value + +import ( + "math" + "reflect" +) + +// IsZero reports whether v is the zero value. +// This does not rely on Interface and so can be used on unexported fields. 
+func IsZero(v reflect.Value) bool { + switch v.Kind() { + case reflect.Bool: + return v.Bool() == false + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return math.Float64bits(v.Float()) == 0 + case reflect.Complex64, reflect.Complex128: + return math.Float64bits(real(v.Complex())) == 0 && math.Float64bits(imag(v.Complex())) == 0 + case reflect.String: + return v.String() == "" + case reflect.UnsafePointer: + return v.Pointer() == 0 + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: + return v.IsNil() + case reflect.Array: + for i := 0; i < v.Len(); i++ { + if !IsZero(v.Index(i)) { + return false + } + } + return true + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + if !IsZero(v.Field(i)) { + return false + } + } + return true + } + return false +} diff --git a/vendor/github.com/google/go-cmp/cmp/options.go b/vendor/github.com/google/go-cmp/cmp/options.go new file mode 100644 index 0000000..e57b9eb --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/options.go @@ -0,0 +1,552 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + "regexp" + "strings" + + "github.com/google/go-cmp/cmp/internal/function" +) + +// Option configures for specific behavior of Equal and Diff. In particular, +// the fundamental Option functions (Ignore, Transformer, and Comparer), +// configure how equality is determined. +// +// The fundamental options may be composed with filters (FilterPath and +// FilterValues) to control the scope over which they are applied. +// +// The cmp/cmpopts package provides helper functions for creating options that +// may be used with Equal and Diff. +type Option interface { + // filter applies all filters and returns the option that remains. + // Each option may only read s.curPath and call s.callTTBFunc. + // + // An Options is returned only if multiple comparers or transformers + // can apply simultaneously and will only contain values of those types + // or sub-Options containing values of those types. + filter(s *state, t reflect.Type, vx, vy reflect.Value) applicableOption +} + +// applicableOption represents the following types: +// Fundamental: ignore | validator | *comparer | *transformer +// Grouping: Options +type applicableOption interface { + Option + + // apply executes the option, which may mutate s or panic. + apply(s *state, vx, vy reflect.Value) +} + +// coreOption represents the following types: +// Fundamental: ignore | validator | *comparer | *transformer +// Filters: *pathFilter | *valuesFilter +type coreOption interface { + Option + isCore() +} + +type core struct{} + +func (core) isCore() {} + +// Options is a list of Option values that also satisfies the Option interface. +// Helper comparison packages may return an Options value when packing multiple +// Option values into a single Option. When this package processes an Options, +// it will be implicitly expanded into a flat list. +// +// Applying a filter on an Options is equivalent to applying that same filter +// on all individual options held within. 
+type Options []Option + +func (opts Options) filter(s *state, t reflect.Type, vx, vy reflect.Value) (out applicableOption) { + for _, opt := range opts { + switch opt := opt.filter(s, t, vx, vy); opt.(type) { + case ignore: + return ignore{} // Only ignore can short-circuit evaluation + case validator: + out = validator{} // Takes precedence over comparer or transformer + case *comparer, *transformer, Options: + switch out.(type) { + case nil: + out = opt + case validator: + // Keep validator + case *comparer, *transformer, Options: + out = Options{out, opt} // Conflicting comparers or transformers + } + } + } + return out +} + +func (opts Options) apply(s *state, _, _ reflect.Value) { + const warning = "ambiguous set of applicable options" + const help = "consider using filters to ensure at most one Comparer or Transformer may apply" + var ss []string + for _, opt := range flattenOptions(nil, opts) { + ss = append(ss, fmt.Sprint(opt)) + } + set := strings.Join(ss, "\n\t") + panic(fmt.Sprintf("%s at %#v:\n\t%s\n%s", warning, s.curPath, set, help)) +} + +func (opts Options) String() string { + var ss []string + for _, opt := range opts { + ss = append(ss, fmt.Sprint(opt)) + } + return fmt.Sprintf("Options{%s}", strings.Join(ss, ", ")) +} + +// FilterPath returns a new Option where opt is only evaluated if filter f +// returns true for the current Path in the value tree. +// +// This filter is called even if a slice element or map entry is missing and +// provides an opportunity to ignore such cases. The filter function must be +// symmetric such that the filter result is identical regardless of whether the +// missing value is from x or y. +// +// The option passed in may be an Ignore, Transformer, Comparer, Options, or +// a previously filtered Option. +func FilterPath(f func(Path) bool, opt Option) Option { + if f == nil { + panic("invalid path filter function") + } + if opt := normalizeOption(opt); opt != nil { + return &pathFilter{fnc: f, opt: opt} + } + return nil +} + +type pathFilter struct { + core + fnc func(Path) bool + opt Option +} + +func (f pathFilter) filter(s *state, t reflect.Type, vx, vy reflect.Value) applicableOption { + if f.fnc(s.curPath) { + return f.opt.filter(s, t, vx, vy) + } + return nil +} + +func (f pathFilter) String() string { + return fmt.Sprintf("FilterPath(%s, %v)", function.NameOf(reflect.ValueOf(f.fnc)), f.opt) +} + +// FilterValues returns a new Option where opt is only evaluated if filter f, +// which is a function of the form "func(T, T) bool", returns true for the +// current pair of values being compared. If either value is invalid or +// the type of the values is not assignable to T, then this filter implicitly +// returns false. +// +// The filter function must be +// symmetric (i.e., agnostic to the order of the inputs) and +// deterministic (i.e., produces the same result when given the same inputs). +// If T is an interface, it is possible that f is called with two values with +// different concrete types that both implement T. +// +// The option passed in may be an Ignore, Transformer, Comparer, Options, or +// a previously filtered Option. 
+func FilterValues(f interface{}, opt Option) Option { + v := reflect.ValueOf(f) + if !function.IsType(v.Type(), function.ValueFilter) || v.IsNil() { + panic(fmt.Sprintf("invalid values filter function: %T", f)) + } + if opt := normalizeOption(opt); opt != nil { + vf := &valuesFilter{fnc: v, opt: opt} + if ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 { + vf.typ = ti + } + return vf + } + return nil +} + +type valuesFilter struct { + core + typ reflect.Type // T + fnc reflect.Value // func(T, T) bool + opt Option +} + +func (f valuesFilter) filter(s *state, t reflect.Type, vx, vy reflect.Value) applicableOption { + if !vx.IsValid() || !vx.CanInterface() || !vy.IsValid() || !vy.CanInterface() { + return nil + } + if (f.typ == nil || t.AssignableTo(f.typ)) && s.callTTBFunc(f.fnc, vx, vy) { + return f.opt.filter(s, t, vx, vy) + } + return nil +} + +func (f valuesFilter) String() string { + return fmt.Sprintf("FilterValues(%s, %v)", function.NameOf(f.fnc), f.opt) +} + +// Ignore is an Option that causes all comparisons to be ignored. +// This value is intended to be combined with FilterPath or FilterValues. +// It is an error to pass an unfiltered Ignore option to Equal. +func Ignore() Option { return ignore{} } + +type ignore struct{ core } + +func (ignore) isFiltered() bool { return false } +func (ignore) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption { return ignore{} } +func (ignore) apply(s *state, _, _ reflect.Value) { s.report(true, reportByIgnore) } +func (ignore) String() string { return "Ignore()" } + +// validator is a sentinel Option type to indicate that some options could not +// be evaluated due to unexported fields, missing slice elements, or +// missing map entries. Both values are validator only for unexported fields. +type validator struct{ core } + +func (validator) filter(_ *state, _ reflect.Type, vx, vy reflect.Value) applicableOption { + if !vx.IsValid() || !vy.IsValid() { + return validator{} + } + if !vx.CanInterface() || !vy.CanInterface() { + return validator{} + } + return nil +} +func (validator) apply(s *state, vx, vy reflect.Value) { + // Implies missing slice element or map entry. + if !vx.IsValid() || !vy.IsValid() { + s.report(vx.IsValid() == vy.IsValid(), 0) + return + } + + // Unable to Interface implies unexported field without visibility access. + if !vx.CanInterface() || !vy.CanInterface() { + help := "consider using a custom Comparer; if you control the implementation of type, you can also consider using an Exporter, AllowUnexported, or cmpopts.IgnoreUnexported" + var name string + if t := s.curPath.Index(-2).Type(); t.Name() != "" { + // Named type with unexported fields. + name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType + if _, ok := reflect.New(t).Interface().(error); ok { + help = "consider using cmpopts.EquateErrors to compare error values" + } + } else { + // Unnamed type with unexported fields. Derive PkgPath from field. + var pkgPath string + for i := 0; i < t.NumField() && pkgPath == ""; i++ { + pkgPath = t.Field(i).PkgPath + } + name = fmt.Sprintf("%q.(%v)", pkgPath, t.String()) // e.g., "path/to/package".(struct { a int }) + } + panic(fmt.Sprintf("cannot handle unexported field at %#v:\n\t%v\n%s", s.curPath, name, help)) + } + + panic("not reachable") +} + +// identRx represents a valid identifier according to the Go specification. 
+const identRx = `[_\p{L}][_\p{L}\p{N}]*` + +var identsRx = regexp.MustCompile(`^` + identRx + `(\.` + identRx + `)*$`) + +// Transformer returns an Option that applies a transformation function that +// converts values of a certain type into that of another. +// +// The transformer f must be a function "func(T) R" that converts values of +// type T to those of type R and is implicitly filtered to input values +// assignable to T. The transformer must not mutate T in any way. +// +// To help prevent some cases of infinite recursive cycles applying the +// same transform to the output of itself (e.g., in the case where the +// input and output types are the same), an implicit filter is added such that +// a transformer is applicable only if that exact transformer is not already +// in the tail of the Path since the last non-Transform step. +// For situations where the implicit filter is still insufficient, +// consider using cmpopts.AcyclicTransformer, which adds a filter +// to prevent the transformer from being recursively applied upon itself. +// +// The name is a user provided label that is used as the Transform.Name in the +// transformation PathStep (and eventually shown in the Diff output). +// The name must be a valid identifier or qualified identifier in Go syntax. +// If empty, an arbitrary name is used. +func Transformer(name string, f interface{}) Option { + v := reflect.ValueOf(f) + if !function.IsType(v.Type(), function.Transformer) || v.IsNil() { + panic(fmt.Sprintf("invalid transformer function: %T", f)) + } + if name == "" { + name = function.NameOf(v) + if !identsRx.MatchString(name) { + name = "λ" // Lambda-symbol as placeholder name + } + } else if !identsRx.MatchString(name) { + panic(fmt.Sprintf("invalid name: %q", name)) + } + tr := &transformer{name: name, fnc: reflect.ValueOf(f)} + if ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 { + tr.typ = ti + } + return tr +} + +type transformer struct { + core + name string + typ reflect.Type // T + fnc reflect.Value // func(T) R +} + +func (tr *transformer) isFiltered() bool { return tr.typ != nil } + +func (tr *transformer) filter(s *state, t reflect.Type, _, _ reflect.Value) applicableOption { + for i := len(s.curPath) - 1; i >= 0; i-- { + if t, ok := s.curPath[i].(Transform); !ok { + break // Hit most recent non-Transform step + } else if tr == t.trans { + return nil // Cannot directly use same Transform + } + } + if tr.typ == nil || t.AssignableTo(tr.typ) { + return tr + } + return nil +} + +func (tr *transformer) apply(s *state, vx, vy reflect.Value) { + step := Transform{&transform{pathStep{typ: tr.fnc.Type().Out(0)}, tr}} + vvx := s.callTRFunc(tr.fnc, vx, step) + vvy := s.callTRFunc(tr.fnc, vy, step) + step.vx, step.vy = vvx, vvy + s.compareAny(step) +} + +func (tr transformer) String() string { + return fmt.Sprintf("Transformer(%s, %s)", tr.name, function.NameOf(tr.fnc)) +} + +// Comparer returns an Option that determines whether two values are equal +// to each other. +// +// The comparer f must be a function "func(T, T) bool" and is implicitly +// filtered to input values assignable to T. If T is an interface, it is +// possible that f is called with two values of different concrete types that +// both implement T. 
+// +// The equality function must be: +// • Symmetric: equal(x, y) == equal(y, x) +// • Deterministic: equal(x, y) == equal(x, y) +// • Pure: equal(x, y) does not modify x or y +func Comparer(f interface{}) Option { + v := reflect.ValueOf(f) + if !function.IsType(v.Type(), function.Equal) || v.IsNil() { + panic(fmt.Sprintf("invalid comparer function: %T", f)) + } + cm := &comparer{fnc: v} + if ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 { + cm.typ = ti + } + return cm +} + +type comparer struct { + core + typ reflect.Type // T + fnc reflect.Value // func(T, T) bool +} + +func (cm *comparer) isFiltered() bool { return cm.typ != nil } + +func (cm *comparer) filter(_ *state, t reflect.Type, _, _ reflect.Value) applicableOption { + if cm.typ == nil || t.AssignableTo(cm.typ) { + return cm + } + return nil +} + +func (cm *comparer) apply(s *state, vx, vy reflect.Value) { + eq := s.callTTBFunc(cm.fnc, vx, vy) + s.report(eq, reportByFunc) +} + +func (cm comparer) String() string { + return fmt.Sprintf("Comparer(%s)", function.NameOf(cm.fnc)) +} + +// Exporter returns an Option that specifies whether Equal is allowed to +// introspect into the unexported fields of certain struct types. +// +// Users of this option must understand that comparing on unexported fields +// from external packages is not safe since changes in the internal +// implementation of some external package may cause the result of Equal +// to unexpectedly change. However, it may be valid to use this option on types +// defined in an internal package where the semantic meaning of an unexported +// field is in the control of the user. +// +// In many cases, a custom Comparer should be used instead that defines +// equality as a function of the public API of a type rather than the underlying +// unexported implementation. +// +// For example, the reflect.Type documentation defines equality to be determined +// by the == operator on the interface (essentially performing a shallow pointer +// comparison) and most attempts to compare *regexp.Regexp types are interested +// in only checking that the regular expression strings are equal. +// Both of these are accomplished using Comparers: +// +// Comparer(func(x, y reflect.Type) bool { return x == y }) +// Comparer(func(x, y *regexp.Regexp) bool { return x.String() == y.String() }) +// +// In other cases, the cmpopts.IgnoreUnexported option can be used to ignore +// all unexported fields on specified struct types. +func Exporter(f func(reflect.Type) bool) Option { + if !supportExporters { + panic("Exporter is not supported on purego builds") + } + return exporter(f) +} + +type exporter func(reflect.Type) bool + +func (exporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption { + panic("not implemented") +} + +// AllowUnexported returns an Options that allows Equal to forcibly introspect +// unexported fields of the specified struct types. +// +// See Exporter for the proper use of this option. +func AllowUnexported(types ...interface{}) Option { + m := make(map[reflect.Type]bool) + for _, typ := range types { + t := reflect.TypeOf(typ) + if t.Kind() != reflect.Struct { + panic(fmt.Sprintf("invalid struct type: %T", typ)) + } + m[t] = true + } + return exporter(func(t reflect.Type) bool { return m[t] }) +} + +// Result represents the comparison result for a single node and +// is provided by cmp when calling Result (see Reporter). 
+type Result struct { + _ [0]func() // Make Result incomparable + flags resultFlags +} + +// Equal reports whether the node was determined to be equal or not. +// As a special case, ignored nodes are considered equal. +func (r Result) Equal() bool { + return r.flags&(reportEqual|reportByIgnore) != 0 +} + +// ByIgnore reports whether the node is equal because it was ignored. +// This never reports true if Equal reports false. +func (r Result) ByIgnore() bool { + return r.flags&reportByIgnore != 0 +} + +// ByMethod reports whether the Equal method determined equality. +func (r Result) ByMethod() bool { + return r.flags&reportByMethod != 0 +} + +// ByFunc reports whether a Comparer function determined equality. +func (r Result) ByFunc() bool { + return r.flags&reportByFunc != 0 +} + +// ByCycle reports whether a reference cycle was detected. +func (r Result) ByCycle() bool { + return r.flags&reportByCycle != 0 +} + +type resultFlags uint + +const ( + _ resultFlags = (1 << iota) / 2 + + reportEqual + reportUnequal + reportByIgnore + reportByMethod + reportByFunc + reportByCycle +) + +// Reporter is an Option that can be passed to Equal. When Equal traverses +// the value trees, it calls PushStep as it descends into each node in the +// tree and PopStep as it ascend out of the node. The leaves of the tree are +// either compared (determined to be equal or not equal) or ignored and reported +// as such by calling the Report method. +func Reporter(r interface { + // PushStep is called when a tree-traversal operation is performed. + // The PathStep itself is only valid until the step is popped. + // The PathStep.Values are valid for the duration of the entire traversal + // and must not be mutated. + // + // Equal always calls PushStep at the start to provide an operation-less + // PathStep used to report the root values. + // + // Within a slice, the exact set of inserted, removed, or modified elements + // is unspecified and may change in future implementations. + // The entries of a map are iterated through in an unspecified order. + PushStep(PathStep) + + // Report is called exactly once on leaf nodes to report whether the + // comparison identified the node as equal, unequal, or ignored. + // A leaf node is one that is immediately preceded by and followed by + // a pair of PushStep and PopStep calls. + Report(Result) + + // PopStep ascends back up the value tree. + // There is always a matching pop call for every push call. + PopStep() +}) Option { + return reporter{r} +} + +type reporter struct{ reporterIface } +type reporterIface interface { + PushStep(PathStep) + Report(Result) + PopStep() +} + +func (reporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption { + panic("not implemented") +} + +// normalizeOption normalizes the input options such that all Options groups +// are flattened and groups with a single element are reduced to that element. +// Only coreOptions and Options containing coreOptions are allowed. +func normalizeOption(src Option) Option { + switch opts := flattenOptions(nil, Options{src}); len(opts) { + case 0: + return nil + case 1: + return opts[0] + default: + return opts + } +} + +// flattenOptions copies all options in src to dst as a flat list. +// Only coreOptions and Options containing coreOptions are allowed. 
+func flattenOptions(dst, src Options) Options { + for _, opt := range src { + switch opt := opt.(type) { + case nil: + continue + case Options: + dst = flattenOptions(dst, opt) + case coreOption: + dst = append(dst, opt) + default: + panic(fmt.Sprintf("invalid option type: %T", opt)) + } + } + return dst +} diff --git a/vendor/github.com/google/go-cmp/cmp/path.go b/vendor/github.com/google/go-cmp/cmp/path.go new file mode 100644 index 0000000..f01eff3 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/path.go @@ -0,0 +1,378 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + "strings" + "unicode" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/value" +) + +// Path is a list of PathSteps describing the sequence of operations to get +// from some root type to the current position in the value tree. +// The first Path element is always an operation-less PathStep that exists +// simply to identify the initial type. +// +// When traversing structs with embedded structs, the embedded struct will +// always be accessed as a field before traversing the fields of the +// embedded struct themselves. That is, an exported field from the +// embedded struct will never be accessed directly from the parent struct. +type Path []PathStep + +// PathStep is a union-type for specific operations to traverse +// a value's tree structure. Users of this package never need to implement +// these types as values of this type will be returned by this package. +// +// Implementations of this interface are +// StructField, SliceIndex, MapIndex, Indirect, TypeAssertion, and Transform. +type PathStep interface { + String() string + + // Type is the resulting type after performing the path step. + Type() reflect.Type + + // Values is the resulting values after performing the path step. + // The type of each valid value is guaranteed to be identical to Type. + // + // In some cases, one or both may be invalid or have restrictions: + // • For StructField, both are not interface-able if the current field + // is unexported and the struct type is not explicitly permitted by + // an Exporter to traverse unexported fields. + // • For SliceIndex, one may be invalid if an element is missing from + // either the x or y slice. + // • For MapIndex, one may be invalid if an entry is missing from + // either the x or y map. + // + // The provided values must not be mutated. + Values() (vx, vy reflect.Value) +} + +var ( + _ PathStep = StructField{} + _ PathStep = SliceIndex{} + _ PathStep = MapIndex{} + _ PathStep = Indirect{} + _ PathStep = TypeAssertion{} + _ PathStep = Transform{} +) + +func (pa *Path) push(s PathStep) { + *pa = append(*pa, s) +} + +func (pa *Path) pop() { + *pa = (*pa)[:len(*pa)-1] +} + +// Last returns the last PathStep in the Path. +// If the path is empty, this returns a non-nil PathStep that reports a nil Type. +func (pa Path) Last() PathStep { + return pa.Index(-1) +} + +// Index returns the ith step in the Path and supports negative indexing. +// A negative index starts counting from the tail of the Path such that -1 +// refers to the last step, -2 refers to the second-to-last step, and so on. +// If index is invalid, this returns a non-nil PathStep that reports a nil Type. 
+func (pa Path) Index(i int) PathStep { + if i < 0 { + i = len(pa) + i + } + if i < 0 || i >= len(pa) { + return pathStep{} + } + return pa[i] +} + +// String returns the simplified path to a node. +// The simplified path only contains struct field accesses. +// +// For example: +// MyMap.MySlices.MyField +func (pa Path) String() string { + var ss []string + for _, s := range pa { + if _, ok := s.(StructField); ok { + ss = append(ss, s.String()) + } + } + return strings.TrimPrefix(strings.Join(ss, ""), ".") +} + +// GoString returns the path to a specific node using Go syntax. +// +// For example: +// (*root.MyMap["key"].(*mypkg.MyStruct).MySlices)[2][3].MyField +func (pa Path) GoString() string { + var ssPre, ssPost []string + var numIndirect int + for i, s := range pa { + var nextStep PathStep + if i+1 < len(pa) { + nextStep = pa[i+1] + } + switch s := s.(type) { + case Indirect: + numIndirect++ + pPre, pPost := "(", ")" + switch nextStep.(type) { + case Indirect: + continue // Next step is indirection, so let them batch up + case StructField: + numIndirect-- // Automatic indirection on struct fields + case nil: + pPre, pPost = "", "" // Last step; no need for parenthesis + } + if numIndirect > 0 { + ssPre = append(ssPre, pPre+strings.Repeat("*", numIndirect)) + ssPost = append(ssPost, pPost) + } + numIndirect = 0 + continue + case Transform: + ssPre = append(ssPre, s.trans.name+"(") + ssPost = append(ssPost, ")") + continue + } + ssPost = append(ssPost, s.String()) + } + for i, j := 0, len(ssPre)-1; i < j; i, j = i+1, j-1 { + ssPre[i], ssPre[j] = ssPre[j], ssPre[i] + } + return strings.Join(ssPre, "") + strings.Join(ssPost, "") +} + +type pathStep struct { + typ reflect.Type + vx, vy reflect.Value +} + +func (ps pathStep) Type() reflect.Type { return ps.typ } +func (ps pathStep) Values() (vx, vy reflect.Value) { return ps.vx, ps.vy } +func (ps pathStep) String() string { + if ps.typ == nil { + return "" + } + s := ps.typ.String() + if s == "" || strings.ContainsAny(s, "{}\n") { + return "root" // Type too simple or complex to print + } + return fmt.Sprintf("{%s}", s) +} + +// StructField represents a struct field access on a field called Name. +type StructField struct{ *structField } +type structField struct { + pathStep + name string + idx int + + // These fields are used for forcibly accessing an unexported field. + // pvx, pvy, and field are only valid if unexported is true. + unexported bool + mayForce bool // Forcibly allow visibility + paddr bool // Was parent addressable? + pvx, pvy reflect.Value // Parent values (always addressible) + field reflect.StructField // Field information +} + +func (sf StructField) Type() reflect.Type { return sf.typ } +func (sf StructField) Values() (vx, vy reflect.Value) { + if !sf.unexported { + return sf.vx, sf.vy // CanInterface reports true + } + + // Forcibly obtain read-write access to an unexported struct field. + if sf.mayForce { + vx = retrieveUnexportedField(sf.pvx, sf.field, sf.paddr) + vy = retrieveUnexportedField(sf.pvy, sf.field, sf.paddr) + return vx, vy // CanInterface reports true + } + return sf.vx, sf.vy // CanInterface reports false +} +func (sf StructField) String() string { return fmt.Sprintf(".%s", sf.name) } + +// Name is the field name. +func (sf StructField) Name() string { return sf.name } + +// Index is the index of the field in the parent struct type. +// See reflect.Type.Field. +func (sf StructField) Index() int { return sf.idx } + +// SliceIndex is an index operation on a slice or array at some index Key. 
+type SliceIndex struct{ *sliceIndex } +type sliceIndex struct { + pathStep + xkey, ykey int + isSlice bool // False for reflect.Array +} + +func (si SliceIndex) Type() reflect.Type { return si.typ } +func (si SliceIndex) Values() (vx, vy reflect.Value) { return si.vx, si.vy } +func (si SliceIndex) String() string { + switch { + case si.xkey == si.ykey: + return fmt.Sprintf("[%d]", si.xkey) + case si.ykey == -1: + // [5->?] means "I don't know where X[5] went" + return fmt.Sprintf("[%d->?]", si.xkey) + case si.xkey == -1: + // [?->3] means "I don't know where Y[3] came from" + return fmt.Sprintf("[?->%d]", si.ykey) + default: + // [5->3] means "X[5] moved to Y[3]" + return fmt.Sprintf("[%d->%d]", si.xkey, si.ykey) + } +} + +// Key is the index key; it may return -1 if in a split state +func (si SliceIndex) Key() int { + if si.xkey != si.ykey { + return -1 + } + return si.xkey +} + +// SplitKeys are the indexes for indexing into slices in the +// x and y values, respectively. These indexes may differ due to the +// insertion or removal of an element in one of the slices, causing +// all of the indexes to be shifted. If an index is -1, then that +// indicates that the element does not exist in the associated slice. +// +// Key is guaranteed to return -1 if and only if the indexes returned +// by SplitKeys are not the same. SplitKeys will never return -1 for +// both indexes. +func (si SliceIndex) SplitKeys() (ix, iy int) { return si.xkey, si.ykey } + +// MapIndex is an index operation on a map at some index Key. +type MapIndex struct{ *mapIndex } +type mapIndex struct { + pathStep + key reflect.Value +} + +func (mi MapIndex) Type() reflect.Type { return mi.typ } +func (mi MapIndex) Values() (vx, vy reflect.Value) { return mi.vx, mi.vy } +func (mi MapIndex) String() string { return fmt.Sprintf("[%#v]", mi.key) } + +// Key is the value of the map key. +func (mi MapIndex) Key() reflect.Value { return mi.key } + +// Indirect represents pointer indirection on the parent type. +type Indirect struct{ *indirect } +type indirect struct { + pathStep +} + +func (in Indirect) Type() reflect.Type { return in.typ } +func (in Indirect) Values() (vx, vy reflect.Value) { return in.vx, in.vy } +func (in Indirect) String() string { return "*" } + +// TypeAssertion represents a type assertion on an interface. +type TypeAssertion struct{ *typeAssertion } +type typeAssertion struct { + pathStep +} + +func (ta TypeAssertion) Type() reflect.Type { return ta.typ } +func (ta TypeAssertion) Values() (vx, vy reflect.Value) { return ta.vx, ta.vy } +func (ta TypeAssertion) String() string { return fmt.Sprintf(".(%v)", ta.typ) } + +// Transform is a transformation from the parent type to the current type. +type Transform struct{ *transform } +type transform struct { + pathStep + trans *transformer +} + +func (tf Transform) Type() reflect.Type { return tf.typ } +func (tf Transform) Values() (vx, vy reflect.Value) { return tf.vx, tf.vy } +func (tf Transform) String() string { return fmt.Sprintf("%s()", tf.trans.name) } + +// Name is the name of the Transformer. +func (tf Transform) Name() string { return tf.trans.name } + +// Func is the function pointer to the transformer function. +func (tf Transform) Func() reflect.Value { return tf.trans.fnc } + +// Option returns the originally constructed Transformer option. +// The == operator can be used to detect the exact option used. 
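Illustrative sketch, not part of the vendored sources: a Reporter can type-switch on the concrete PathStep implementations above (StructField, SliceIndex, MapIndex, Indirect, TypeAssertion, Transform) to describe where in the value tree a difference occurred. describeStep is a hypothetical helper and its wording is arbitrary.

package pathinspect

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

// describeStep would typically be called from a Reporter's Report method,
// e.g. describeStep(path.Last()).
func describeStep(ps cmp.PathStep) string {
	switch s := ps.(type) {
	case cmp.StructField:
		return "struct field " + s.Name()
	case cmp.SliceIndex:
		// SplitKeys distinguishes inserted/removed elements (-1 on one side).
		ix, iy := s.SplitKeys()
		return fmt.Sprintf("slice index x=%d, y=%d", ix, iy)
	case cmp.MapIndex:
		return fmt.Sprintf("map entry %#v", s.Key())
	case cmp.Indirect:
		return "pointer indirection"
	case cmp.TypeAssertion:
		return fmt.Sprintf("type assertion to %v", s.Type())
	case cmp.Transform:
		return "output of transformer " + s.Name()
	default:
		return s.String() // the operation-less root step
	}
}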
+func (tf Transform) Option() Option { return tf.trans } + +// pointerPath represents a dual-stack of pointers encountered when +// recursively traversing the x and y values. This data structure supports +// detection of cycles and determining whether the cycles are equal. +// In Go, cycles can occur via pointers, slices, and maps. +// +// The pointerPath uses a map to represent a stack; where descension into a +// pointer pushes the address onto the stack, and ascension from a pointer +// pops the address from the stack. Thus, when traversing into a pointer from +// reflect.Ptr, reflect.Slice element, or reflect.Map, we can detect cycles +// by checking whether the pointer has already been visited. The cycle detection +// uses a separate stack for the x and y values. +// +// If a cycle is detected we need to determine whether the two pointers +// should be considered equal. The definition of equality chosen by Equal +// requires two graphs to have the same structure. To determine this, both the +// x and y values must have a cycle where the previous pointers were also +// encountered together as a pair. +// +// Semantically, this is equivalent to augmenting Indirect, SliceIndex, and +// MapIndex with pointer information for the x and y values. +// Suppose px and py are two pointers to compare, we then search the +// Path for whether px was ever encountered in the Path history of x, and +// similarly so with py. If either side has a cycle, the comparison is only +// equal if both px and py have a cycle resulting from the same PathStep. +// +// Using a map as a stack is more performant as we can perform cycle detection +// in O(1) instead of O(N) where N is len(Path). +type pointerPath struct { + // mx is keyed by x pointers, where the value is the associated y pointer. + mx map[value.Pointer]value.Pointer + // my is keyed by y pointers, where the value is the associated x pointer. + my map[value.Pointer]value.Pointer +} + +func (p *pointerPath) Init() { + p.mx = make(map[value.Pointer]value.Pointer) + p.my = make(map[value.Pointer]value.Pointer) +} + +// Push indicates intent to descend into pointers vx and vy where +// visited reports whether either has been seen before. If visited before, +// equal reports whether both pointers were encountered together. +// Pop must be called if and only if the pointers were never visited. +// +// The pointers vx and vy must be a reflect.Ptr, reflect.Slice, or reflect.Map +// and be non-nil. +func (p pointerPath) Push(vx, vy reflect.Value) (equal, visited bool) { + px := value.PointerOf(vx) + py := value.PointerOf(vy) + _, ok1 := p.mx[px] + _, ok2 := p.my[py] + if ok1 || ok2 { + equal = p.mx[px] == py && p.my[py] == px // Pointers paired together + return equal, true + } + p.mx[px] = py + p.my[py] = px + return false, false +} + +// Pop ascends from pointers vx and vy. +func (p pointerPath) Pop(vx, vy reflect.Value) { + delete(p.mx, value.PointerOf(vx)) + delete(p.my, value.PointerOf(vy)) +} + +// isExported reports whether the identifier is exported. +func isExported(id string) bool { + r, _ := utf8.DecodeRuneInString(id) + return unicode.IsUpper(r) +} diff --git a/vendor/github.com/google/go-cmp/cmp/report.go b/vendor/github.com/google/go-cmp/cmp/report.go new file mode 100644 index 0000000..f43cd12 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report.go @@ -0,0 +1,54 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
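Illustrative sketch, not part of the vendored sources, of the behaviour pointerPath enables: Equal terminates on self-referential values instead of recursing forever, and treats two cycles as equal only when they close at the same point in both graphs. The node type is a throwaway example type.

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

type node struct {
	Value int
	Next  *node
}

// ring builds a one-element cyclic list: n.Next points back to n.
func ring(v int) *node {
	n := &node{Value: v}
	n.Next = n
	return n
}

func main() {
	fmt.Println(cmp.Equal(ring(1), ring(1))) // true: same values, same cycle shape
	fmt.Println(cmp.Equal(ring(1), ring(2))) // false: the values differ
}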
+ +package cmp + +// defaultReporter implements the reporter interface. +// +// As Equal serially calls the PushStep, Report, and PopStep methods, the +// defaultReporter constructs a tree-based representation of the compared value +// and the result of each comparison (see valueNode). +// +// When the String method is called, the FormatDiff method transforms the +// valueNode tree into a textNode tree, which is a tree-based representation +// of the textual output (see textNode). +// +// Lastly, the textNode.String method produces the final report as a string. +type defaultReporter struct { + root *valueNode + curr *valueNode +} + +func (r *defaultReporter) PushStep(ps PathStep) { + r.curr = r.curr.PushStep(ps) + if r.root == nil { + r.root = r.curr + } +} +func (r *defaultReporter) Report(rs Result) { + r.curr.Report(rs) +} +func (r *defaultReporter) PopStep() { + r.curr = r.curr.PopStep() +} + +// String provides a full report of the differences detected as a structured +// literal in pseudo-Go syntax. String may only be called after the entire tree +// has been traversed. +func (r *defaultReporter) String() string { + assert(r.root != nil && r.curr == nil) + if r.root.NumDiff == 0 { + return "" + } + ptrs := new(pointerReferences) + text := formatOptions{}.FormatDiff(r.root, ptrs) + resolveReferences(text) + return text.String() +} + +func assert(ok bool) { + if !ok { + panic("assertion failure") + } +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_compare.go b/vendor/github.com/google/go-cmp/cmp/report_compare.go new file mode 100644 index 0000000..104bb30 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_compare.go @@ -0,0 +1,432 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + + "github.com/google/go-cmp/cmp/internal/value" +) + +// numContextRecords is the number of surrounding equal records to print. +const numContextRecords = 2 + +type diffMode byte + +const ( + diffUnknown diffMode = 0 + diffIdentical diffMode = ' ' + diffRemoved diffMode = '-' + diffInserted diffMode = '+' +) + +type typeMode int + +const ( + // emitType always prints the type. + emitType typeMode = iota + // elideType never prints the type. + elideType + // autoType prints the type only for composite kinds + // (i.e., structs, slices, arrays, and maps). + autoType +) + +type formatOptions struct { + // DiffMode controls the output mode of FormatDiff. + // + // If diffUnknown, then produce a diff of the x and y values. + // If diffIdentical, then emit values as if they were equal. + // If diffRemoved, then only emit x values (ignoring y values). + // If diffInserted, then only emit y values (ignoring x values). + DiffMode diffMode + + // TypeMode controls whether to print the type for the current node. + // + // As a general rule of thumb, we always print the type of the next node + // after an interface, and always elide the type of the next node after + // a slice or map node. + TypeMode typeMode + + // formatValueOptions are options specific to printing reflect.Values. 
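Illustrative sketch, not part of the vendored sources, of the public entry point that drives this reporter: cmp.Diff runs Equal with a defaultReporter attached and returns its String output, which is empty when the values are equal. Removed values are marked with '-' and inserted values with '+'; the exact layout is unspecified. The Config type is invented for the example.

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

func main() {
	type Config struct {
		Name  string
		Ports []int
	}

	x := Config{Name: "web", Ports: []int{80, 443}}
	y := Config{Name: "web", Ports: []int{80, 8443}}

	if diff := cmp.Diff(x, y); diff != "" {
		fmt.Printf("Config mismatch (-x +y):\n%s", diff)
	}
}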
+ formatValueOptions +} + +func (opts formatOptions) WithDiffMode(d diffMode) formatOptions { + opts.DiffMode = d + return opts +} +func (opts formatOptions) WithTypeMode(t typeMode) formatOptions { + opts.TypeMode = t + return opts +} +func (opts formatOptions) WithVerbosity(level int) formatOptions { + opts.VerbosityLevel = level + opts.LimitVerbosity = true + return opts +} +func (opts formatOptions) verbosity() uint { + switch { + case opts.VerbosityLevel < 0: + return 0 + case opts.VerbosityLevel > 16: + return 16 // some reasonable maximum to avoid shift overflow + default: + return uint(opts.VerbosityLevel) + } +} + +const maxVerbosityPreset = 6 + +// verbosityPreset modifies the verbosity settings given an index +// between 0 and maxVerbosityPreset, inclusive. +func verbosityPreset(opts formatOptions, i int) formatOptions { + opts.VerbosityLevel = int(opts.verbosity()) + 2*i + if i > 0 { + opts.AvoidStringer = true + } + if i >= maxVerbosityPreset { + opts.PrintAddresses = true + opts.QualifiedNames = true + } + return opts +} + +// FormatDiff converts a valueNode tree into a textNode tree, where the later +// is a textual representation of the differences detected in the former. +func (opts formatOptions) FormatDiff(v *valueNode, ptrs *pointerReferences) (out textNode) { + if opts.DiffMode == diffIdentical { + opts = opts.WithVerbosity(1) + } else if opts.verbosity() < 3 { + opts = opts.WithVerbosity(3) + } + + // Check whether we have specialized formatting for this node. + // This is not necessary, but helpful for producing more readable outputs. + if opts.CanFormatDiffSlice(v) { + return opts.FormatDiffSlice(v) + } + + var parentKind reflect.Kind + if v.parent != nil && v.parent.TransformerName == "" { + parentKind = v.parent.Type.Kind() + } + + // For leaf nodes, format the value based on the reflect.Values alone. + if v.MaxDepth == 0 { + switch opts.DiffMode { + case diffUnknown, diffIdentical: + // Format Equal. + if v.NumDiff == 0 { + outx := opts.FormatValue(v.ValueX, parentKind, ptrs) + outy := opts.FormatValue(v.ValueY, parentKind, ptrs) + if v.NumIgnored > 0 && v.NumSame == 0 { + return textEllipsis + } else if outx.Len() < outy.Len() { + return outx + } else { + return outy + } + } + + // Format unequal. + assert(opts.DiffMode == diffUnknown) + var list textList + outx := opts.WithTypeMode(elideType).FormatValue(v.ValueX, parentKind, ptrs) + outy := opts.WithTypeMode(elideType).FormatValue(v.ValueY, parentKind, ptrs) + for i := 0; i <= maxVerbosityPreset && outx != nil && outy != nil && outx.Equal(outy); i++ { + opts2 := verbosityPreset(opts, i).WithTypeMode(elideType) + outx = opts2.FormatValue(v.ValueX, parentKind, ptrs) + outy = opts2.FormatValue(v.ValueY, parentKind, ptrs) + } + if outx != nil { + list = append(list, textRecord{Diff: '-', Value: outx}) + } + if outy != nil { + list = append(list, textRecord{Diff: '+', Value: outy}) + } + return opts.WithTypeMode(emitType).FormatType(v.Type, list) + case diffRemoved: + return opts.FormatValue(v.ValueX, parentKind, ptrs) + case diffInserted: + return opts.FormatValue(v.ValueY, parentKind, ptrs) + default: + panic("invalid diff mode") + } + } + + // Register slice element to support cycle detection. + if parentKind == reflect.Slice { + ptrRefs := ptrs.PushPair(v.ValueX, v.ValueY, opts.DiffMode, true) + defer ptrs.Pop() + defer func() { out = wrapTrunkReferences(ptrRefs, out) }() + } + + // Descend into the child value node. 
+ if v.TransformerName != "" { + out := opts.WithTypeMode(emitType).FormatDiff(v.Value, ptrs) + out = &textWrap{Prefix: "Inverse(" + v.TransformerName + ", ", Value: out, Suffix: ")"} + return opts.FormatType(v.Type, out) + } else { + switch k := v.Type.Kind(); k { + case reflect.Struct, reflect.Array, reflect.Slice: + out = opts.formatDiffList(v.Records, k, ptrs) + out = opts.FormatType(v.Type, out) + case reflect.Map: + // Register map to support cycle detection. + ptrRefs := ptrs.PushPair(v.ValueX, v.ValueY, opts.DiffMode, false) + defer ptrs.Pop() + + out = opts.formatDiffList(v.Records, k, ptrs) + out = wrapTrunkReferences(ptrRefs, out) + out = opts.FormatType(v.Type, out) + case reflect.Ptr: + // Register pointer to support cycle detection. + ptrRefs := ptrs.PushPair(v.ValueX, v.ValueY, opts.DiffMode, false) + defer ptrs.Pop() + + out = opts.FormatDiff(v.Value, ptrs) + out = wrapTrunkReferences(ptrRefs, out) + out = &textWrap{Prefix: "&", Value: out} + case reflect.Interface: + out = opts.WithTypeMode(emitType).FormatDiff(v.Value, ptrs) + default: + panic(fmt.Sprintf("%v cannot have children", k)) + } + return out + } +} + +func (opts formatOptions) formatDiffList(recs []reportRecord, k reflect.Kind, ptrs *pointerReferences) textNode { + // Derive record name based on the data structure kind. + var name string + var formatKey func(reflect.Value) string + switch k { + case reflect.Struct: + name = "field" + opts = opts.WithTypeMode(autoType) + formatKey = func(v reflect.Value) string { return v.String() } + case reflect.Slice, reflect.Array: + name = "element" + opts = opts.WithTypeMode(elideType) + formatKey = func(reflect.Value) string { return "" } + case reflect.Map: + name = "entry" + opts = opts.WithTypeMode(elideType) + formatKey = func(v reflect.Value) string { return formatMapKey(v, false, ptrs) } + } + + maxLen := -1 + if opts.LimitVerbosity { + if opts.DiffMode == diffIdentical { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + } else { + maxLen = (1 << opts.verbosity()) << 1 // 2, 4, 8, 16, 32, 64, etc... + } + opts.VerbosityLevel-- + } + + // Handle unification. + switch opts.DiffMode { + case diffIdentical, diffRemoved, diffInserted: + var list textList + var deferredEllipsis bool // Add final "..." to indicate records were dropped + for _, r := range recs { + if len(list) == maxLen { + deferredEllipsis = true + break + } + + // Elide struct fields that are zero value. + if k == reflect.Struct { + var isZero bool + switch opts.DiffMode { + case diffIdentical: + isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueY) + case diffRemoved: + isZero = value.IsZero(r.Value.ValueX) + case diffInserted: + isZero = value.IsZero(r.Value.ValueY) + } + if isZero { + continue + } + } + // Elide ignored nodes. + if r.Value.NumIgnored > 0 && r.Value.NumSame+r.Value.NumDiff == 0 { + deferredEllipsis = !(k == reflect.Slice || k == reflect.Array) + if !deferredEllipsis { + list.AppendEllipsis(diffStats{}) + } + continue + } + if out := opts.FormatDiff(r.Value, ptrs); out != nil { + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + } + } + if deferredEllipsis { + list.AppendEllipsis(diffStats{}) + } + return &textWrap{Prefix: "{", Value: list, Suffix: "}"} + case diffUnknown: + default: + panic("invalid diff mode") + } + + // Handle differencing. 
+ var numDiffs int + var list textList + var keys []reflect.Value // invariant: len(list) == len(keys) + groups := coalesceAdjacentRecords(name, recs) + maxGroup := diffStats{Name: name} + for i, ds := range groups { + if maxLen >= 0 && numDiffs >= maxLen { + maxGroup = maxGroup.Append(ds) + continue + } + + // Handle equal records. + if ds.NumDiff() == 0 { + // Compute the number of leading and trailing records to print. + var numLo, numHi int + numEqual := ds.NumIgnored + ds.NumIdentical + for numLo < numContextRecords && numLo+numHi < numEqual && i != 0 { + if r := recs[numLo].Value; r.NumIgnored > 0 && r.NumSame+r.NumDiff == 0 { + break + } + numLo++ + } + for numHi < numContextRecords && numLo+numHi < numEqual && i != len(groups)-1 { + if r := recs[numEqual-numHi-1].Value; r.NumIgnored > 0 && r.NumSame+r.NumDiff == 0 { + break + } + numHi++ + } + if numEqual-(numLo+numHi) == 1 && ds.NumIgnored == 0 { + numHi++ // Avoid pointless coalescing of a single equal record + } + + // Format the equal values. + for _, r := range recs[:numLo] { + out := opts.WithDiffMode(diffIdentical).FormatDiff(r.Value, ptrs) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + } + if numEqual > numLo+numHi { + ds.NumIdentical -= numLo + numHi + list.AppendEllipsis(ds) + for len(keys) < len(list) { + keys = append(keys, reflect.Value{}) + } + } + for _, r := range recs[numEqual-numHi : numEqual] { + out := opts.WithDiffMode(diffIdentical).FormatDiff(r.Value, ptrs) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + } + recs = recs[numEqual:] + continue + } + + // Handle unequal records. + for _, r := range recs[:ds.NumDiff()] { + switch { + case opts.CanFormatDiffSlice(r.Value): + out := opts.FormatDiffSlice(r.Value) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + case r.Value.NumChildren == r.Value.MaxDepth: + outx := opts.WithDiffMode(diffRemoved).FormatDiff(r.Value, ptrs) + outy := opts.WithDiffMode(diffInserted).FormatDiff(r.Value, ptrs) + for i := 0; i <= maxVerbosityPreset && outx != nil && outy != nil && outx.Equal(outy); i++ { + opts2 := verbosityPreset(opts, i) + outx = opts2.WithDiffMode(diffRemoved).FormatDiff(r.Value, ptrs) + outy = opts2.WithDiffMode(diffInserted).FormatDiff(r.Value, ptrs) + } + if outx != nil { + list = append(list, textRecord{Diff: diffRemoved, Key: formatKey(r.Key), Value: outx}) + keys = append(keys, r.Key) + } + if outy != nil { + list = append(list, textRecord{Diff: diffInserted, Key: formatKey(r.Key), Value: outy}) + keys = append(keys, r.Key) + } + default: + out := opts.FormatDiff(r.Value, ptrs) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + } + } + recs = recs[ds.NumDiff():] + numDiffs += ds.NumDiff() + } + if maxGroup.IsZero() { + assert(len(recs) == 0) + } else { + list.AppendEllipsis(maxGroup) + for len(keys) < len(list) { + keys = append(keys, reflect.Value{}) + } + } + assert(len(list) == len(keys)) + + // For maps, the default formatting logic uses fmt.Stringer which may + // produce ambiguous output. Avoid calling String to disambiguate. 
+ if k == reflect.Map { + var ambiguous bool + seenKeys := map[string]reflect.Value{} + for i, currKey := range keys { + if currKey.IsValid() { + strKey := list[i].Key + prevKey, seen := seenKeys[strKey] + if seen && prevKey.CanInterface() && currKey.CanInterface() { + ambiguous = prevKey.Interface() != currKey.Interface() + if ambiguous { + break + } + } + seenKeys[strKey] = currKey + } + } + if ambiguous { + for i, k := range keys { + if k.IsValid() { + list[i].Key = formatMapKey(k, true, ptrs) + } + } + } + } + + return &textWrap{Prefix: "{", Value: list, Suffix: "}"} +} + +// coalesceAdjacentRecords coalesces the list of records into groups of +// adjacent equal, or unequal counts. +func coalesceAdjacentRecords(name string, recs []reportRecord) (groups []diffStats) { + var prevCase int // Arbitrary index into which case last occurred + lastStats := func(i int) *diffStats { + if prevCase != i { + groups = append(groups, diffStats{Name: name}) + prevCase = i + } + return &groups[len(groups)-1] + } + for _, r := range recs { + switch rv := r.Value; { + case rv.NumIgnored > 0 && rv.NumSame+rv.NumDiff == 0: + lastStats(1).NumIgnored++ + case rv.NumDiff == 0: + lastStats(1).NumIdentical++ + case rv.NumDiff > 0 && !rv.ValueY.IsValid(): + lastStats(2).NumRemoved++ + case rv.NumDiff > 0 && !rv.ValueX.IsValid(): + lastStats(2).NumInserted++ + default: + lastStats(2).NumModified++ + } + } + return groups +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_references.go b/vendor/github.com/google/go-cmp/cmp/report_references.go new file mode 100644 index 0000000..be31b33 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_references.go @@ -0,0 +1,264 @@ +// Copyright 2020, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + "strings" + + "github.com/google/go-cmp/cmp/internal/flags" + "github.com/google/go-cmp/cmp/internal/value" +) + +const ( + pointerDelimPrefix = "⟪" + pointerDelimSuffix = "⟫" +) + +// formatPointer prints the address of the pointer. +func formatPointer(p value.Pointer, withDelims bool) string { + v := p.Uintptr() + if flags.Deterministic { + v = 0xdeadf00f // Only used for stable testing purposes + } + if withDelims { + return pointerDelimPrefix + formatHex(uint64(v)) + pointerDelimSuffix + } + return formatHex(uint64(v)) +} + +// pointerReferences is a stack of pointers visited so far. +type pointerReferences [][2]value.Pointer + +func (ps *pointerReferences) PushPair(vx, vy reflect.Value, d diffMode, deref bool) (pp [2]value.Pointer) { + if deref && vx.IsValid() { + vx = vx.Addr() + } + if deref && vy.IsValid() { + vy = vy.Addr() + } + switch d { + case diffUnknown, diffIdentical: + pp = [2]value.Pointer{value.PointerOf(vx), value.PointerOf(vy)} + case diffRemoved: + pp = [2]value.Pointer{value.PointerOf(vx), value.Pointer{}} + case diffInserted: + pp = [2]value.Pointer{value.Pointer{}, value.PointerOf(vy)} + } + *ps = append(*ps, pp) + return pp +} + +func (ps *pointerReferences) Push(v reflect.Value) (p value.Pointer, seen bool) { + p = value.PointerOf(v) + for _, pp := range *ps { + if p == pp[0] || p == pp[1] { + return p, true + } + } + *ps = append(*ps, [2]value.Pointer{p, p}) + return p, false +} + +func (ps *pointerReferences) Pop() { + *ps = (*ps)[:len(*ps)-1] +} + +// trunkReferences is metadata for a textNode indicating that the sub-tree +// represents the value for either pointer in a pair of references. 
+type trunkReferences struct{ pp [2]value.Pointer } + +// trunkReference is metadata for a textNode indicating that the sub-tree +// represents the value for the given pointer reference. +type trunkReference struct{ p value.Pointer } + +// leafReference is metadata for a textNode indicating that the value is +// truncated as it refers to another part of the tree (i.e., a trunk). +type leafReference struct{ p value.Pointer } + +func wrapTrunkReferences(pp [2]value.Pointer, s textNode) textNode { + switch { + case pp[0].IsNil(): + return &textWrap{Value: s, Metadata: trunkReference{pp[1]}} + case pp[1].IsNil(): + return &textWrap{Value: s, Metadata: trunkReference{pp[0]}} + case pp[0] == pp[1]: + return &textWrap{Value: s, Metadata: trunkReference{pp[0]}} + default: + return &textWrap{Value: s, Metadata: trunkReferences{pp}} + } +} +func wrapTrunkReference(p value.Pointer, printAddress bool, s textNode) textNode { + var prefix string + if printAddress { + prefix = formatPointer(p, true) + } + return &textWrap{Prefix: prefix, Value: s, Metadata: trunkReference{p}} +} +func makeLeafReference(p value.Pointer, printAddress bool) textNode { + out := &textWrap{Prefix: "(", Value: textEllipsis, Suffix: ")"} + var prefix string + if printAddress { + prefix = formatPointer(p, true) + } + return &textWrap{Prefix: prefix, Value: out, Metadata: leafReference{p}} +} + +// resolveReferences walks the textNode tree searching for any leaf reference +// metadata and resolves each against the corresponding trunk references. +// Since pointer addresses in memory are not particularly readable to the user, +// it replaces each pointer value with an arbitrary and unique reference ID. +func resolveReferences(s textNode) { + var walkNodes func(textNode, func(textNode)) + walkNodes = func(s textNode, f func(textNode)) { + f(s) + switch s := s.(type) { + case *textWrap: + walkNodes(s.Value, f) + case textList: + for _, r := range s { + walkNodes(r.Value, f) + } + } + } + + // Collect all trunks and leaves with reference metadata. + var trunks, leaves []*textWrap + walkNodes(s, func(s textNode) { + if s, ok := s.(*textWrap); ok { + switch s.Metadata.(type) { + case leafReference: + leaves = append(leaves, s) + case trunkReference, trunkReferences: + trunks = append(trunks, s) + } + } + }) + + // No leaf references to resolve. + if len(leaves) == 0 { + return + } + + // Collect the set of all leaf references to resolve. + leafPtrs := make(map[value.Pointer]bool) + for _, leaf := range leaves { + leafPtrs[leaf.Metadata.(leafReference).p] = true + } + + // Collect the set of trunk pointers that are always paired together. + // This allows us to assign a single ID to both pointers for brevity. + // If a pointer in a pair ever occurs by itself or as a different pair, + // then the pair is broken. + pairedTrunkPtrs := make(map[value.Pointer]value.Pointer) + unpair := func(p value.Pointer) { + if !pairedTrunkPtrs[p].IsNil() { + pairedTrunkPtrs[pairedTrunkPtrs[p]] = value.Pointer{} // invalidate other half + } + pairedTrunkPtrs[p] = value.Pointer{} // invalidate this half + } + for _, trunk := range trunks { + switch p := trunk.Metadata.(type) { + case trunkReference: + unpair(p.p) // standalone pointer cannot be part of a pair + case trunkReferences: + p0, ok0 := pairedTrunkPtrs[p.pp[0]] + p1, ok1 := pairedTrunkPtrs[p.pp[1]] + switch { + case !ok0 && !ok1: + // Register the newly seen pair. 
+ pairedTrunkPtrs[p.pp[0]] = p.pp[1] + pairedTrunkPtrs[p.pp[1]] = p.pp[0] + case ok0 && ok1 && p0 == p.pp[1] && p1 == p.pp[0]: + // Exact pair already seen; do nothing. + default: + // Pair conflicts with some other pair; break all pairs. + unpair(p.pp[0]) + unpair(p.pp[1]) + } + } + } + + // Correlate each pointer referenced by leaves to a unique identifier, + // and print the IDs for each trunk that matches those pointers. + var nextID uint + ptrIDs := make(map[value.Pointer]uint) + newID := func() uint { + id := nextID + nextID++ + return id + } + for _, trunk := range trunks { + switch p := trunk.Metadata.(type) { + case trunkReference: + if print := leafPtrs[p.p]; print { + id, ok := ptrIDs[p.p] + if !ok { + id = newID() + ptrIDs[p.p] = id + } + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id)) + } + case trunkReferences: + print0 := leafPtrs[p.pp[0]] + print1 := leafPtrs[p.pp[1]] + if print0 || print1 { + id0, ok0 := ptrIDs[p.pp[0]] + id1, ok1 := ptrIDs[p.pp[1]] + isPair := pairedTrunkPtrs[p.pp[0]] == p.pp[1] && pairedTrunkPtrs[p.pp[1]] == p.pp[0] + if isPair { + var id uint + assert(ok0 == ok1) // must be seen together or not at all + if ok0 { + assert(id0 == id1) // must have the same ID + id = id0 + } else { + id = newID() + ptrIDs[p.pp[0]] = id + ptrIDs[p.pp[1]] = id + } + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id)) + } else { + if print0 && !ok0 { + id0 = newID() + ptrIDs[p.pp[0]] = id0 + } + if print1 && !ok1 { + id1 = newID() + ptrIDs[p.pp[1]] = id1 + } + switch { + case print0 && print1: + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id0)+","+formatReference(id1)) + case print0: + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id0)) + case print1: + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id1)) + } + } + } + } + } + + // Update all leaf references with the unique identifier. + for _, leaf := range leaves { + if id, ok := ptrIDs[leaf.Metadata.(leafReference).p]; ok { + leaf.Prefix = updateReferencePrefix(leaf.Prefix, formatReference(id)) + } + } +} + +func formatReference(id uint) string { + return fmt.Sprintf("ref#%d", id) +} + +func updateReferencePrefix(prefix, ref string) string { + if prefix == "" { + return pointerDelimPrefix + ref + pointerDelimSuffix + } + suffix := strings.TrimPrefix(prefix, pointerDelimPrefix) + return pointerDelimPrefix + ref + ": " + suffix +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_reflect.go b/vendor/github.com/google/go-cmp/cmp/report_reflect.go new file mode 100644 index 0000000..33f0357 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_reflect.go @@ -0,0 +1,402 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "bytes" + "fmt" + "reflect" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/value" +) + +type formatValueOptions struct { + // AvoidStringer controls whether to avoid calling custom stringer + // methods like error.Error or fmt.Stringer.String. + AvoidStringer bool + + // PrintAddresses controls whether to print the address of all pointers, + // slice elements, and maps. + PrintAddresses bool + + // QualifiedNames controls whether FormatType uses the fully qualified name + // (including the full package path as opposed to just the package name). 
+ QualifiedNames bool + + // VerbosityLevel controls the amount of output to produce. + // A higher value produces more output. A value of zero or lower produces + // no output (represented using an ellipsis). + // If LimitVerbosity is false, then the level is treated as infinite. + VerbosityLevel int + + // LimitVerbosity specifies that formatting should respect VerbosityLevel. + LimitVerbosity bool +} + +// FormatType prints the type as if it were wrapping s. +// This may return s as-is depending on the current type and TypeMode mode. +func (opts formatOptions) FormatType(t reflect.Type, s textNode) textNode { + // Check whether to emit the type or not. + switch opts.TypeMode { + case autoType: + switch t.Kind() { + case reflect.Struct, reflect.Slice, reflect.Array, reflect.Map: + if s.Equal(textNil) { + return s + } + default: + return s + } + if opts.DiffMode == diffIdentical { + return s // elide type for identical nodes + } + case elideType: + return s + } + + // Determine the type label, applying special handling for unnamed types. + typeName := value.TypeString(t, opts.QualifiedNames) + if t.Name() == "" { + // According to Go grammar, certain type literals contain symbols that + // do not strongly bind to the next lexicographical token (e.g., *T). + switch t.Kind() { + case reflect.Chan, reflect.Func, reflect.Ptr: + typeName = "(" + typeName + ")" + } + } + return &textWrap{Prefix: typeName, Value: wrapParens(s)} +} + +// wrapParens wraps s with a set of parenthesis, but avoids it if the +// wrapped node itself is already surrounded by a pair of parenthesis or braces. +// It handles unwrapping one level of pointer-reference nodes. +func wrapParens(s textNode) textNode { + var refNode *textWrap + if s2, ok := s.(*textWrap); ok { + // Unwrap a single pointer reference node. + switch s2.Metadata.(type) { + case leafReference, trunkReference, trunkReferences: + refNode = s2 + if s3, ok := refNode.Value.(*textWrap); ok { + s2 = s3 + } + } + + // Already has delimiters that make parenthesis unnecessary. + hasParens := strings.HasPrefix(s2.Prefix, "(") && strings.HasSuffix(s2.Suffix, ")") + hasBraces := strings.HasPrefix(s2.Prefix, "{") && strings.HasSuffix(s2.Suffix, "}") + if hasParens || hasBraces { + return s + } + } + if refNode != nil { + refNode.Value = &textWrap{Prefix: "(", Value: refNode.Value, Suffix: ")"} + return s + } + return &textWrap{Prefix: "(", Value: s, Suffix: ")"} +} + +// FormatValue prints the reflect.Value, taking extra care to avoid descending +// into pointers already in ptrs. As pointers are visited, ptrs is also updated. +func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, ptrs *pointerReferences) (out textNode) { + if !v.IsValid() { + return nil + } + t := v.Type() + + // Check slice element for cycles. + if parentKind == reflect.Slice { + ptrRef, visited := ptrs.Push(v.Addr()) + if visited { + return makeLeafReference(ptrRef, false) + } + defer ptrs.Pop() + defer func() { out = wrapTrunkReference(ptrRef, false, out) }() + } + + // Check whether there is an Error or String method to call. + if !opts.AvoidStringer && v.CanInterface() { + // Avoid calling Error or String methods on nil receivers since many + // implementations crash when doing so. + if (t.Kind() != reflect.Ptr && t.Kind() != reflect.Interface) || !v.IsNil() { + var prefix, strVal string + func() { + // Swallow and ignore any panics from String or Error. 
+ defer func() { recover() }() + switch v := v.Interface().(type) { + case error: + strVal = v.Error() + prefix = "e" + case fmt.Stringer: + strVal = v.String() + prefix = "s" + } + }() + if prefix != "" { + return opts.formatString(prefix, strVal) + } + } + } + + // Check whether to explicitly wrap the result with the type. + var skipType bool + defer func() { + if !skipType { + out = opts.FormatType(t, out) + } + }() + + switch t.Kind() { + case reflect.Bool: + return textLine(fmt.Sprint(v.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return textLine(fmt.Sprint(v.Int())) + case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return textLine(fmt.Sprint(v.Uint())) + case reflect.Uint8: + if parentKind == reflect.Slice || parentKind == reflect.Array { + return textLine(formatHex(v.Uint())) + } + return textLine(fmt.Sprint(v.Uint())) + case reflect.Uintptr: + return textLine(formatHex(v.Uint())) + case reflect.Float32, reflect.Float64: + return textLine(fmt.Sprint(v.Float())) + case reflect.Complex64, reflect.Complex128: + return textLine(fmt.Sprint(v.Complex())) + case reflect.String: + return opts.formatString("", v.String()) + case reflect.UnsafePointer, reflect.Chan, reflect.Func: + return textLine(formatPointer(value.PointerOf(v), true)) + case reflect.Struct: + var list textList + v := makeAddressable(v) // needed for retrieveUnexportedField + maxLen := v.NumField() + if opts.LimitVerbosity { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + opts.VerbosityLevel-- + } + for i := 0; i < v.NumField(); i++ { + vv := v.Field(i) + if value.IsZero(vv) { + continue // Elide fields with zero values + } + if len(list) == maxLen { + list.AppendEllipsis(diffStats{}) + break + } + sf := t.Field(i) + if supportExporters && !isExported(sf.Name) { + vv = retrieveUnexportedField(v, sf, true) + } + s := opts.WithTypeMode(autoType).FormatValue(vv, t.Kind(), ptrs) + list = append(list, textRecord{Key: sf.Name, Value: s}) + } + return &textWrap{Prefix: "{", Value: list, Suffix: "}"} + case reflect.Slice: + if v.IsNil() { + return textNil + } + + // Check whether this is a []byte of text data. + if t.Elem() == reflect.TypeOf(byte(0)) { + b := v.Bytes() + isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) && unicode.IsSpace(r) } + if len(b) > 0 && utf8.Valid(b) && len(bytes.TrimFunc(b, isPrintSpace)) == 0 { + out = opts.formatString("", string(b)) + return opts.WithTypeMode(emitType).FormatType(t, out) + } + } + + fallthrough + case reflect.Array: + maxLen := v.Len() + if opts.LimitVerbosity { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + opts.VerbosityLevel-- + } + var list textList + for i := 0; i < v.Len(); i++ { + if len(list) == maxLen { + list.AppendEllipsis(diffStats{}) + break + } + s := opts.WithTypeMode(elideType).FormatValue(v.Index(i), t.Kind(), ptrs) + list = append(list, textRecord{Value: s}) + } + + out = &textWrap{Prefix: "{", Value: list, Suffix: "}"} + if t.Kind() == reflect.Slice && opts.PrintAddresses { + header := fmt.Sprintf("ptr:%v, len:%d, cap:%d", formatPointer(value.PointerOf(v), false), v.Len(), v.Cap()) + out = &textWrap{Prefix: pointerDelimPrefix + header + pointerDelimSuffix, Value: out} + } + return out + case reflect.Map: + if v.IsNil() { + return textNil + } + + // Check pointer for cycles. 
+ ptrRef, visited := ptrs.Push(v) + if visited { + return makeLeafReference(ptrRef, opts.PrintAddresses) + } + defer ptrs.Pop() + + maxLen := v.Len() + if opts.LimitVerbosity { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + opts.VerbosityLevel-- + } + var list textList + for _, k := range value.SortKeys(v.MapKeys()) { + if len(list) == maxLen { + list.AppendEllipsis(diffStats{}) + break + } + sk := formatMapKey(k, false, ptrs) + sv := opts.WithTypeMode(elideType).FormatValue(v.MapIndex(k), t.Kind(), ptrs) + list = append(list, textRecord{Key: sk, Value: sv}) + } + + out = &textWrap{Prefix: "{", Value: list, Suffix: "}"} + out = wrapTrunkReference(ptrRef, opts.PrintAddresses, out) + return out + case reflect.Ptr: + if v.IsNil() { + return textNil + } + + // Check pointer for cycles. + ptrRef, visited := ptrs.Push(v) + if visited { + out = makeLeafReference(ptrRef, opts.PrintAddresses) + return &textWrap{Prefix: "&", Value: out} + } + defer ptrs.Pop() + + skipType = true // Let the underlying value print the type instead + out = opts.FormatValue(v.Elem(), t.Kind(), ptrs) + out = wrapTrunkReference(ptrRef, opts.PrintAddresses, out) + out = &textWrap{Prefix: "&", Value: out} + return out + case reflect.Interface: + if v.IsNil() { + return textNil + } + // Interfaces accept different concrete types, + // so configure the underlying value to explicitly print the type. + skipType = true // Print the concrete type instead + return opts.WithTypeMode(emitType).FormatValue(v.Elem(), t.Kind(), ptrs) + default: + panic(fmt.Sprintf("%v kind not handled", v.Kind())) + } +} + +func (opts formatOptions) formatString(prefix, s string) textNode { + maxLen := len(s) + maxLines := strings.Count(s, "\n") + 1 + if opts.LimitVerbosity { + maxLen = (1 << opts.verbosity()) << 5 // 32, 64, 128, 256, etc... + maxLines = (1 << opts.verbosity()) << 2 // 4, 8, 16, 32, 64, etc... + } + + // For multiline strings, use the triple-quote syntax, + // but only use it when printing removed or inserted nodes since + // we only want the extra verbosity for those cases. + lines := strings.Split(strings.TrimSuffix(s, "\n"), "\n") + isTripleQuoted := len(lines) >= 4 && (opts.DiffMode == '-' || opts.DiffMode == '+') + for i := 0; i < len(lines) && isTripleQuoted; i++ { + lines[i] = strings.TrimPrefix(strings.TrimSuffix(lines[i], "\r"), "\r") // trim leading/trailing carriage returns for legacy Windows endline support + isPrintable := func(r rune) bool { + return unicode.IsPrint(r) || r == '\t' // specially treat tab as printable + } + line := lines[i] + isTripleQuoted = !strings.HasPrefix(strings.TrimPrefix(line, prefix), `"""`) && !strings.HasPrefix(line, "...") && strings.TrimFunc(line, isPrintable) == "" && len(line) <= maxLen + } + if isTripleQuoted { + var list textList + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(prefix + `"""`), ElideComma: true}) + for i, line := range lines { + if numElided := len(lines) - i; i == maxLines-1 && numElided > 1 { + comment := commentString(fmt.Sprintf("%d elided lines", numElided)) + list = append(list, textRecord{Diff: opts.DiffMode, Value: textEllipsis, ElideComma: true, Comment: comment}) + break + } + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(line), ElideComma: true}) + } + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(prefix + `"""`), ElideComma: true}) + return &textWrap{Prefix: "(", Value: list, Suffix: ")"} + } + + // Format the string as a single-line quoted string. 
+ if len(s) > maxLen+len(textEllipsis) { + return textLine(prefix + formatString(s[:maxLen]) + string(textEllipsis)) + } + return textLine(prefix + formatString(s)) +} + +// formatMapKey formats v as if it were a map key. +// The result is guaranteed to be a single line. +func formatMapKey(v reflect.Value, disambiguate bool, ptrs *pointerReferences) string { + var opts formatOptions + opts.DiffMode = diffIdentical + opts.TypeMode = elideType + opts.PrintAddresses = disambiguate + opts.AvoidStringer = disambiguate + opts.QualifiedNames = disambiguate + opts.VerbosityLevel = maxVerbosityPreset + opts.LimitVerbosity = true + s := opts.FormatValue(v, reflect.Map, ptrs).String() + return strings.TrimSpace(s) +} + +// formatString prints s as a double-quoted or backtick-quoted string. +func formatString(s string) string { + // Use quoted string if it the same length as a raw string literal. + // Otherwise, attempt to use the raw string form. + qs := strconv.Quote(s) + if len(qs) == 1+len(s)+1 { + return qs + } + + // Disallow newlines to ensure output is a single line. + // Only allow printable runes for readability purposes. + rawInvalid := func(r rune) bool { + return r == '`' || r == '\n' || !(unicode.IsPrint(r) || r == '\t') + } + if utf8.ValidString(s) && strings.IndexFunc(s, rawInvalid) < 0 { + return "`" + s + "`" + } + return qs +} + +// formatHex prints u as a hexadecimal integer in Go notation. +func formatHex(u uint64) string { + var f string + switch { + case u <= 0xff: + f = "0x%02x" + case u <= 0xffff: + f = "0x%04x" + case u <= 0xffffff: + f = "0x%06x" + case u <= 0xffffffff: + f = "0x%08x" + case u <= 0xffffffffff: + f = "0x%010x" + case u <= 0xffffffffffff: + f = "0x%012x" + case u <= 0xffffffffffffff: + f = "0x%014x" + case u <= 0xffffffffffffffff: + f = "0x%016x" + } + return fmt.Sprintf(f, u) +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_slices.go b/vendor/github.com/google/go-cmp/cmp/report_slices.go new file mode 100644 index 0000000..2ad3bc8 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_slices.go @@ -0,0 +1,613 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "bytes" + "fmt" + "math" + "reflect" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/diff" +) + +// CanFormatDiffSlice reports whether we support custom formatting for nodes +// that are slices of primitive kinds or strings. +func (opts formatOptions) CanFormatDiffSlice(v *valueNode) bool { + switch { + case opts.DiffMode != diffUnknown: + return false // Must be formatting in diff mode + case v.NumDiff == 0: + return false // No differences detected + case !v.ValueX.IsValid() || !v.ValueY.IsValid(): + return false // Both values must be valid + case v.NumIgnored > 0: + return false // Some ignore option was used + case v.NumTransformed > 0: + return false // Some transform option was used + case v.NumCompared > 1: + return false // More than one comparison was used + case v.NumCompared == 1 && v.Type.Name() != "": + // The need for cmp to check applicability of options on every element + // in a slice is a significant performance detriment for large []byte. + // The workaround is to specify Comparer(bytes.Equal), + // which enables cmp to compare []byte more efficiently. + // If they differ, we still want to provide batched diffing. 
+ // The logic disallows named types since they tend to have their own + // String method, with nicer formatting than what this provides. + return false + } + + // Check whether this is an interface with the same concrete types. + t := v.Type + vx, vy := v.ValueX, v.ValueY + if t.Kind() == reflect.Interface && !vx.IsNil() && !vy.IsNil() && vx.Elem().Type() == vy.Elem().Type() { + vx, vy = vx.Elem(), vy.Elem() + t = vx.Type() + } + + // Check whether we provide specialized diffing for this type. + switch t.Kind() { + case reflect.String: + case reflect.Array, reflect.Slice: + // Only slices of primitive types have specialized handling. + switch t.Elem().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, + reflect.Bool, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: + default: + return false + } + + // Both slice values have to be non-empty. + if t.Kind() == reflect.Slice && (vx.Len() == 0 || vy.Len() == 0) { + return false + } + + // If a sufficient number of elements already differ, + // use specialized formatting even if length requirement is not met. + if v.NumDiff > v.NumSame { + return true + } + default: + return false + } + + // Use specialized string diffing for longer slices or strings. + const minLength = 64 + return vx.Len() >= minLength && vy.Len() >= minLength +} + +// FormatDiffSlice prints a diff for the slices (or strings) represented by v. +// This provides custom-tailored logic to make printing of differences in +// textual strings and slices of primitive kinds more readable. +func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode { + assert(opts.DiffMode == diffUnknown) + t, vx, vy := v.Type, v.ValueX, v.ValueY + if t.Kind() == reflect.Interface { + vx, vy = vx.Elem(), vy.Elem() + t = vx.Type() + opts = opts.WithTypeMode(emitType) + } + + // Auto-detect the type of the data. + var sx, sy string + var ssx, ssy []string + var isString, isMostlyText, isPureLinedText, isBinary bool + switch { + case t.Kind() == reflect.String: + sx, sy = vx.String(), vy.String() + isString = true + case t.Kind() == reflect.Slice && t.Elem() == reflect.TypeOf(byte(0)): + sx, sy = string(vx.Bytes()), string(vy.Bytes()) + isString = true + case t.Kind() == reflect.Array: + // Arrays need to be addressable for slice operations to work. + vx2, vy2 := reflect.New(t).Elem(), reflect.New(t).Elem() + vx2.Set(vx) + vy2.Set(vy) + vx, vy = vx2, vy2 + } + if isString { + var numTotalRunes, numValidRunes, numLines, lastLineIdx, maxLineLen int + for i, r := range sx + sy { + numTotalRunes++ + if (unicode.IsPrint(r) || unicode.IsSpace(r)) && r != utf8.RuneError { + numValidRunes++ + } + if r == '\n' { + if maxLineLen < i-lastLineIdx { + maxLineLen = i - lastLineIdx + } + lastLineIdx = i + 1 + numLines++ + } + } + isPureText := numValidRunes == numTotalRunes + isMostlyText = float64(numValidRunes) > math.Floor(0.90*float64(numTotalRunes)) + isPureLinedText = isPureText && numLines >= 4 && maxLineLen <= 1024 + isBinary = !isMostlyText + + // Avoid diffing by lines if it produces a significantly more complex + // edit script than diffing by bytes. 
+ if isPureLinedText { + ssx = strings.Split(sx, "\n") + ssy = strings.Split(sy, "\n") + esLines := diff.Difference(len(ssx), len(ssy), func(ix, iy int) diff.Result { + return diff.BoolResult(ssx[ix] == ssy[iy]) + }) + esBytes := diff.Difference(len(sx), len(sy), func(ix, iy int) diff.Result { + return diff.BoolResult(sx[ix] == sy[iy]) + }) + efficiencyLines := float64(esLines.Dist()) / float64(len(esLines)) + efficiencyBytes := float64(esBytes.Dist()) / float64(len(esBytes)) + isPureLinedText = efficiencyLines < 4*efficiencyBytes + } + } + + // Format the string into printable records. + var list textList + var delim string + switch { + // If the text appears to be multi-lined text, + // then perform differencing across individual lines. + case isPureLinedText: + list = opts.formatDiffSlice( + reflect.ValueOf(ssx), reflect.ValueOf(ssy), 1, "line", + func(v reflect.Value, d diffMode) textRecord { + s := formatString(v.Index(0).String()) + return textRecord{Diff: d, Value: textLine(s)} + }, + ) + delim = "\n" + + // If possible, use a custom triple-quote (""") syntax for printing + // differences in a string literal. This format is more readable, + // but has edge-cases where differences are visually indistinguishable. + // This format is avoided under the following conditions: + // • A line starts with `"""` + // • A line starts with "..." + // • A line contains non-printable characters + // • Adjacent different lines differ only by whitespace + // + // For example: + // """ + // ... // 3 identical lines + // foo + // bar + // - baz + // + BAZ + // """ + isTripleQuoted := true + prevRemoveLines := map[string]bool{} + prevInsertLines := map[string]bool{} + var list2 textList + list2 = append(list2, textRecord{Value: textLine(`"""`), ElideComma: true}) + for _, r := range list { + if !r.Value.Equal(textEllipsis) { + line, _ := strconv.Unquote(string(r.Value.(textLine))) + line = strings.TrimPrefix(strings.TrimSuffix(line, "\r"), "\r") // trim leading/trailing carriage returns for legacy Windows endline support + normLine := strings.Map(func(r rune) rune { + if unicode.IsSpace(r) { + return -1 // drop whitespace to avoid visually indistinguishable output + } + return r + }, line) + isPrintable := func(r rune) bool { + return unicode.IsPrint(r) || r == '\t' // specially treat tab as printable + } + isTripleQuoted = !strings.HasPrefix(line, `"""`) && !strings.HasPrefix(line, "...") && strings.TrimFunc(line, isPrintable) == "" + switch r.Diff { + case diffRemoved: + isTripleQuoted = isTripleQuoted && !prevInsertLines[normLine] + prevRemoveLines[normLine] = true + case diffInserted: + isTripleQuoted = isTripleQuoted && !prevRemoveLines[normLine] + prevInsertLines[normLine] = true + } + if !isTripleQuoted { + break + } + r.Value = textLine(line) + r.ElideComma = true + } + if !(r.Diff == diffRemoved || r.Diff == diffInserted) { // start a new non-adjacent difference group + prevRemoveLines = map[string]bool{} + prevInsertLines = map[string]bool{} + } + list2 = append(list2, r) + } + if r := list2[len(list2)-1]; r.Diff == diffIdentical && len(r.Value.(textLine)) == 0 { + list2 = list2[:len(list2)-1] // elide single empty line at the end + } + list2 = append(list2, textRecord{Value: textLine(`"""`), ElideComma: true}) + if isTripleQuoted { + var out textNode = &textWrap{Prefix: "(", Value: list2, Suffix: ")"} + switch t.Kind() { + case reflect.String: + if t != reflect.TypeOf(string("")) { + out = opts.FormatType(t, out) + } + case reflect.Slice: + // Always emit type for slices since the 
triple-quote syntax + // looks like a string (not a slice). + opts = opts.WithTypeMode(emitType) + out = opts.FormatType(t, out) + } + return out + } + + // If the text appears to be single-lined text, + // then perform differencing in approximately fixed-sized chunks. + // The output is printed as quoted strings. + case isMostlyText: + list = opts.formatDiffSlice( + reflect.ValueOf(sx), reflect.ValueOf(sy), 64, "byte", + func(v reflect.Value, d diffMode) textRecord { + s := formatString(v.String()) + return textRecord{Diff: d, Value: textLine(s)} + }, + ) + + // If the text appears to be binary data, + // then perform differencing in approximately fixed-sized chunks. + // The output is inspired by hexdump. + case isBinary: + list = opts.formatDiffSlice( + reflect.ValueOf(sx), reflect.ValueOf(sy), 16, "byte", + func(v reflect.Value, d diffMode) textRecord { + var ss []string + for i := 0; i < v.Len(); i++ { + ss = append(ss, formatHex(v.Index(i).Uint())) + } + s := strings.Join(ss, ", ") + comment := commentString(fmt.Sprintf("%c|%v|", d, formatASCII(v.String()))) + return textRecord{Diff: d, Value: textLine(s), Comment: comment} + }, + ) + + // For all other slices of primitive types, + // then perform differencing in approximately fixed-sized chunks. + // The size of each chunk depends on the width of the element kind. + default: + var chunkSize int + if t.Elem().Kind() == reflect.Bool { + chunkSize = 16 + } else { + switch t.Elem().Bits() { + case 8: + chunkSize = 16 + case 16: + chunkSize = 12 + case 32: + chunkSize = 8 + default: + chunkSize = 8 + } + } + list = opts.formatDiffSlice( + vx, vy, chunkSize, t.Elem().Kind().String(), + func(v reflect.Value, d diffMode) textRecord { + var ss []string + for i := 0; i < v.Len(); i++ { + switch t.Elem().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + ss = append(ss, fmt.Sprint(v.Index(i).Int())) + case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: + ss = append(ss, fmt.Sprint(v.Index(i).Uint())) + case reflect.Uint8, reflect.Uintptr: + ss = append(ss, formatHex(v.Index(i).Uint())) + case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: + ss = append(ss, fmt.Sprint(v.Index(i).Interface())) + } + } + s := strings.Join(ss, ", ") + return textRecord{Diff: d, Value: textLine(s)} + }, + ) + } + + // Wrap the output with appropriate type information. + var out textNode = &textWrap{Prefix: "{", Value: list, Suffix: "}"} + if !isMostlyText { + // The "{...}" byte-sequence literal is not valid Go syntax for strings. + // Emit the type for extra clarity (e.g. "string{...}"). + if t.Kind() == reflect.String { + opts = opts.WithTypeMode(emitType) + } + return opts.FormatType(t, out) + } + switch t.Kind() { + case reflect.String: + out = &textWrap{Prefix: "strings.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)} + if t != reflect.TypeOf(string("")) { + out = opts.FormatType(t, out) + } + case reflect.Slice: + out = &textWrap{Prefix: "bytes.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)} + if t != reflect.TypeOf([]byte(nil)) { + out = opts.FormatType(t, out) + } + } + return out +} + +// formatASCII formats s as an ASCII string. +// This is useful for printing binary strings in a semi-legible way. 
+func formatASCII(s string) string { + b := bytes.Repeat([]byte{'.'}, len(s)) + for i := 0; i < len(s); i++ { + if ' ' <= s[i] && s[i] <= '~' { + b[i] = s[i] + } + } + return string(b) +} + +func (opts formatOptions) formatDiffSlice( + vx, vy reflect.Value, chunkSize int, name string, + makeRec func(reflect.Value, diffMode) textRecord, +) (list textList) { + eq := func(ix, iy int) bool { + return vx.Index(ix).Interface() == vy.Index(iy).Interface() + } + es := diff.Difference(vx.Len(), vy.Len(), func(ix, iy int) diff.Result { + return diff.BoolResult(eq(ix, iy)) + }) + + appendChunks := func(v reflect.Value, d diffMode) int { + n0 := v.Len() + for v.Len() > 0 { + n := chunkSize + if n > v.Len() { + n = v.Len() + } + list = append(list, makeRec(v.Slice(0, n), d)) + v = v.Slice(n, v.Len()) + } + return n0 - v.Len() + } + + var numDiffs int + maxLen := -1 + if opts.LimitVerbosity { + maxLen = (1 << opts.verbosity()) << 2 // 4, 8, 16, 32, 64, etc... + opts.VerbosityLevel-- + } + + groups := coalesceAdjacentEdits(name, es) + groups = coalesceInterveningIdentical(groups, chunkSize/4) + groups = cleanupSurroundingIdentical(groups, eq) + maxGroup := diffStats{Name: name} + for i, ds := range groups { + if maxLen >= 0 && numDiffs >= maxLen { + maxGroup = maxGroup.Append(ds) + continue + } + + // Print equal. + if ds.NumDiff() == 0 { + // Compute the number of leading and trailing equal bytes to print. + var numLo, numHi int + numEqual := ds.NumIgnored + ds.NumIdentical + for numLo < chunkSize*numContextRecords && numLo+numHi < numEqual && i != 0 { + numLo++ + } + for numHi < chunkSize*numContextRecords && numLo+numHi < numEqual && i != len(groups)-1 { + numHi++ + } + if numEqual-(numLo+numHi) <= chunkSize && ds.NumIgnored == 0 { + numHi = numEqual - numLo // Avoid pointless coalescing of single equal row + } + + // Print the equal bytes. + appendChunks(vx.Slice(0, numLo), diffIdentical) + if numEqual > numLo+numHi { + ds.NumIdentical -= numLo + numHi + list.AppendEllipsis(ds) + } + appendChunks(vx.Slice(numEqual-numHi, numEqual), diffIdentical) + vx = vx.Slice(numEqual, vx.Len()) + vy = vy.Slice(numEqual, vy.Len()) + continue + } + + // Print unequal. + len0 := len(list) + nx := appendChunks(vx.Slice(0, ds.NumIdentical+ds.NumRemoved+ds.NumModified), diffRemoved) + vx = vx.Slice(nx, vx.Len()) + ny := appendChunks(vy.Slice(0, ds.NumIdentical+ds.NumInserted+ds.NumModified), diffInserted) + vy = vy.Slice(ny, vy.Len()) + numDiffs += len(list) - len0 + } + if maxGroup.IsZero() { + assert(vx.Len() == 0 && vy.Len() == 0) + } else { + list.AppendEllipsis(maxGroup) + } + return list +} + +// coalesceAdjacentEdits coalesces the list of edits into groups of adjacent +// equal or unequal counts. 
+// +// Example: +// +// Input: "..XXY...Y" +// Output: [ +// {NumIdentical: 2}, +// {NumRemoved: 2, NumInserted 1}, +// {NumIdentical: 3}, +// {NumInserted: 1}, +// ] +// +func coalesceAdjacentEdits(name string, es diff.EditScript) (groups []diffStats) { + var prevMode byte + lastStats := func(mode byte) *diffStats { + if prevMode != mode { + groups = append(groups, diffStats{Name: name}) + prevMode = mode + } + return &groups[len(groups)-1] + } + for _, e := range es { + switch e { + case diff.Identity: + lastStats('=').NumIdentical++ + case diff.UniqueX: + lastStats('!').NumRemoved++ + case diff.UniqueY: + lastStats('!').NumInserted++ + case diff.Modified: + lastStats('!').NumModified++ + } + } + return groups +} + +// coalesceInterveningIdentical coalesces sufficiently short (<= windowSize) +// equal groups into adjacent unequal groups that currently result in a +// dual inserted/removed printout. This acts as a high-pass filter to smooth +// out high-frequency changes within the windowSize. +// +// Example: +// +// WindowSize: 16, +// Input: [ +// {NumIdentical: 61}, // group 0 +// {NumRemoved: 3, NumInserted: 1}, // group 1 +// {NumIdentical: 6}, // ├── coalesce +// {NumInserted: 2}, // ├── coalesce +// {NumIdentical: 1}, // ├── coalesce +// {NumRemoved: 9}, // └── coalesce +// {NumIdentical: 64}, // group 2 +// {NumRemoved: 3, NumInserted: 1}, // group 3 +// {NumIdentical: 6}, // ├── coalesce +// {NumInserted: 2}, // ├── coalesce +// {NumIdentical: 1}, // ├── coalesce +// {NumRemoved: 7}, // ├── coalesce +// {NumIdentical: 1}, // ├── coalesce +// {NumRemoved: 2}, // └── coalesce +// {NumIdentical: 63}, // group 4 +// ] +// Output: [ +// {NumIdentical: 61}, +// {NumIdentical: 7, NumRemoved: 12, NumInserted: 3}, +// {NumIdentical: 64}, +// {NumIdentical: 8, NumRemoved: 12, NumInserted: 3}, +// {NumIdentical: 63}, +// ] +// +func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStats { + groups, groupsOrig := groups[:0], groups + for i, ds := range groupsOrig { + if len(groups) >= 2 && ds.NumDiff() > 0 { + prev := &groups[len(groups)-2] // Unequal group + curr := &groups[len(groups)-1] // Equal group + next := &groupsOrig[i] // Unequal group + hadX, hadY := prev.NumRemoved > 0, prev.NumInserted > 0 + hasX, hasY := next.NumRemoved > 0, next.NumInserted > 0 + if ((hadX || hasX) && (hadY || hasY)) && curr.NumIdentical <= windowSize { + *prev = prev.Append(*curr).Append(*next) + groups = groups[:len(groups)-1] // Truncate off equal group + continue + } + } + groups = append(groups, ds) + } + return groups +} + +// cleanupSurroundingIdentical scans through all unequal groups, and +// moves any leading sequence of equal elements to the preceding equal group and +// moves and trailing sequence of equal elements to the succeeding equal group. +// +// This is necessary since coalesceInterveningIdentical may coalesce edit groups +// together such that leading/trailing spans of equal elements becomes possible. +// Note that this can occur even with an optimal diffing algorithm. 
+// +// Example: +// +// Input: [ +// {NumIdentical: 61}, +// {NumIdentical: 1 , NumRemoved: 11, NumInserted: 2}, // assume 3 leading identical elements +// {NumIdentical: 67}, +// {NumIdentical: 7, NumRemoved: 12, NumInserted: 3}, // assume 10 trailing identical elements +// {NumIdentical: 54}, +// ] +// Output: [ +// {NumIdentical: 64}, // incremented by 3 +// {NumRemoved: 9}, +// {NumIdentical: 67}, +// {NumRemoved: 9}, +// {NumIdentical: 64}, // incremented by 10 +// ] +// +func cleanupSurroundingIdentical(groups []diffStats, eq func(i, j int) bool) []diffStats { + var ix, iy int // indexes into sequence x and y + for i, ds := range groups { + // Handle equal group. + if ds.NumDiff() == 0 { + ix += ds.NumIdentical + iy += ds.NumIdentical + continue + } + + // Handle unequal group. + nx := ds.NumIdentical + ds.NumRemoved + ds.NumModified + ny := ds.NumIdentical + ds.NumInserted + ds.NumModified + var numLeadingIdentical, numTrailingIdentical int + for i := 0; i < nx && i < ny && eq(ix+i, iy+i); i++ { + numLeadingIdentical++ + } + for i := 0; i < nx && i < ny && eq(ix+nx-1-i, iy+ny-1-i); i++ { + numTrailingIdentical++ + } + if numIdentical := numLeadingIdentical + numTrailingIdentical; numIdentical > 0 { + if numLeadingIdentical > 0 { + // Remove leading identical span from this group and + // insert it into the preceding group. + if i-1 >= 0 { + groups[i-1].NumIdentical += numLeadingIdentical + } else { + // No preceding group exists, so prepend a new group, + // but do so after we finish iterating over all groups. + defer func() { + groups = append([]diffStats{{Name: groups[0].Name, NumIdentical: numLeadingIdentical}}, groups...) + }() + } + // Increment indexes since the preceding group would have handled this. + ix += numLeadingIdentical + iy += numLeadingIdentical + } + if numTrailingIdentical > 0 { + // Remove trailing identical span from this group and + // insert it into the succeeding group. + if i+1 < len(groups) { + groups[i+1].NumIdentical += numTrailingIdentical + } else { + // No succeeding group exists, so append a new group, + // but do so after we finish iterating over all groups. + defer func() { + groups = append(groups, diffStats{Name: groups[len(groups)-1].Name, NumIdentical: numTrailingIdentical}) + }() + } + // Do not increment indexes since the succeeding group will handle this. + } + + // Update this group since some identical elements were removed. + nx -= numIdentical + ny -= numIdentical + groups[i] = diffStats{Name: ds.Name, NumRemoved: nx, NumInserted: ny} + } + ix += nx + iy += ny + } + return groups +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_text.go b/vendor/github.com/google/go-cmp/cmp/report_text.go new file mode 100644 index 0000000..0fd46d7 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_text.go @@ -0,0 +1,431 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "bytes" + "fmt" + "math/rand" + "strings" + "time" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/flags" +) + +var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0 + +const maxColumnLength = 80 + +type indentMode int + +func (n indentMode) appendIndent(b []byte, d diffMode) []byte { + // The output of Diff is documented as being unstable to provide future + // flexibility in changing the output for more humanly readable reports. 
+ // This logic intentionally introduces instability to the exact output + // so that users can detect accidental reliance on stability early on, + // rather than much later when an actual change to the format occurs. + if flags.Deterministic || randBool { + // Use regular spaces (U+0020). + switch d { + case diffUnknown, diffIdentical: + b = append(b, " "...) + case diffRemoved: + b = append(b, "- "...) + case diffInserted: + b = append(b, "+ "...) + } + } else { + // Use non-breaking spaces (U+00a0). + switch d { + case diffUnknown, diffIdentical: + b = append(b, "  "...) + case diffRemoved: + b = append(b, "- "...) + case diffInserted: + b = append(b, "+ "...) + } + } + return repeatCount(n).appendChar(b, '\t') +} + +type repeatCount int + +func (n repeatCount) appendChar(b []byte, c byte) []byte { + for ; n > 0; n-- { + b = append(b, c) + } + return b +} + +// textNode is a simplified tree-based representation of structured text. +// Possible node types are textWrap, textList, or textLine. +type textNode interface { + // Len reports the length in bytes of a single-line version of the tree. + // Nested textRecord.Diff and textRecord.Comment fields are ignored. + Len() int + // Equal reports whether the two trees are structurally identical. + // Nested textRecord.Diff and textRecord.Comment fields are compared. + Equal(textNode) bool + // String returns the string representation of the text tree. + // It is not guaranteed that len(x.String()) == x.Len(), + // nor that x.String() == y.String() implies that x.Equal(y). + String() string + + // formatCompactTo formats the contents of the tree as a single-line string + // to the provided buffer. Any nested textRecord.Diff and textRecord.Comment + // fields are ignored. + // + // However, not all nodes in the tree should be collapsed as a single-line. + // If a node can be collapsed as a single-line, it is replaced by a textLine + // node. Since the top-level node cannot replace itself, this also returns + // the current node itself. + // + // This does not mutate the receiver. + formatCompactTo([]byte, diffMode) ([]byte, textNode) + // formatExpandedTo formats the contents of the tree as a multi-line string + // to the provided buffer. In order for column alignment to operate well, + // formatCompactTo must be called before calling formatExpandedTo. + formatExpandedTo([]byte, diffMode, indentMode) []byte +} + +// textWrap is a wrapper that concatenates a prefix and/or a suffix +// to the underlying node. +type textWrap struct { + Prefix string // e.g., "bytes.Buffer{" + Value textNode // textWrap | textList | textLine + Suffix string // e.g., "}" + Metadata interface{} // arbitrary metadata; has no effect on formatting +} + +func (s *textWrap) Len() int { + return len(s.Prefix) + s.Value.Len() + len(s.Suffix) +} +func (s1 *textWrap) Equal(s2 textNode) bool { + if s2, ok := s2.(*textWrap); ok { + return s1.Prefix == s2.Prefix && s1.Value.Equal(s2.Value) && s1.Suffix == s2.Suffix + } + return false +} +func (s *textWrap) String() string { + var d diffMode + var n indentMode + _, s2 := s.formatCompactTo(nil, d) + b := n.appendIndent(nil, d) // Leading indent + b = s2.formatExpandedTo(b, d, n) // Main body + b = append(b, '\n') // Trailing newline + return string(b) +} +func (s *textWrap) formatCompactTo(b []byte, d diffMode) ([]byte, textNode) { + n0 := len(b) // Original buffer length + b = append(b, s.Prefix...) + b, s.Value = s.Value.formatCompactTo(b, d) + b = append(b, s.Suffix...) 
+ if _, ok := s.Value.(textLine); ok { + return b, textLine(b[n0:]) + } + return b, s +} +func (s *textWrap) formatExpandedTo(b []byte, d diffMode, n indentMode) []byte { + b = append(b, s.Prefix...) + b = s.Value.formatExpandedTo(b, d, n) + b = append(b, s.Suffix...) + return b +} + +// textList is a comma-separated list of textWrap or textLine nodes. +// The list may be formatted as multi-lines or single-line at the discretion +// of the textList.formatCompactTo method. +type textList []textRecord +type textRecord struct { + Diff diffMode // e.g., 0 or '-' or '+' + Key string // e.g., "MyField" + Value textNode // textWrap | textLine + ElideComma bool // avoid trailing comma + Comment fmt.Stringer // e.g., "6 identical fields" +} + +// AppendEllipsis appends a new ellipsis node to the list if none already +// exists at the end. If cs is non-zero it coalesces the statistics with the +// previous diffStats. +func (s *textList) AppendEllipsis(ds diffStats) { + hasStats := !ds.IsZero() + if len(*s) == 0 || !(*s)[len(*s)-1].Value.Equal(textEllipsis) { + if hasStats { + *s = append(*s, textRecord{Value: textEllipsis, ElideComma: true, Comment: ds}) + } else { + *s = append(*s, textRecord{Value: textEllipsis, ElideComma: true}) + } + return + } + if hasStats { + (*s)[len(*s)-1].Comment = (*s)[len(*s)-1].Comment.(diffStats).Append(ds) + } +} + +func (s textList) Len() (n int) { + for i, r := range s { + n += len(r.Key) + if r.Key != "" { + n += len(": ") + } + n += r.Value.Len() + if i < len(s)-1 { + n += len(", ") + } + } + return n +} + +func (s1 textList) Equal(s2 textNode) bool { + if s2, ok := s2.(textList); ok { + if len(s1) != len(s2) { + return false + } + for i := range s1 { + r1, r2 := s1[i], s2[i] + if !(r1.Diff == r2.Diff && r1.Key == r2.Key && r1.Value.Equal(r2.Value) && r1.Comment == r2.Comment) { + return false + } + } + return true + } + return false +} + +func (s textList) String() string { + return (&textWrap{Prefix: "{", Value: s, Suffix: "}"}).String() +} + +func (s textList) formatCompactTo(b []byte, d diffMode) ([]byte, textNode) { + s = append(textList(nil), s...) // Avoid mutating original + + // Determine whether we can collapse this list as a single line. + n0 := len(b) // Original buffer length + var multiLine bool + for i, r := range s { + if r.Diff == diffInserted || r.Diff == diffRemoved { + multiLine = true + } + b = append(b, r.Key...) + if r.Key != "" { + b = append(b, ": "...) + } + b, s[i].Value = r.Value.formatCompactTo(b, d|r.Diff) + if _, ok := s[i].Value.(textLine); !ok { + multiLine = true + } + if r.Comment != nil { + multiLine = true + } + if i < len(s)-1 { + b = append(b, ", "...) + } + } + // Force multi-lined output when printing a removed/inserted node that + // is sufficiently long. + if (d == diffInserted || d == diffRemoved) && len(b[n0:]) > maxColumnLength { + multiLine = true + } + if !multiLine { + return b, textLine(b[n0:]) + } + return b, s +} + +func (s textList) formatExpandedTo(b []byte, d diffMode, n indentMode) []byte { + alignKeyLens := s.alignLens( + func(r textRecord) bool { + _, isLine := r.Value.(textLine) + return r.Key == "" || !isLine + }, + func(r textRecord) int { return utf8.RuneCountInString(r.Key) }, + ) + alignValueLens := s.alignLens( + func(r textRecord) bool { + _, isLine := r.Value.(textLine) + return !isLine || r.Value.Equal(textEllipsis) || r.Comment == nil + }, + func(r textRecord) int { return utf8.RuneCount(r.Value.(textLine)) }, + ) + + // Format lists of simple lists in a batched form. 
+ // If the list is sequence of only textLine values, + // then batch multiple values on a single line. + var isSimple bool + for _, r := range s { + _, isLine := r.Value.(textLine) + isSimple = r.Diff == 0 && r.Key == "" && isLine && r.Comment == nil + if !isSimple { + break + } + } + if isSimple { + n++ + var batch []byte + emitBatch := func() { + if len(batch) > 0 { + b = n.appendIndent(append(b, '\n'), d) + b = append(b, bytes.TrimRight(batch, " ")...) + batch = batch[:0] + } + } + for _, r := range s { + line := r.Value.(textLine) + if len(batch)+len(line)+len(", ") > maxColumnLength { + emitBatch() + } + batch = append(batch, line...) + batch = append(batch, ", "...) + } + emitBatch() + n-- + return n.appendIndent(append(b, '\n'), d) + } + + // Format the list as a multi-lined output. + n++ + for i, r := range s { + b = n.appendIndent(append(b, '\n'), d|r.Diff) + if r.Key != "" { + b = append(b, r.Key+": "...) + } + b = alignKeyLens[i].appendChar(b, ' ') + + b = r.Value.formatExpandedTo(b, d|r.Diff, n) + if !r.ElideComma { + b = append(b, ',') + } + b = alignValueLens[i].appendChar(b, ' ') + + if r.Comment != nil { + b = append(b, " // "+r.Comment.String()...) + } + } + n-- + + return n.appendIndent(append(b, '\n'), d) +} + +func (s textList) alignLens( + skipFunc func(textRecord) bool, + lenFunc func(textRecord) int, +) []repeatCount { + var startIdx, endIdx, maxLen int + lens := make([]repeatCount, len(s)) + for i, r := range s { + if skipFunc(r) { + for j := startIdx; j < endIdx && j < len(s); j++ { + lens[j] = repeatCount(maxLen - lenFunc(s[j])) + } + startIdx, endIdx, maxLen = i+1, i+1, 0 + } else { + if maxLen < lenFunc(r) { + maxLen = lenFunc(r) + } + endIdx = i + 1 + } + } + for j := startIdx; j < endIdx && j < len(s); j++ { + lens[j] = repeatCount(maxLen - lenFunc(s[j])) + } + return lens +} + +// textLine is a single-line segment of text and is always a leaf node +// in the textNode tree. +type textLine []byte + +var ( + textNil = textLine("nil") + textEllipsis = textLine("...") +) + +func (s textLine) Len() int { + return len(s) +} +func (s1 textLine) Equal(s2 textNode) bool { + if s2, ok := s2.(textLine); ok { + return bytes.Equal([]byte(s1), []byte(s2)) + } + return false +} +func (s textLine) String() string { + return string(s) +} +func (s textLine) formatCompactTo(b []byte, d diffMode) ([]byte, textNode) { + return append(b, s...), s +} +func (s textLine) formatExpandedTo(b []byte, _ diffMode, _ indentMode) []byte { + return append(b, s...) +} + +type diffStats struct { + Name string + NumIgnored int + NumIdentical int + NumRemoved int + NumInserted int + NumModified int +} + +func (s diffStats) IsZero() bool { + s.Name = "" + return s == diffStats{} +} + +func (s diffStats) NumDiff() int { + return s.NumRemoved + s.NumInserted + s.NumModified +} + +func (s diffStats) Append(ds diffStats) diffStats { + assert(s.Name == ds.Name) + s.NumIgnored += ds.NumIgnored + s.NumIdentical += ds.NumIdentical + s.NumRemoved += ds.NumRemoved + s.NumInserted += ds.NumInserted + s.NumModified += ds.NumModified + return s +} + +// String prints a humanly-readable summary of coalesced records. 
+// +// Example: +// diffStats{Name: "Field", NumIgnored: 5}.String() => "5 ignored fields" +func (s diffStats) String() string { + var ss []string + var sum int + labels := [...]string{"ignored", "identical", "removed", "inserted", "modified"} + counts := [...]int{s.NumIgnored, s.NumIdentical, s.NumRemoved, s.NumInserted, s.NumModified} + for i, n := range counts { + if n > 0 { + ss = append(ss, fmt.Sprintf("%d %v", n, labels[i])) + } + sum += n + } + + // Pluralize the name (adjusting for some obscure English grammar rules). + name := s.Name + if sum > 1 { + name += "s" + if strings.HasSuffix(name, "ys") { + name = name[:len(name)-2] + "ies" // e.g., "entrys" => "entries" + } + } + + // Format the list according to English grammar (with Oxford comma). + switch n := len(ss); n { + case 0: + return "" + case 1, 2: + return strings.Join(ss, " and ") + " " + name + default: + return strings.Join(ss[:n-1], ", ") + ", and " + ss[n-1] + " " + name + } +} + +type commentString string + +func (s commentString) String() string { return string(s) } diff --git a/vendor/github.com/google/go-cmp/cmp/report_value.go b/vendor/github.com/google/go-cmp/cmp/report_value.go new file mode 100644 index 0000000..668d470 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_value.go @@ -0,0 +1,121 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import "reflect" + +// valueNode represents a single node within a report, which is a +// structured representation of the value tree, containing information +// regarding which nodes are equal or not. +type valueNode struct { + parent *valueNode + + Type reflect.Type + ValueX reflect.Value + ValueY reflect.Value + + // NumSame is the number of leaf nodes that are equal. + // All descendants are equal only if NumDiff is 0. + NumSame int + // NumDiff is the number of leaf nodes that are not equal. + NumDiff int + // NumIgnored is the number of leaf nodes that are ignored. + NumIgnored int + // NumCompared is the number of leaf nodes that were compared + // using an Equal method or Comparer function. + NumCompared int + // NumTransformed is the number of non-leaf nodes that were transformed. + NumTransformed int + // NumChildren is the number of transitive descendants of this node. + // This counts from zero; thus, leaf nodes have no descendants. + NumChildren int + // MaxDepth is the maximum depth of the tree. This counts from zero; + // thus, leaf nodes have a depth of zero. + MaxDepth int + + // Records is a list of struct fields, slice elements, or map entries. + Records []reportRecord // If populated, implies Value is not populated + + // Value is the result of a transformation, pointer indirect, of + // type assertion. + Value *valueNode // If populated, implies Records is not populated + + // TransformerName is the name of the transformer. 
+ TransformerName string // If non-empty, implies Value is populated +} +type reportRecord struct { + Key reflect.Value // Invalid for slice element + Value *valueNode +} + +func (parent *valueNode) PushStep(ps PathStep) (child *valueNode) { + vx, vy := ps.Values() + child = &valueNode{parent: parent, Type: ps.Type(), ValueX: vx, ValueY: vy} + switch s := ps.(type) { + case StructField: + assert(parent.Value == nil) + parent.Records = append(parent.Records, reportRecord{Key: reflect.ValueOf(s.Name()), Value: child}) + case SliceIndex: + assert(parent.Value == nil) + parent.Records = append(parent.Records, reportRecord{Value: child}) + case MapIndex: + assert(parent.Value == nil) + parent.Records = append(parent.Records, reportRecord{Key: s.Key(), Value: child}) + case Indirect: + assert(parent.Value == nil && parent.Records == nil) + parent.Value = child + case TypeAssertion: + assert(parent.Value == nil && parent.Records == nil) + parent.Value = child + case Transform: + assert(parent.Value == nil && parent.Records == nil) + parent.Value = child + parent.TransformerName = s.Name() + parent.NumTransformed++ + default: + assert(parent == nil) // Must be the root step + } + return child +} + +func (r *valueNode) Report(rs Result) { + assert(r.MaxDepth == 0) // May only be called on leaf nodes + + if rs.ByIgnore() { + r.NumIgnored++ + } else { + if rs.Equal() { + r.NumSame++ + } else { + r.NumDiff++ + } + } + assert(r.NumSame+r.NumDiff+r.NumIgnored == 1) + + if rs.ByMethod() { + r.NumCompared++ + } + if rs.ByFunc() { + r.NumCompared++ + } + assert(r.NumCompared <= 1) +} + +func (child *valueNode) PopStep() (parent *valueNode) { + if child.parent == nil { + return nil + } + parent = child.parent + parent.NumSame += child.NumSame + parent.NumDiff += child.NumDiff + parent.NumIgnored += child.NumIgnored + parent.NumCompared += child.NumCompared + parent.NumTransformed += child.NumTransformed + parent.NumChildren += child.NumChildren + 1 + if parent.MaxDepth < child.MaxDepth+1 { + parent.MaxDepth = child.MaxDepth + 1 + } + return parent +} diff --git a/vendor/github.com/google/go-jsonnet/.goreleaser.yml b/vendor/github.com/google/go-jsonnet/.goreleaser.yml index 38211e2..596edb7 100644 --- a/vendor/github.com/google/go-jsonnet/.goreleaser.yml +++ b/vendor/github.com/google/go-jsonnet/.goreleaser.yml @@ -51,3 +51,37 @@ archives: amd64: x86_64 checksum: name_template: 'checksums.txt' + +nfpms: + - id: jsonnet + package_name: jsonnet-go + builds: + - jsonnet + description: A data templating language for app and tool developers + homepage: https://github.com/google/go-jsonnet + license: Apache 2.0 + formats: + - deb + bindir: /usr/bin + maintainer: David Cunningham + file_name_template: "jsonnet-go_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + overrides: + deb: + conflicts: + # See: https://packages.ubuntu.com/jsonnet + - jsonnet + - id: jsonnetfmt + package_name: jsonnetfmt-go + builds: + - jsonnetfmt + homepage: https://github.com/google/go-jsonnet + license: Apache 2.0 + formats: + - deb + bindir: /usr/bin + file_name_template: "jsonnetfmt-go_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + overrides: + deb: + conflicts: + # See: https://packages.ubuntu.com/jsonnet + - jsonnetfmt diff --git a/vendor/github.com/google/go-jsonnet/.travis.yml b/vendor/github.com/google/go-jsonnet/.travis.yml index 8ce0148..0f4516d 100644 --- a/vendor/github.com/google/go-jsonnet/.travis.yml +++ b/vendor/github.com/google/go-jsonnet/.travis.yml @@ -6,14 +6,15 @@ matrix: - go: 1.11.x - go: 1.12.x - go: 1.13.x + - go: 1.x arch: amd64 
- name: "arch: arm64" - go: 1.13.x + go: 1.x arch: arm64 env: - PYTHON_COMMAND=python3 - name: "arch: i686" - go: 1.13.x + go: 1.x arch: amd64 env: - PYTHON_COMMAND=python3 @@ -21,11 +22,10 @@ matrix: - CGO_ENABLED=1 - SKIP_PYTHON_BINDINGS_TESTS=1 - name: "arch: ppc64le" - go: 1.13.x + go: 1.x arch: ppc64le env: - PYTHON_COMMAND=python3 - - go: tip - name: "Bazel Check" go: 1.x script: ./travisBazel.sh diff --git a/vendor/github.com/google/go-jsonnet/BUILD.bazel b/vendor/github.com/google/go-jsonnet/BUILD.bazel index a2e5f54..1410047 100644 --- a/vendor/github.com/google/go-jsonnet/BUILD.bazel +++ b/vendor/github.com/google/go-jsonnet/BUILD.bazel @@ -19,6 +19,7 @@ go_library( "interpreter.go", "runtime_error.go", "thunks.go", + "util.go", "value.go", "vm.go", ], diff --git a/vendor/github.com/google/go-jsonnet/README.md b/vendor/github.com/google/go-jsonnet/README.md index af152d6..8a5124c 100644 --- a/vendor/github.com/google/go-jsonnet/README.md +++ b/vendor/github.com/google/go-jsonnet/README.md @@ -76,7 +76,7 @@ go get golang.org/x/tools/cmd/benchcmp 1. Make sure you build a jsonnet binary _prior_ to making changes. ```bash -go build ./cmd/jsonnet -o jsonnet-old +go build -o jsonnet-old ./cmd/jsonnet ``` 2. Make changes (iterate as needed), and rebuild new binary @@ -125,6 +125,17 @@ export PATH=$PATH:$GOPATH/bin # If you haven't already go generate ``` +## Update cpp-jsonnet sub-repo + +This repo depends on [the original Jsonnet repo](https://github.com/google/jsonnet). Shared parts include the standard library, headers files for C API and some tests. + +You can update the submodule and regenerate dependent files with one command: +``` +./update_cpp_jsonnet.sh +``` + +Note: It needs to be run from repo root. + ## Updating and modifying the standard library Standard library source code is kept in `cpp-jsonnet` submodule, because it is shared with [Jsonnet C++ @@ -133,11 +144,11 @@ implementation](https://github.com/google/jsonnet). For performance reasons we perform preprocessing on the standard library, so for the changes to be visible, regeneration is necessary: ```bash -git submodule init -git submodule update go run cmd/dumpstdlibast/dumpstdlibast.go cpp-jsonnet/stdlib/std.jsonnet > astgen/stdast.go ``` +**The + The above command creates the _astgen/stdast.go_ file which puts the desugared standard library into the right data structures, which lets us avoid the parsing overhead during execution. Note that this step is not necessary to perform manually when building with Bazel; the Bazel target regenerates the _astgen/stdast.go_ (writing it into Bazel's build sandbox directory tree) file when necessary. 
## Keeping the Bazel files up to date diff --git a/vendor/github.com/google/go-jsonnet/astgen/BUILD.bazel b/vendor/github.com/google/go-jsonnet/astgen/BUILD.bazel index 0bec2f1..2d0a4b5 100644 --- a/vendor/github.com/google/go-jsonnet/astgen/BUILD.bazel +++ b/vendor/github.com/google/go-jsonnet/astgen/BUILD.bazel @@ -2,7 +2,7 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") genrule( name = "dumpstdlibast", - srcs = ["//cpp-jsonnet/stdlib"], + srcs = ["@cpp_jsonnet//stdlib"], outs = ["stdast.go"], cmd = "./$(location //cmd/dumpstdlibast) \"$<\" > \"$@\"", tools = ["//cmd/dumpstdlibast"], diff --git a/vendor/github.com/google/go-jsonnet/astgen/stdast.go b/vendor/github.com/google/go-jsonnet/astgen/stdast.go index f10fdd0..f538fc7 100644 --- a/vendor/github.com/google/go-jsonnet/astgen/stdast.go +++ b/vendor/github.com/google/go-jsonnet/astgen/stdast.go @@ -1993,322 +1993,390 @@ var p14098Var = "function " var p14098 = &p14098Var var p14108Var = "thunk from >" var p14108 = &p14108Var -var p14119Var = "function " -var p14119 = &p14119Var -var p14123Var = "thunk from >" -var p14123 = &p14123Var -var p14132Var = "thunk from from >>" +var p14132Var = "thunk from >" var p14132 = &p14132Var -var p14139Var = "thunk from >" +var p14139Var = "function " var p14139 = &p14139Var -var p14148Var = "thunk from from >>" +var p14148Var = "thunk from >" var p14148 = &p14148Var -var p14164Var = "thunk from >" -var p14164 = &p14164Var -var p14183Var = "thunk from >" -var p14183 = &p14183Var -var p14191Var = "thunk from >" -var p14191 = &p14191Var -var p14200Var = "thunk from from >>" -var p14200 = &p14200Var -var p14216Var = "thunk from >" -var p14216 = &p14216Var -var p14227Var = "thunk from >>" -var p14227 = &p14227Var -var p14237Var = "thunk from >" -var p14237 = &p14237Var -var p14242Var = "function " -var p14242 = &p14242Var -var p14279Var = "thunk from >" -var p14279 = &p14279Var -var p14295Var = "thunk from >" -var p14295 = &p14295Var -var p14312Var = "thunk from >" -var p14312 = &p14312Var -var p14320Var = "thunk from >" -var p14320 = &p14320Var -var p14329Var = "thunk from from >>" -var p14329 = &p14329Var -var p14336Var = "thunk from >" -var p14336 = &p14336Var -var p14345Var = "thunk from from >>" -var p14345 = &p14345Var -var p14363Var = "thunk from >" -var p14363 = &p14363Var -var p14373Var = "thunk from >" -var p14373 = &p14373Var -var p14378Var = "function " -var p14378 = &p14378Var -var p14395Var = "thunk from >" -var p14395 = &p14395Var -var p14425Var = "thunk from >" -var p14425 = &p14425Var -var p14441Var = "thunk from >" -var p14441 = &p14441Var -var p14457Var = "thunk from >" -var p14457 = &p14457Var -var p14467Var = "function " -var p14467 = &p14467Var -var p14471Var = "thunk from >" -var p14471 = &p14471Var -var p14480Var = "thunk from from >>" -var p14480 = &p14480Var -var p14493Var = "thunk from >" -var p14493 = &p14493Var -var p14505Var = "thunk from >>" -var p14505 = &p14505Var -var p14516Var = "thunk from >>>" -var p14516 = &p14516Var -var p14523Var = "function " -var p14523 = &p14523Var -var p14533Var = "thunk from >>" -var p14533 = &p14533Var +var p14169Var = "thunk from >" +var p14169 = &p14169Var +var p14176Var = "function " +var p14176 = &p14176Var +var p14185Var = "thunk from >" +var p14185 = &p14185Var +var p14193Var = "function " +var p14193 = &p14193Var +var p14197Var = "thunk from >" +var p14197 = &p14197Var +var p14206Var = "thunk from from >>" +var p14206 = &p14206Var +var p14213Var = "thunk from >" +var p14213 = &p14213Var +var p14222Var = "thunk from from >>" 
+var p14222 = &p14222Var +var p14238Var = "thunk from >" +var p14238 = &p14238Var +var p14257Var = "thunk from >" +var p14257 = &p14257Var +var p14265Var = "thunk from >" +var p14265 = &p14265Var +var p14274Var = "thunk from from >>" +var p14274 = &p14274Var +var p14290Var = "thunk from >" +var p14290 = &p14290Var +var p14301Var = "thunk from >>" +var p14301 = &p14301Var +var p14311Var = "thunk from >" +var p14311 = &p14311Var +var p14316Var = "function " +var p14316 = &p14316Var +var p14353Var = "thunk from >" +var p14353 = &p14353Var +var p14369Var = "thunk from >" +var p14369 = &p14369Var +var p14386Var = "thunk from >" +var p14386 = &p14386Var +var p14394Var = "thunk from >" +var p14394 = &p14394Var +var p14403Var = "thunk from from >>" +var p14403 = &p14403Var +var p14410Var = "thunk from >" +var p14410 = &p14410Var +var p14419Var = "thunk from from >>" +var p14419 = &p14419Var +var p14437Var = "thunk from >" +var p14437 = &p14437Var +var p14447Var = "thunk from >" +var p14447 = &p14447Var +var p14452Var = "function " +var p14452 = &p14452Var +var p14469Var = "thunk from >" +var p14469 = &p14469Var +var p14499Var = "thunk from >" +var p14499 = &p14499Var +var p14515Var = "thunk from >" +var p14515 = &p14515Var +var p14531Var = "thunk from >" +var p14531 = &p14531Var var p14541Var = "function " var p14541 = &p14541Var -var p14545Var = "thunk from >" +var p14545Var = "thunk from >" var p14545 = &p14545Var -var p14550Var = "function " -var p14550 = &p14550Var -var p14571Var = "thunk from >" -var p14571 = &p14571Var -var p14585Var = "thunk from >" -var p14585 = &p14585Var -var p14600Var = "thunk from >" -var p14600 = &p14600Var -var p14614Var = "thunk from >" -var p14614 = &p14614Var -var p14632Var = "thunk from >" -var p14632 = &p14632Var -var p14653Var = "thunk from >" -var p14653 = &p14653Var -var p14662Var = "thunk from >>" -var p14662 = &p14662Var -var p14668Var = "thunk from >" -var p14668 = &p14668Var -var p14677Var = "thunk from >>" -var p14677 = &p14677Var -var p14694Var = "thunk from >" -var p14694 = &p14694Var -var p14723Var = "thunk from >" -var p14723 = &p14723Var -var p14732Var = "thunk from >>" -var p14732 = &p14732Var -var p14747Var = "object " -var p14747 = &p14747Var -var p14756Var = "thunk from >" -var p14756 = &p14756Var -var p14772Var = "thunk from >" -var p14772 = &p14772Var -var p14783Var = "function " -var p14783 = &p14783Var -var p14796Var = "thunk from >" -var p14796 = &p14796Var -var p14813Var = "thunk from >" -var p14813 = &p14813Var -var p14829Var = "thunk from >" -var p14829 = &p14829Var +var p14554Var = "thunk from from >>" +var p14554 = &p14554Var +var p14567Var = "thunk from >" +var p14567 = &p14567Var +var p14579Var = "thunk from >>" +var p14579 = &p14579Var +var p14590Var = "thunk from >>>" +var p14590 = &p14590Var +var p14597Var = "function " +var p14597 = &p14597Var +var p14607Var = "thunk from >>" +var p14607 = &p14607Var +var p14615Var = "function " +var p14615 = &p14615Var +var p14619Var = "thunk from >" +var p14619 = &p14619Var +var p14624Var = "function " +var p14624 = &p14624Var +var p14645Var = "thunk from >" +var p14645 = &p14645Var +var p14659Var = "thunk from >" +var p14659 = &p14659Var +var p14674Var = "thunk from >" +var p14674 = &p14674Var +var p14688Var = "thunk from >" +var p14688 = &p14688Var +var p14706Var = "thunk from >" +var p14706 = &p14706Var +var p14727Var = "thunk from >" +var p14727 = &p14727Var +var p14736Var = "thunk from >>" +var p14736 = &p14736Var +var p14742Var = "thunk from >" +var p14742 = &p14742Var +var p14751Var = 
"thunk from >>" +var p14751 = &p14751Var +var p14768Var = "thunk from >" +var p14768 = &p14768Var +var p14797Var = "thunk from >" +var p14797 = &p14797Var +var p14806Var = "thunk from >>" +var p14806 = &p14806Var +var p14821Var = "object " +var p14821 = &p14821Var +var p14830Var = "thunk from >" +var p14830 = &p14830Var var p14846Var = "thunk from >" var p14846 = &p14846Var -var p14854Var = "thunk from >" -var p14854 = &p14854Var -var p14863Var = "thunk from from >>" -var p14863 = &p14863Var -var p14870Var = "thunk from >" +var p14857Var = "function " +var p14857 = &p14857Var +var p14870Var = "thunk from >" var p14870 = &p14870Var -var p14879Var = "thunk from from >>" -var p14879 = &p14879Var -var p14917Var = "thunk from >" -var p14917 = &p14917Var -var p14921Var = "function " -var p14921 = &p14921Var -var p14953Var = "thunk from >>" +var p14887Var = "thunk from >" +var p14887 = &p14887Var +var p14903Var = "thunk from >" +var p14903 = &p14903Var +var p14920Var = "thunk from >" +var p14920 = &p14920Var +var p14928Var = "thunk from >" +var p14928 = &p14928Var +var p14937Var = "thunk from from >>" +var p14937 = &p14937Var +var p14944Var = "thunk from >" +var p14944 = &p14944Var +var p14953Var = "thunk from from >>" var p14953 = &p14953Var -var p14966Var = "function " -var p14966 = &p14966Var -var p14979Var = "thunk from >" -var p14979 = &p14979Var -var p14996Var = "thunk from >" -var p14996 = &p14996Var -var p15009Var = "thunk from >" -var p15009 = &p15009Var -var p15013Var = "function " -var p15013 = &p15013Var -var p15032Var = "thunk from >>" -var p15032 = &p15032Var -var p15044Var = "thunk from >>>" -var p15044 = &p15044Var -var p15049Var = "object " -var p15049 = &p15049Var -var p15051Var = "object " -var p15051 = &p15051Var -var p15054Var = "function " -var p15054 = &p15054Var -var p15057Var = "object " -var p15057 = &p15057Var -var p15063Var = "function " -var p15063 = &p15063Var -var p15066Var = "function " -var p15066 = &p15066Var -var p15069Var = "function " -var p15069 = &p15069Var -var p15072Var = "function " -var p15072 = &p15072Var -var p15074Var = "function " -var p15074 = &p15074Var -var p15077Var = "function " -var p15077 = &p15077Var -var p15080Var = "function " -var p15080 = &p15080Var -var p15084Var = "function " -var p15084 = &p15084Var -var p15088Var = "thunk from >" -var p15088 = &p15088Var -var p15091Var = "thunk from >" -var p15091 = &p15091Var -var p15098Var = "thunk from from >>" -var p15098 = &p15098Var -var p15101Var = "function " -var p15101 = &p15101Var -var p15105Var = "thunk from >" -var p15105 = &p15105Var -var p15108Var = "thunk from >" -var p15108 = &p15108Var -var p15115Var = "thunk from from >>" -var p15115 = &p15115Var -var p15118Var = "function " +var p14991Var = "thunk from >" +var p14991 = &p14991Var +var p14995Var = "function " +var p14995 = &p14995Var +var p15027Var = "thunk from >>" +var p15027 = &p15027Var +var p15040Var = "function " +var p15040 = &p15040Var +var p15053Var = "thunk from >" +var p15053 = &p15053Var +var p15070Var = "thunk from >" +var p15070 = &p15070Var +var p15083Var = "thunk from >" +var p15083 = &p15083Var +var p15087Var = "function " +var p15087 = &p15087Var +var p15106Var = "thunk from >>" +var p15106 = &p15106Var +var p15118Var = "thunk from >>>" var p15118 = &p15118Var -var p15122Var = "thunk from >" -var p15122 = &p15122Var -var p15125Var = "thunk from >" -var p15125 = &p15125Var -var p15132Var = "thunk from from >>" -var p15132 = &p15132Var -var p15135Var = "function " -var p15135 = &p15135Var -var p15139Var = "thunk from 
>" -var p15139 = &p15139Var -var p15144Var = "function " -var p15144 = &p15144Var -var p15148Var = "thunk from >" -var p15148 = &p15148Var -var p15151Var = "thunk from >" -var p15151 = &p15151Var -var p15158Var = "thunk from from >>" -var p15158 = &p15158Var -var p15162Var = "function " -var p15162 = &p15162Var -var p15166Var = "thunk from >" -var p15166 = &p15166Var -var p15169Var = "thunk from >" -var p15169 = &p15169Var -var p15172Var = "thunk from >" -var p15172 = &p15172Var -var p15175Var = "thunk from >" -var p15175 = &p15175Var -var p15178Var = "thunk from >" -var p15178 = &p15178Var -var p15181Var = "thunk from >" -var p15181 = &p15181Var -var p15185Var = "thunk from >" -var p15185 = &p15185Var -var p15188Var = "thunk from >" -var p15188 = &p15188Var -var p15191Var = "thunk from >" -var p15191 = &p15191Var -var p15194Var = "thunk from >" -var p15194 = &p15194Var -var p15197Var = "thunk from >" -var p15197 = &p15197Var -var p15200Var = "thunk from >" -var p15200 = &p15200Var -var p15203Var = "thunk from >" -var p15203 = &p15203Var -var p15206Var = "thunk from >" -var p15206 = &p15206Var -var p15209Var = "thunk from >" -var p15209 = &p15209Var -var p15213Var = "thunk from >" -var p15213 = &p15213Var -var p15216Var = "thunk from >" -var p15216 = &p15216Var -var p15219Var = "thunk from >" -var p15219 = &p15219Var -var p15222Var = "thunk from >" -var p15222 = &p15222Var -var p15226Var = "thunk from >" -var p15226 = &p15226Var -var p15229Var = "thunk from >" -var p15229 = &p15229Var -var p15234Var = "function " -var p15234 = &p15234Var -var p15237Var = "function " -var p15237 = &p15237Var -var p15240Var = "function " -var p15240 = &p15240Var -var p15243Var = "function " -var p15243 = &p15243Var -var p15245Var = "function " -var p15245 = &p15245Var -var p15248Var = "function " -var p15248 = &p15248Var -var p15251Var = "function " -var p15251 = &p15251Var -var p15254Var = "function " -var p15254 = &p15254Var -var p15257Var = "function " -var p15257 = &p15257Var -var p15261Var = "function " -var p15261 = &p15261Var -var p15264Var = "function " -var p15264 = &p15264Var -var p15267Var = "function " -var p15267 = &p15267Var -var p15280Var = "function " -var p15280 = &p15280Var -var p15282Var = "function " -var p15282 = &p15282Var -var p15286Var = "thunk from >" -var p15286 = &p15286Var -var p15289Var = "thunk from >" -var p15289 = &p15289Var -var p15292Var = "function " -var p15292 = &p15292Var -var p15295Var = "function " -var p15295 = &p15295Var -var p15303Var = "thunk from >" +var p15127Var = "function " +var p15127 = &p15127Var +var p15131Var = "thunk from >" +var p15131 = &p15131Var +var p15140Var = "thunk from from >>" +var p15140 = &p15140Var +var p15143Var = "thunk from >" +var p15143 = &p15143Var +var p15152Var = "thunk from from >>" +var p15152 = &p15152Var +var p15196Var = "thunk from >" +var p15196 = &p15196Var +var p15262Var = "function " +var p15262 = &p15262Var +var p15266Var = "thunk from >" +var p15266 = &p15266Var +var p15275Var = "thunk from from >>" +var p15275 = &p15275Var +var p15278Var = "thunk from >" +var p15278 = &p15278Var +var p15287Var = "thunk from from >>" +var p15287 = &p15287Var +var p15294Var = "thunk from >" +var p15294 = &p15294Var +var p15303Var = "thunk from from >>" var p15303 = &p15303Var -var p15306Var = "thunk from >" -var p15306 = &p15306Var -var p15309Var = "thunk from >" -var p15309 = &p15309Var -var p15316Var = "thunk from >>" -var p15316 = &p15316Var -var p15319Var = "thunk from >" -var p15319 = &p15319Var -var p15331Var = "function " -var p15331 
= &p15331Var -var p15333Var = "function " -var p15333 = &p15333Var -var p15336Var = "object " -var p15336 = &p15336Var -var p15360Var = "object " -var p15360 = &p15360Var -var p15364Var = "object " -var p15364 = &p15364Var -var p15367Var = "object " -var p15367 = &p15367Var -var p15370Var = "object " +var p15312Var = "thunk from >" +var p15312 = &p15312Var +var p15317Var = "function " +var p15317 = &p15317Var +var p15330Var = "thunk from >" +var p15330 = &p15330Var +var p15339Var = "thunk from from >>" +var p15339 = &p15339Var +var p15370Var = "thunk from >" var p15370 = &p15370Var -var p15373Var = "object " -var p15373 = &p15373Var -var p15376Var = "object " -var p15376 = &p15376Var -var p15379Var = "object " -var p15379 = &p15379Var -var p15386Var = "thunk from >" +var p15386Var = "thunk from >" var p15386 = &p15386Var -var p15388Var = "thunk from >" -var p15388 = &p15388Var +var p15397Var = "thunk from >" +var p15397 = &p15397Var +var p15403Var = "function " +var p15403 = &p15403Var +var p15414Var = "thunk from >" +var p15414 = &p15414Var +var p15425Var = "function " +var p15425 = &p15425Var +var p15436Var = "thunk from >" +var p15436 = &p15436Var +var p15446Var = "function " +var p15446 = &p15446Var +var p15457Var = "thunk from >" +var p15457 = &p15457Var +var p15467Var = "function " +var p15467 = &p15467Var +var p15478Var = "thunk from >" +var p15478 = &p15478Var +var p15485Var = "object " +var p15485 = &p15485Var +var p15487Var = "object " +var p15487 = &p15487Var +var p15490Var = "function " +var p15490 = &p15490Var +var p15493Var = "object " +var p15493 = &p15493Var +var p15499Var = "function " +var p15499 = &p15499Var +var p15502Var = "function " +var p15502 = &p15502Var +var p15505Var = "function " +var p15505 = &p15505Var +var p15508Var = "function " +var p15508 = &p15508Var +var p15510Var = "function " +var p15510 = &p15510Var +var p15513Var = "function " +var p15513 = &p15513Var +var p15516Var = "function " +var p15516 = &p15516Var +var p15520Var = "function " +var p15520 = &p15520Var +var p15524Var = "thunk from >" +var p15524 = &p15524Var +var p15527Var = "thunk from >" +var p15527 = &p15527Var +var p15534Var = "thunk from from >>" +var p15534 = &p15534Var +var p15537Var = "function " +var p15537 = &p15537Var +var p15541Var = "thunk from >" +var p15541 = &p15541Var +var p15544Var = "thunk from >" +var p15544 = &p15544Var +var p15551Var = "thunk from from >>" +var p15551 = &p15551Var +var p15554Var = "function " +var p15554 = &p15554Var +var p15558Var = "thunk from >" +var p15558 = &p15558Var +var p15561Var = "thunk from >" +var p15561 = &p15561Var +var p15568Var = "thunk from from >>" +var p15568 = &p15568Var +var p15571Var = "function " +var p15571 = &p15571Var +var p15575Var = "thunk from >" +var p15575 = &p15575Var +var p15580Var = "function " +var p15580 = &p15580Var +var p15584Var = "thunk from >" +var p15584 = &p15584Var +var p15587Var = "thunk from >" +var p15587 = &p15587Var +var p15594Var = "thunk from from >>" +var p15594 = &p15594Var +var p15598Var = "function " +var p15598 = &p15598Var +var p15602Var = "thunk from >" +var p15602 = &p15602Var +var p15605Var = "thunk from >" +var p15605 = &p15605Var +var p15608Var = "thunk from >" +var p15608 = &p15608Var +var p15611Var = "thunk from >" +var p15611 = &p15611Var +var p15614Var = "thunk from >" +var p15614 = &p15614Var +var p15617Var = "thunk from >" +var p15617 = &p15617Var +var p15621Var = "thunk from >" +var p15621 = &p15621Var +var p15624Var = "thunk from >" +var p15624 = &p15624Var +var p15627Var = "thunk from 
>" +var p15627 = &p15627Var +var p15630Var = "thunk from >" +var p15630 = &p15630Var +var p15633Var = "thunk from >" +var p15633 = &p15633Var +var p15636Var = "thunk from >" +var p15636 = &p15636Var +var p15639Var = "thunk from >" +var p15639 = &p15639Var +var p15642Var = "thunk from >" +var p15642 = &p15642Var +var p15645Var = "thunk from >" +var p15645 = &p15645Var +var p15649Var = "thunk from >" +var p15649 = &p15649Var +var p15652Var = "thunk from >" +var p15652 = &p15652Var +var p15655Var = "thunk from >" +var p15655 = &p15655Var +var p15658Var = "thunk from >" +var p15658 = &p15658Var +var p15662Var = "thunk from >" +var p15662 = &p15662Var +var p15665Var = "thunk from >" +var p15665 = &p15665Var +var p15670Var = "function " +var p15670 = &p15670Var +var p15673Var = "function " +var p15673 = &p15673Var +var p15676Var = "function " +var p15676 = &p15676Var +var p15679Var = "function " +var p15679 = &p15679Var +var p15681Var = "function " +var p15681 = &p15681Var +var p15684Var = "function " +var p15684 = &p15684Var +var p15687Var = "function " +var p15687 = &p15687Var +var p15690Var = "function " +var p15690 = &p15690Var +var p15693Var = "function " +var p15693 = &p15693Var +var p15697Var = "function " +var p15697 = &p15697Var +var p15700Var = "function " +var p15700 = &p15700Var +var p15703Var = "function " +var p15703 = &p15703Var +var p15716Var = "function " +var p15716 = &p15716Var +var p15718Var = "function " +var p15718 = &p15718Var +var p15722Var = "thunk from >" +var p15722 = &p15722Var +var p15725Var = "thunk from >" +var p15725 = &p15725Var +var p15728Var = "function " +var p15728 = &p15728Var +var p15731Var = "function " +var p15731 = &p15731Var +var p15739Var = "thunk from >" +var p15739 = &p15739Var +var p15742Var = "thunk from >" +var p15742 = &p15742Var +var p15745Var = "thunk from >" +var p15745 = &p15745Var +var p15752Var = "thunk from >>" +var p15752 = &p15752Var +var p15755Var = "thunk from >" +var p15755 = &p15755Var +var p15767Var = "function " +var p15767 = &p15767Var +var p15769Var = "function " +var p15769 = &p15769Var +var p15772Var = "object " +var p15772 = &p15772Var +var p15796Var = "object " +var p15796 = &p15796Var +var p15800Var = "object " +var p15800 = &p15800Var +var p15803Var = "object " +var p15803 = &p15803Var +var p15806Var = "object " +var p15806 = &p15806Var +var p15809Var = "object " +var p15809 = &p15809Var +var p15812Var = "object " +var p15812 = &p15812Var +var p15815Var = "object " +var p15815 = &p15815Var +var p15822Var = "thunk from >" +var p15822 = &p15822Var +var p15824Var = "thunk from >" +var p15824 = &p15824Var var p1 = &ast.Source{ Lines: []string{ "/*\n", @@ -2352,7 +2420,7 @@ var p1 = &ast.Source{ " assert std.isString(str) : 'substr first parameter should be a string, got ' + std.type(str);\n", " assert std.isNumber(from) : 'substr second parameter should be a string, got ' + std.type(from);\n", " assert std.isNumber(len) : 'substr third parameter should be a string, got ' + std.type(len);\n", - " assert len >=0 : 'substr third parameter should be greater than zero, got ' + len;\n", + " assert len >= 0 : 'substr third parameter should be greater than zero, got ' + len;\n", " std.join('', std.makeArray(std.max(0, std.min(len, std.length(str) - from)), function(i) str[i + from])),\n", "\n", " startsWith(a, b)::\n", @@ -2425,13 +2493,13 @@ var p1 = &ast.Source{ " split(str, c)::\n", " assert std.isString(str) : 'std.split first parameter should be a string, got ' + std.type(str);\n", " assert std.isString(c) : 'std.split second 
parameter should be a string, got ' + std.type(c);\n", - " assert std.length(c) == 1 : 'std.split second parameter should have length 1, got ' + std.length(c);\n", + " assert std.length(c) == 1 : 'std.split second parameter should have length 1, got ' + std.length(c);\n", " std.splitLimit(str, c, -1),\n", "\n", " splitLimit(str, c, maxsplits)::\n", " assert std.isString(str) : 'std.splitLimit first parameter should be a string, got ' + std.type(str);\n", " assert std.isString(c) : 'std.splitLimit second parameter should be a string, got ' + std.type(c);\n", - " assert std.length(c) == 1 : 'std.splitLimit second parameter should have length 1, got ' + std.length(c);\n", + " assert std.length(c) == 1 : 'std.splitLimit second parameter should have length 1, got ' + std.length(c);\n", " assert std.isNumber(maxsplits) : 'std.splitLimit third parameter should be a number, got ' + std.type(maxsplits);\n", " local aux(str, delim, i, arr, v) =\n", " local c = str[i];\n", @@ -2496,9 +2564,9 @@ var p1 = &ast.Source{ "\n", " repeat(what, count)::\n", " local joiner =\n", - " if std.isString(what) then \"\"\n", + " if std.isString(what) then ''\n", " else if std.isArray(what) then []\n", - " else error \"std.repeat first argument must be an array or a string\";\n", + " else error 'std.repeat first argument must be an array or a string';\n", " std.join(joiner, std.makeArray(count, function(i) what)),\n", "\n", " slice(indexable, index, end, step)::\n", @@ -2539,7 +2607,7 @@ var p1 = &ast.Source{ " std.count(arr, x) > 0\n", " else if std.isString(arr) then\n", " std.length(std.findSubstr(x, arr)) > 0\n", - " else error \"std.member first argument must be an array or a string\",\n", + " else error 'std.member first argument must be an array or a string',\n", "\n", " count(arr, x):: std.length(std.filter(function(v) v == x, arr)),\n", "\n", @@ -3152,7 +3220,7 @@ var p1 = &ast.Source{ " if a < b then a else b,\n", "\n", " clamp(x, minVal, maxVal)::\n", - " if x < minVal then minVal\n", + " if x < minVal then minVal\n", " else if x > maxVal then maxVal\n", " else x,\n", "\n", @@ -3614,6 +3682,12 @@ var p1 = &ast.Source{ " objectHasAll(o, f)::\n", " std.objectHasEx(o, f, true),\n", "\n", + " objectValues(o)::\n", + " [o[k] for k in std.objectFields(o)],\n", + "\n", + " objectValuesAll(o)::\n", + " [o[k] for k in std.objectFieldsAll(o)],\n", + "\n", " equals(a, b)::\n", " local ta = std.type(a);\n", " local tb = std.type(b);\n", @@ -3692,6 +3766,40 @@ var p1 = &ast.Source{ " error 'find second parameter should be an array, got ' + std.type(arr)\n", " else\n", " std.filter(function(i) arr[i] == value, std.range(0, std.length(arr) - 1)),\n", + "\n", + " // Three way comparison.\n", + " // TODO(sbarzowski): consider exposing and documenting it properly\n", + " __compare(v1, v2)::\n", + " local t1 = std.type(v1), t2 = std.type(v2);\n", + " if t1 != t2 then\n", + " error \"Comparison requires matching types. 
Got \" + t1 + \" and \" + t2\n", + " else if t1 == \"array\" then\n", + " std.__compare_array(v1, v2)\n", + " else if t1 == \"function\" || t1 == \"object\" || t1 == \"bool\" then\n", + " error \"Values of type \" + t1 + \" are not comparable.\"\n", + " else if v1 < v2 then -1\n", + " else if v1 > v2 then 1\n", + " else 0,\n", + "\n", + " __compare_array(arr1, arr2)::\n", + " local len1 = std.length(arr1), len2 = std.length(arr2);\n", + " local minLen = std.min(len1, len2);\n", + " local aux(i) =\n", + " if i < minLen then\n", + " local cmpRes = std.__compare(arr1[i], arr2[i]);\n", + " if cmpRes != 0 then\n", + " cmpRes\n", + " else\n", + " aux(i + 1) tailstrict\n", + " else\n", + " std.__compare(len1, len2);\n", + " aux(0),\n", + "\n", + " __array_less(arr1, arr2):: std.__compare_array(arr1, arr2) == -1,\n", + " __array_greater(arr1, arr2):: std.__compare_array(arr1, arr2) == 1,\n", + " __array_less_or_equal(arr1, arr2):: std.__compare_array(arr1, arr2) <= 0,\n", + " __array_greater_or_equal(arr1, arr2):: std.__compare_array(arr1, arr2) >= 0,\n", + "\n", "}\n", "\n", }, @@ -3709,7 +3817,7 @@ var _StdAst = &ast.DesugaredObject{ Column: int(1), }, End: ast.Location{ - Line: int(1382), + Line: int(1422), Column: int(2), }, File: p1, @@ -6272,7 +6380,7 @@ var _StdAst = &ast.DesugaredObject{ }, End: ast.Location{ Line: int(42), - Column: int(19), + Column: int(20), }, File: p1, }, @@ -6312,11 +6420,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(42), - Column: int(18), + Column: int(19), }, End: ast.Location{ Line: int(42), - Column: int(19), + Column: int(20), }, File: p1, }, @@ -7177,14 +7285,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(42), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(43), + Column: int(109), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -7198,11 +7306,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(42), - Column: int(22), + Column: int(23), }, End: ast.Location{ Line: int(42), - Column: int(86), + Column: int(87), }, File: p1, }, @@ -7218,11 +7326,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(42), - Column: int(22), + Column: int(23), }, End: ast.Location{ Line: int(42), - Column: int(80), + Column: int(81), }, File: p1, }, @@ -7243,11 +7351,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(42), - Column: int(83), + Column: int(84), }, End: ast.Location{ Line: int(42), - Column: int(86), + Column: int(87), }, File: p1, }, @@ -7268,14 +7376,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(41), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(43), + Column: int(109), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -7465,14 +7573,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(40), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(43), + Column: int(109), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -7662,14 +7770,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(39), + 
Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(43), + Column: int(109), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -10299,14 +10407,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(59), + Column: int(23), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(59), + Column: int(30), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -11312,14 +11420,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(66), + Column: int(23), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(66), + Column: int(36), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -13194,14 +13302,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(98), + Column: int(18), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(98), + Column: int(25), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -13540,14 +13648,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(96), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(100), + Column: int(25), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -13561,14 +13669,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(96), + Column: int(48), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(96), + Column: int(78), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -13744,14 +13852,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(95), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(100), + Column: int(25), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -14477,14 +14585,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(104), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(105), + Column: int(22), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -14521,14 +14629,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(103), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(105), + Column: int(22), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -15254,14 +15362,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(109), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(110), + Column: int(23), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -15298,14 +15406,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(108), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(110), + Column: int(23), }, - File: nil, + 
File: p1, }, Fodder: nil, Ctx: nil, @@ -16291,14 +16399,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(115), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(116), + Column: int(31), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -16313,11 +16421,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(115), - Column: int(34), + Column: int(33), }, End: ast.Location{ Line: int(115), - Column: int(105), + Column: int(104), }, File: p1, }, @@ -16334,11 +16442,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(115), - Column: int(34), + Column: int(33), }, End: ast.Location{ Line: int(115), - Column: int(89), + Column: int(88), }, File: p1, }, @@ -16359,11 +16467,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(115), - Column: int(92), + Column: int(91), }, End: ast.Location{ Line: int(115), - Column: int(105), + Column: int(104), }, File: p1, }, @@ -16380,11 +16488,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(115), - Column: int(92), + Column: int(91), }, End: ast.Location{ Line: int(115), - Column: int(102), + Column: int(101), }, File: p1, }, @@ -16400,11 +16508,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(115), - Column: int(92), + Column: int(91), }, End: ast.Location{ Line: int(115), - Column: int(95), + Column: int(94), }, File: p1, }, @@ -16453,11 +16561,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(115), - Column: int(103), + Column: int(102), }, End: ast.Location{ Line: int(115), - Column: int(104), + Column: int(103), }, File: p1, }, @@ -16488,14 +16596,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(114), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(116), + Column: int(31), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -16685,14 +16793,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(113), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(116), + Column: int(31), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -19496,14 +19604,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(122), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(132), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -19693,14 +19801,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(121), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(132), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -19715,11 +19823,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(121), - Column: int(34), + Column: int(33), }, End: ast.Location{ Line: int(121), - Column: int(110), + Column: int(109), }, File: p1, }, @@ -19736,11 +19844,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: 
ast.Location{ Line: int(121), - Column: int(34), + Column: int(33), }, End: ast.Location{ Line: int(121), - Column: int(94), + Column: int(93), }, File: p1, }, @@ -19761,11 +19869,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(121), - Column: int(97), + Column: int(96), }, End: ast.Location{ Line: int(121), - Column: int(110), + Column: int(109), }, File: p1, }, @@ -19782,11 +19890,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(121), - Column: int(97), + Column: int(96), }, End: ast.Location{ Line: int(121), - Column: int(107), + Column: int(106), }, File: p1, }, @@ -19802,11 +19910,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(121), - Column: int(97), + Column: int(96), }, End: ast.Location{ Line: int(121), - Column: int(100), + Column: int(99), }, File: p1, }, @@ -19855,11 +19963,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(121), - Column: int(108), + Column: int(107), }, End: ast.Location{ Line: int(121), - Column: int(109), + Column: int(108), }, File: p1, }, @@ -19890,14 +19998,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(120), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(132), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -20087,14 +20195,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(119), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(132), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -21426,14 +21534,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(145), + Column: int(25), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(145), + Column: int(44), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -22016,14 +22124,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(151), + Column: int(15), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(151), + Column: int(42), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -22744,14 +22852,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(154), + Column: int(51), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(154), + Column: int(78), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -23739,14 +23847,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(138), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(163), + Column: int(30), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -23783,14 +23891,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(137), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(163), + Column: int(30), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ 
-23827,14 +23935,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(136), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(163), + Column: int(30), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -23871,14 +23979,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(135), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(163), + Column: int(30), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -31328,14 +31436,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(210), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(222), + Column: int(66), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -31349,14 +31457,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(210), + Column: int(64), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(210), + Column: int(139), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -31608,14 +31716,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(209), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(222), + Column: int(66), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -31629,14 +31737,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(209), + Column: int(24), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(209), + Column: int(71), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -31784,14 +31892,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(208), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(222), + Column: int(66), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -31805,14 +31913,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(208), + Column: int(68), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(208), + Column: int(176), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -38780,14 +38888,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(263), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(263), + Column: int(62), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -38875,14 +38983,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(263), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(263), + Column: int(62), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -42710,14 +42818,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: 
ast.Location{ - Line: int(0), - Column: int(0), + Line: int(281), + Column: int(15), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(281), + Column: int(87), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -46024,14 +46132,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(302), + Column: int(20), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(302), + Column: int(50), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -46399,14 +46507,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(304), + Column: int(13), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(304), + Column: int(63), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -48693,14 +48801,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(314), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(328), + Column: int(26), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -51390,14 +51498,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(332), + Column: int(9), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(345), + Column: int(25), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -56839,14 +56947,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(353), + Column: int(11), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(376), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -57886,14 +57994,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(380), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(385), + Column: int(26), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -58760,14 +58868,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(389), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(394), + Column: int(10), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -64151,14 +64259,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(397), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(426), + Column: int(51), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -66540,14 +66648,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(431), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(448), + Column: int(8), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -70939,14 +71047,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(509), + Column: int(44), }, End: ast.Location{ - Line: 
int(0), - Column: int(0), + Line: int(509), + Column: int(53), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -73849,14 +73957,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(526), + Column: int(45), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(526), + Column: int(51), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -113207,7 +113315,7 @@ var _StdAst = &ast.DesugaredObject{ }, End: ast.Location{ Line: int(842), - Column: int(19), + Column: int(18), }, File: p1, }, @@ -113248,11 +113356,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(842), - Column: int(13), + Column: int(12), }, End: ast.Location{ Line: int(842), - Column: int(19), + Column: int(18), }, File: p1, }, @@ -113272,11 +113380,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(842), - Column: int(25), + Column: int(24), }, End: ast.Location{ Line: int(842), - Column: int(31), + Column: int(30), }, File: p1, }, @@ -114107,14 +114215,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(851), + Column: int(20), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(859), + Column: int(8), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -114545,14 +114653,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(854), + Column: int(11), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(854), + Column: int(64), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -114709,14 +114817,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(854), + Column: int(12), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(854), + Column: int(34), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -114997,14 +115105,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(856), + Column: int(12), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(856), + Column: int(44), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -115529,14 +115637,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(861), + Column: int(42), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(861), + Column: int(58), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -116175,14 +116283,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(863), + Column: int(26), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(866), + Column: int(6), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -118826,14 +118934,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(889), + Column: int(11), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(889), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -119076,14 +119184,14 @@ 
var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(892), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(892), + Column: int(70), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -119325,14 +119433,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(892), + Column: int(27), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(892), + Column: int(69), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -120466,14 +120574,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(904), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(904), + Column: int(70), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -120715,14 +120823,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(904), + Column: int(27), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(904), + Column: int(69), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -124653,14 +124761,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(936), + Column: int(34), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(939), + Column: int(35), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -126003,14 +126111,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(945), + Column: int(34), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(949), + Column: int(35), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -129498,14 +129606,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(970), + Column: int(51), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(970), + Column: int(81), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -132458,14 +132566,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1001), + Column: int(25), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1005), + Column: int(12), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -132604,14 +132712,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1001), + Column: int(25), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1005), + Column: int(12), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -135673,14 +135781,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1029), + Column: int(25), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1033), + Column: int(12), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -135818,14 +135926,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: 
ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1029), + Column: int(25), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1033), + Column: int(12), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -138072,14 +138180,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1042), + Column: int(20), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1042), + Column: int(83), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -138815,14 +138923,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1048), + Column: int(22), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1051), + Column: int(8), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -138978,14 +139086,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1049), + Column: int(9), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1049), + Column: int(73), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -139611,14 +139719,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1052), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1052), + Column: int(40), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -140086,14 +140194,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1054), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1054), + Column: int(70), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -140356,14 +140464,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1054), + Column: int(32), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1054), + Column: int(68), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -140867,14 +140975,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1056), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1056), + Column: int(41), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -142169,14 +142277,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1069), + Column: int(18), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1069), + Column: int(96), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -142332,14 +142440,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1069), + Column: int(19), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1069), + Column: int(63), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -143376,14 +143484,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1074), + Column: 
int(13), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1074), + Column: int(75), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -144711,14 +144819,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1083), + Column: int(46), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1083), + Column: int(51), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -144905,14 +145013,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1083), + Column: int(57), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1083), + Column: int(62), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -145278,14 +145386,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1085), + Column: int(26), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1085), + Column: int(87), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -145441,14 +145549,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1085), + Column: int(27), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1085), + Column: int(53), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -146099,14 +146207,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1086), + Column: int(51), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1086), + Column: int(77), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -152722,14 +152830,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1141), + Column: int(8), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1141), + Column: int(27), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -153027,14 +153135,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1142), + Column: int(13), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1142), + Column: int(53), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -160627,14 +160735,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1187), + Column: int(20), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1187), + Column: int(25), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -160983,14 +161091,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1189), + Column: int(20), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1189), + Column: int(25), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -163053,14 +163161,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1202), + Column: int(20), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: 
int(1202), + Column: int(29), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -163268,14 +163376,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1202), + Column: int(39), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1202), + Column: int(48), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -166607,14 +166715,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1226), + Column: int(15), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1226), + Column: int(20), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -167067,14 +167175,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1228), + Column: int(15), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1228), + Column: int(20), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -172423,14 +172531,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1258), + Column: int(15), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1258), + Column: int(20), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -175237,14 +175345,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1276), + Column: int(27), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1276), + Column: int(83), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -176047,14 +176155,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1279), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1288), + Column: int(8), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -176144,14 +176252,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1279), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1288), + Column: int(8), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -178647,7 +178755,7 @@ var _StdAst = &ast.DesugaredObject{ Ctx: nil, FreeVars: nil, }, - Value: "equals", + Value: "objectValues", Kind: ast.LiteralStringKind(1), BlockIndent: "", BlockTermIndent: "", @@ -178676,26 +178784,7 @@ var _StdAst = &ast.DesugaredObject{ Parameters: []ast.Parameter{ ast.Parameter{ NameFodder: ast.Fodder{}, - Name: "a", - EqFodder: nil, - DefaultArg: nil, - CommaFodder: ast.Fodder{}, - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1304), - Column: int(10), - }, - End: ast.Location{ - Line: int(1304), - Column: int(11), - }, - File: p1, - }, - }, - ast.Parameter{ - NameFodder: ast.Fodder{}, - Name: "b", + Name: "o", EqFodder: nil, DefaultArg: nil, CommaFodder: nil, @@ -178703,11 +178792,11 @@ var _StdAst = &ast.DesugaredObject{ FileName: "", Begin: ast.Location{ Line: int(1304), - Column: int(13), + Column: int(16), }, End: ast.Location{ Line: int(1304), - Column: int(14), + Column: int(17), }, File: p1, }, @@ -178715,7 +178804,7 @@ var _StdAst = 
&ast.DesugaredObject{ }, TrailingComma: false, ParenRightFodder: ast.Fodder{}, - Body: &ast.Local{ + Body: &ast.Apply{ NodeBase: ast.NodeBase{ LocRange: ast.LocationRange{ FileName: "", @@ -178724,96 +178813,136 @@ var _StdAst = &ast.DesugaredObject{ Column: int(5), }, End: ast.Location{ - Line: int(1338), - Column: int(34), + Line: int(1305), + Column: int(40), }, File: p1, }, - Fodder: ast.Fodder{ - ast.FodderElement{ - Kind: ast.FodderKind(0), - Blanks: int(0), - Indent: int(4), - Comment: []string{}, - }, - }, - Ctx: p14119, + Fodder: nil, + Ctx: nil, FreeVars: ast.Identifiers{ - "a", - "b", + "o", "std", }, }, - Binds: ast.LocalBinds{ - ast.LocalBind{ - VarFodder: ast.Fodder{}, - Variable: "ta", - EqFodder: ast.Fodder{}, - Body: &ast.Apply{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1305), - Column: int(16), - }, - End: ast.Location{ - Line: int(1305), - Column: int(27), - }, - File: p1, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), }, - Fodder: ast.Fodder{}, - Ctx: p14123, - FreeVars: ast.Identifiers{ - "a", - "std", + End: ast.Location{ + Line: int(0), + Column: int(0), }, + File: nil, }, - Target: &ast.Index{ + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: nil, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "flatMap", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: nil, + Id: nil, + }, + FodderLeft: nil, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Function{ NodeBase: ast.NodeBase{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1305), - Column: int(16), + Line: int(0), + Column: int(0), }, End: ast.Location{ - Line: int(1305), - Column: int(24), + Line: int(0), + Column: int(0), }, - File: p1, + File: nil, }, - Fodder: ast.Fodder{}, - Ctx: p14123, + Fodder: nil, + Ctx: nil, FreeVars: ast.Identifiers{ - "std", + "o", }, }, - Target: &ast.Var{ - NodeBase: ast.NodeBase{ + ParenLeftFodder: nil, + Parameters: []ast.Parameter{ + ast.Parameter{ + NameFodder: nil, + Name: "k", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: nil, LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1305), - Column: int(16), + Line: int(0), + Column: int(0), }, End: ast.Location{ - Line: int(1305), - Column: int(19), + Line: int(0), + Column: int(0), }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: nil, - FreeVars: ast.Identifiers{ - "std", + File: nil, }, }, - Id: "std", }, - LeftBracketFodder: ast.Fodder{}, - Index: &ast.LiteralString{ + TrailingComma: false, + ParenRightFodder: nil, + Body: &ast.Array{ NodeBase: ast.NodeBase{ LocRange: ast.LocationRange{ FileName: "", @@ -178829,121 +178958,110 @@ var _StdAst = &ast.DesugaredObject{ }, Fodder: nil, 
Ctx: nil, - FreeVars: nil, + FreeVars: ast.Identifiers{ + "k", + "o", + }, }, - Value: "type", - Kind: ast.LiteralStringKind(1), - BlockIndent: "", - BlockTermIndent: "", - }, - RightBracketFodder: ast.Fodder{}, - Id: nil, - }, - FodderLeft: ast.Fodder{}, - Arguments: ast.Arguments{ - Positional: []ast.CommaSeparatedExpr{ - ast.CommaSeparatedExpr{ - Expr: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1305), - Column: int(25), + Elements: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1305), + Column: int(6), + }, + End: ast.Location{ + Line: int(1305), + Column: int(10), + }, + File: p1, }, - End: ast.Location{ - Line: int(1305), - Column: int(26), + Fodder: ast.Fodder{}, + Ctx: p14132, + FreeVars: ast.Identifiers{ + "k", + "o", }, - File: p1, }, - Fodder: ast.Fodder{}, - Ctx: p14132, - FreeVars: ast.Identifiers{ - "a", + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1305), + Column: int(6), + }, + End: ast.Location{ + Line: int(1305), + Column: int(7), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14132, + FreeVars: ast.Identifiers{ + "o", + }, + }, + Id: "o", }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1305), + Column: int(8), + }, + End: ast.Location{ + Line: int(1305), + Column: int(9), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14132, + FreeVars: ast.Identifiers{ + "k", + }, + }, + Id: "k", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, }, - Id: "a", + CommaFodder: nil, }, - CommaFodder: nil, }, + TrailingComma: false, + CloseFodder: nil, }, - Named: nil, }, - TrailingComma: false, - TailStrict: false, - FodderRight: ast.Fodder{}, - TailStrictFodder: nil, - }, - Fun: nil, - CloseFodder: ast.Fodder{}, - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1305), - Column: int(11), - }, - End: ast.Location{ - Line: int(1305), - Column: int(27), - }, - File: p1, - }, - }, - }, - Body: &ast.Local{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1306), - Column: int(5), - }, - End: ast.Location{ - Line: int(1338), - Column: int(34), - }, - File: p1, - }, - Fodder: ast.Fodder{ - ast.FodderElement{ - Kind: ast.FodderKind(0), - Blanks: int(0), - Indent: int(4), - Comment: []string{}, - }, - }, - Ctx: p14119, - FreeVars: ast.Identifiers{ - "a", - "b", - "std", - "ta", + CommaFodder: nil, }, - }, - Binds: ast.LocalBinds{ - ast.LocalBind{ - VarFodder: ast.Fodder{}, - Variable: "tb", - EqFodder: ast.Fodder{}, - Body: &ast.Apply{ + ast.CommaSeparatedExpr{ + Expr: &ast.Apply{ NodeBase: ast.NodeBase{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1306), - Column: int(16), + Line: int(1305), + Column: int(20), }, End: ast.Location{ - Line: int(1306), - Column: int(27), + Line: int(1305), + Column: int(39), }, File: p1, }, Fodder: ast.Fodder{}, Ctx: p14139, FreeVars: ast.Identifiers{ - "b", + "o", "std", }, }, @@ -178952,12 +179070,12 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1306), - Column: int(16), + Line: int(1305), + Column: int(20), }, End: ast.Location{ - Line: int(1306), - Column: 
int(24), + Line: int(1305), + Column: int(36), }, File: p1, }, @@ -178972,12 +179090,922 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1306), - Column: int(16), + Line: int(1305), + Column: int(20), }, End: ast.Location{ - Line: int(1306), - Column: int(19), + Line: int(1305), + Column: int(23), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "objectFields", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, + }, + FodderLeft: ast.Fodder{}, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1305), + Column: int(37), + }, + End: ast.Location{ + Line: int(1305), + Column: int(38), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14148, + FreeVars: ast.Identifiers{ + "o", + }, + }, + Id: "o", + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: ast.Fodder{}, + TailStrictFodder: nil, + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: nil, + TailStrictFodder: nil, + }, + }, + PlusSuper: false, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1304), + Column: int(3), + }, + End: ast.Location{ + Line: int(1305), + Column: int(40), + }, + File: p1, + }, + }, + ast.DesugaredObjectField{ + Hide: ast.ObjectFieldHide(0), + Name: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "objectValuesAll", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + Body: &ast.Function{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: p11, + FreeVars: ast.Identifiers{ + "std", + }, + }, + ParenLeftFodder: ast.Fodder{}, + Parameters: []ast.Parameter{ + ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "o", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: nil, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1307), + Column: int(19), + }, + End: ast.Location{ + Line: int(1307), + Column: int(20), + }, + File: p1, + }, + }, + }, + TrailingComma: false, + ParenRightFodder: ast.Fodder{}, + Body: &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(5), + }, + End: ast.Location{ + Line: int(1308), + Column: int(43), + }, + File: p1, + }, + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "o", + "std", + }, + }, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: 
ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: nil, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "flatMap", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: nil, + Id: nil, + }, + FodderLeft: nil, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Function{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "o", + }, + }, + ParenLeftFodder: nil, + Parameters: []ast.Parameter{ + ast.Parameter{ + NameFodder: nil, + Name: "k", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: nil, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + }, + }, + TrailingComma: false, + ParenRightFodder: nil, + Body: &ast.Array{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: ast.Identifiers{ + "k", + "o", + }, + }, + Elements: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(6), + }, + End: ast.Location{ + Line: int(1308), + Column: int(10), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14169, + FreeVars: ast.Identifiers{ + "k", + "o", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(6), + }, + End: ast.Location{ + Line: int(1308), + Column: int(7), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14169, + FreeVars: ast.Identifiers{ + "o", + }, + }, + Id: "o", + }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(8), + }, + End: ast.Location{ + Line: int(1308), + Column: int(9), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14169, + FreeVars: ast.Identifiers{ + "k", + }, + }, + Id: "k", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, + }, + CommaFodder: nil, + }, + }, + TrailingComma: false, + CloseFodder: nil, + }, + }, + CommaFodder: nil, + }, + ast.CommaSeparatedExpr{ + Expr: &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + 
FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(20), + }, + End: ast.Location{ + Line: int(1308), + Column: int(42), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14176, + FreeVars: ast.Identifiers{ + "o", + "std", + }, + }, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(20), + }, + End: ast.Location{ + Line: int(1308), + Column: int(39), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14176, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(20), + }, + End: ast.Location{ + Line: int(1308), + Column: int(23), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "objectFieldsAll", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, + }, + FodderLeft: ast.Fodder{}, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1308), + Column: int(40), + }, + End: ast.Location{ + Line: int(1308), + Column: int(41), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14185, + FreeVars: ast.Identifiers{ + "o", + }, + }, + Id: "o", + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: ast.Fodder{}, + TailStrictFodder: nil, + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: nil, + TailStrictFodder: nil, + }, + }, + PlusSuper: false, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1307), + Column: int(3), + }, + End: ast.Location{ + Line: int(1308), + Column: int(43), + }, + File: p1, + }, + }, + ast.DesugaredObjectField{ + Hide: ast.ObjectFieldHide(0), + Name: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "equals", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + Body: &ast.Function{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: p11, + FreeVars: ast.Identifiers{ + "std", + }, + }, + ParenLeftFodder: ast.Fodder{}, + Parameters: []ast.Parameter{ + ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "a", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: ast.Fodder{}, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1310), + Column: int(10), + }, + End: ast.Location{ + Line: int(1310), + Column: int(11), + }, + File: p1, + }, + }, + 
ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "b", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: nil, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1310), + Column: int(13), + }, + End: ast.Location{ + Line: int(1310), + Column: int(14), + }, + File: p1, + }, + }, + }, + TrailingComma: false, + ParenRightFodder: ast.Fodder{}, + Body: &ast.Local{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1311), + Column: int(5), + }, + End: ast.Location{ + Line: int(1344), + Column: int(34), + }, + File: p1, + }, + Fodder: ast.Fodder{ + ast.FodderElement{ + Kind: ast.FodderKind(0), + Blanks: int(0), + Indent: int(4), + Comment: []string{}, + }, + }, + Ctx: p14193, + FreeVars: ast.Identifiers{ + "a", + "b", + "std", + }, + }, + Binds: ast.LocalBinds{ + ast.LocalBind{ + VarFodder: ast.Fodder{}, + Variable: "ta", + EqFodder: ast.Fodder{}, + Body: &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1311), + Column: int(16), + }, + End: ast.Location{ + Line: int(1311), + Column: int(27), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14197, + FreeVars: ast.Identifiers{ + "a", + "std", + }, + }, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1311), + Column: int(16), + }, + End: ast.Location{ + Line: int(1311), + Column: int(24), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14197, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1311), + Column: int(16), + }, + End: ast.Location{ + Line: int(1311), + Column: int(19), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "type", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, + }, + FodderLeft: ast.Fodder{}, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1311), + Column: int(25), + }, + End: ast.Location{ + Line: int(1311), + Column: int(26), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14206, + FreeVars: ast.Identifiers{ + "a", + }, + }, + Id: "a", + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: ast.Fodder{}, + TailStrictFodder: nil, + }, + Fun: nil, + CloseFodder: ast.Fodder{}, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1311), + Column: int(11), + }, + End: ast.Location{ + Line: int(1311), + Column: int(27), + }, + File: p1, + }, + }, + }, + Body: &ast.Local{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1312), + Column: int(5), + }, + End: ast.Location{ + Line: int(1344), + Column: int(34), + }, + File: p1, + }, + Fodder: ast.Fodder{ + 
ast.FodderElement{ + Kind: ast.FodderKind(0), + Blanks: int(0), + Indent: int(4), + Comment: []string{}, + }, + }, + Ctx: p14193, + FreeVars: ast.Identifiers{ + "a", + "b", + "std", + "ta", + }, + }, + Binds: ast.LocalBinds{ + ast.LocalBind{ + VarFodder: ast.Fodder{}, + Variable: "tb", + EqFodder: ast.Fodder{}, + Body: &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1312), + Column: int(16), + }, + End: ast.Location{ + Line: int(1312), + Column: int(27), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14213, + FreeVars: ast.Identifiers{ + "b", + "std", + }, + }, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1312), + Column: int(16), + }, + End: ast.Location{ + Line: int(1312), + Column: int(24), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p14213, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1312), + Column: int(16), + }, + End: ast.Location{ + Line: int(1312), + Column: int(19), }, File: p1, }, @@ -179025,17 +180053,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1306), + Line: int(1312), Column: int(25), }, End: ast.Location{ - Line: int(1306), + Line: int(1312), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14148, + Ctx: p14222, FreeVars: ast.Identifiers{ "b", }, @@ -179057,11 +180085,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1306), + Line: int(1312), Column: int(11), }, End: ast.Location{ - Line: int(1306), + Line: int(1312), Column: int(27), }, File: p1, @@ -179073,11 +180101,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(5), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(34), }, File: p1, @@ -179090,7 +180118,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -179104,17 +180132,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(8), }, End: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(36), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", "ta", @@ -179127,17 +180155,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(9), }, End: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(36), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", "ta", @@ -179149,17 +180177,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(9), }, End: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", }, @@ -179169,11 +180197,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(9), 
}, End: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(12), }, File: p1, @@ -179222,17 +180250,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(29), }, End: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14164, + Ctx: p14238, FreeVars: ast.Identifiers{ "ta", }, @@ -179247,17 +180275,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(33), }, End: ast.Location{ - Line: int(1307), + Line: int(1313), Column: int(35), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14164, + Ctx: p14238, FreeVars: ast.Identifiers{ "tb", }, @@ -179281,11 +180309,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1308), + Line: int(1314), Column: int(7), }, End: ast.Location{ - Line: int(1308), + Line: int(1314), Column: int(12), }, File: p1, @@ -179298,7 +180326,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: nil, }, Value: false, @@ -179316,11 +180344,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(7), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(34), }, File: p1, @@ -179333,7 +180361,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -179346,17 +180374,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(10), }, End: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(42), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", "ta", @@ -179367,17 +180395,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(10), }, End: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(29), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", }, @@ -179387,11 +180415,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(10), }, End: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(13), }, File: p1, @@ -179440,17 +180468,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(30), }, End: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(32), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14183, + Ctx: p14257, FreeVars: ast.Identifiers{ "ta", }, @@ -179465,17 +180493,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(34), }, End: ast.Location{ - Line: int(1310), + Line: int(1316), Column: int(41), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14183, + Ctx: p14257, FreeVars: nil, }, Value: "array", @@ -179499,11 +180527,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1311), + 
Line: int(1317), Column: int(9), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(23), }, File: p1, @@ -179516,7 +180544,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -179533,17 +180561,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(20), }, End: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14191, + Ctx: p14265, FreeVars: ast.Identifiers{ "a", "std", @@ -179554,17 +180582,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(20), }, End: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14191, + Ctx: p14265, FreeVars: ast.Identifiers{ "std", }, @@ -179574,11 +180602,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(20), }, End: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(23), }, File: p1, @@ -179627,17 +180655,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(31), }, End: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(32), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14200, + Ctx: p14274, FreeVars: ast.Identifiers{ "a", }, @@ -179659,11 +180687,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(15), }, End: ast.Location{ - Line: int(1311), + Line: int(1317), Column: int(33), }, File: p1, @@ -179675,11 +180703,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(9), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(23), }, File: p1, @@ -179692,7 +180720,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -179705,17 +180733,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(12), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(51), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "b", "la", @@ -179728,17 +180756,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(13), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(51), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "b", "la", @@ -179750,17 +180778,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(13), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(32), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", }, @@ -179770,11 +180798,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", 
Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(13), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(16), }, File: p1, @@ -179823,17 +180851,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(33), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(35), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14216, + Ctx: p14290, FreeVars: ast.Identifiers{ "la", }, @@ -179848,17 +180876,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(37), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(50), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14216, + Ctx: p14290, FreeVars: ast.Identifiers{ "b", "std", @@ -179869,17 +180897,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(37), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(47), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14216, + Ctx: p14290, FreeVars: ast.Identifiers{ "std", }, @@ -179889,11 +180917,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(37), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(40), }, File: p1, @@ -179942,17 +180970,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(48), }, End: ast.Location{ - Line: int(1312), + Line: int(1318), Column: int(49), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14227, + Ctx: p14301, FreeVars: ast.Identifiers{ "b", }, @@ -179986,11 +181014,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1313), + Line: int(1319), Column: int(11), }, End: ast.Location{ - Line: int(1313), + Line: int(1319), Column: int(16), }, File: p1, @@ -180003,7 +181031,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: nil, }, Value: false, @@ -180021,11 +181049,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(11), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(23), }, File: p1, @@ -180038,7 +181066,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -180055,17 +181083,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(17), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(31), }, File: p1, }, Fodder: nil, - Ctx: p14237, + Ctx: p14311, FreeVars: ast.Identifiers{ "aux", "la", @@ -180082,11 +181110,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(21), }, End: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(22), }, File: p1, @@ -180101,11 +181129,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1315), + Line: int(1321), 
Column: int(24), }, End: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(25), }, File: p1, @@ -180120,11 +181148,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(27), }, End: ast.Location{ - Line: int(1315), + Line: int(1321), Column: int(28), }, File: p1, @@ -180138,11 +181166,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(13), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(31), }, File: p1, @@ -180155,7 +181183,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "a", "aux", @@ -180169,17 +181197,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(16), }, End: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(23), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "i", "la", @@ -180190,17 +181218,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(16), }, End: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(17), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "i", }, @@ -180214,17 +181242,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(21), }, End: ast.Location{ - Line: int(1316), + Line: int(1322), Column: int(23), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "la", }, @@ -180238,11 +181266,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1317), + Line: int(1323), Column: int(15), }, End: ast.Location{ - Line: int(1317), + Line: int(1323), Column: int(19), }, File: p1, @@ -180255,7 +181283,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14242, + Ctx: p14316, FreeVars: nil, }, Value: true, @@ -180273,17 +181301,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(18), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "a", "aux", @@ -180296,17 +181324,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(21), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "a", "b", @@ -180318,17 +181346,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(21), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(25), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "a", "i", @@ -180339,17 +181367,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - 
Line: int(1318), + Line: int(1324), Column: int(21), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "a", }, @@ -180362,17 +181390,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(23), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(24), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "i", }, @@ -180389,17 +181417,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(29), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "b", "i", @@ -180410,17 +181438,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(29), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "b", }, @@ -180433,17 +181461,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(31), }, End: ast.Location{ - Line: int(1318), + Line: int(1324), Column: int(32), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "i", }, @@ -180460,11 +181488,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1319), + Line: int(1325), Column: int(15), }, End: ast.Location{ - Line: int(1319), + Line: int(1325), Column: int(20), }, File: p1, @@ -180477,7 +181505,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14242, + Ctx: p14316, FreeVars: nil, }, Value: false, @@ -180495,17 +181523,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(15), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "a", "aux", @@ -180518,11 +181546,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(15), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(18), }, File: p1, @@ -180535,7 +181563,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14242, + Ctx: p14316, FreeVars: ast.Identifiers{ "aux", }, @@ -180551,17 +181579,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(19), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(20), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14279, + Ctx: p14353, FreeVars: ast.Identifiers{ "a", }, @@ -180576,17 +181604,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(22), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(23), }, File: p1, }, Fodder: 
ast.Fodder{}, - Ctx: p14279, + Ctx: p14353, FreeVars: ast.Identifiers{ "b", }, @@ -180601,17 +181629,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(25), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14279, + Ctx: p14353, FreeVars: ast.Identifiers{ "i", }, @@ -180621,17 +181649,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(25), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14279, + Ctx: p14353, FreeVars: ast.Identifiers{ "i", }, @@ -180645,17 +181673,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(29), }, End: ast.Location{ - Line: int(1321), + Line: int(1327), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14279, + Ctx: p14353, FreeVars: nil, }, OriginalString: "1", @@ -180695,17 +181723,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(11), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(23), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "aux", @@ -180717,11 +181745,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(11), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(14), }, File: p1, @@ -180734,7 +181762,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "aux", }, @@ -180750,17 +181778,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(15), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(16), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14295, + Ctx: p14369, FreeVars: ast.Identifiers{ "a", }, @@ -180775,17 +181803,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(18), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14295, + Ctx: p14369, FreeVars: ast.Identifiers{ "b", }, @@ -180800,17 +181828,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(21), }, End: ast.Location{ - Line: int(1322), + Line: int(1328), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14295, + Ctx: p14369, FreeVars: nil, }, OriginalString: "0", @@ -180841,17 +181869,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(12), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(34), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -180864,17 +181892,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ 
- Line: int(1323), + Line: int(1329), Column: int(15), }, End: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(48), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", "ta", @@ -180885,17 +181913,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(15), }, End: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(34), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", }, @@ -180905,11 +181933,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(15), }, End: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(18), }, File: p1, @@ -180958,17 +181986,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(35), }, End: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(37), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14312, + Ctx: p14386, FreeVars: ast.Identifiers{ "ta", }, @@ -180983,17 +182011,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(39), }, End: ast.Location{ - Line: int(1323), + Line: int(1329), Column: int(47), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14312, + Ctx: p14386, FreeVars: nil, }, Value: "object", @@ -181017,11 +182045,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(9), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(23), }, File: p1, @@ -181034,7 +182062,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -181051,17 +182079,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(24), }, End: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(43), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14320, + Ctx: p14394, FreeVars: ast.Identifiers{ "a", "std", @@ -181072,17 +182100,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(24), }, End: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(40), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14320, + Ctx: p14394, FreeVars: ast.Identifiers{ "std", }, @@ -181092,11 +182120,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(24), }, End: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(27), }, File: p1, @@ -181145,17 +182173,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(41), }, End: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(42), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14329, + Ctx: p14403, FreeVars: ast.Identifiers{ "a", }, @@ -181177,11 +182205,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1324), + 
Line: int(1330), Column: int(15), }, End: ast.Location{ - Line: int(1324), + Line: int(1330), Column: int(43), }, File: p1, @@ -181193,11 +182221,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(9), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(23), }, File: p1, @@ -181210,7 +182238,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -181228,17 +182256,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(25), }, End: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(43), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14336, + Ctx: p14410, FreeVars: ast.Identifiers{ "fields", "std", @@ -181249,17 +182277,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(25), }, End: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(35), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14336, + Ctx: p14410, FreeVars: ast.Identifiers{ "std", }, @@ -181269,11 +182297,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(25), }, End: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(28), }, File: p1, @@ -181322,17 +182350,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(36), }, End: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(42), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14345, + Ctx: p14419, FreeVars: ast.Identifiers{ "fields", }, @@ -181354,11 +182382,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(15), }, End: ast.Location{ - Line: int(1325), + Line: int(1331), Column: int(43), }, File: p1, @@ -181370,11 +182398,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(9), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(23), }, File: p1, @@ -181387,7 +182415,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -181401,17 +182429,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(12), }, End: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(41), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "b", "fields", @@ -181423,17 +182451,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(12), }, End: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(18), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "fields", }, @@ -181447,17 +182475,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(22), }, 
End: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(41), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "b", "std", @@ -181468,17 +182496,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(22), }, End: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(38), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", }, @@ -181488,11 +182516,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(22), }, End: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(25), }, File: p1, @@ -181541,17 +182569,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(39), }, End: ast.Location{ - Line: int(1326), + Line: int(1332), Column: int(40), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14363, + Ctx: p14437, FreeVars: ast.Identifiers{ "b", }, @@ -181575,11 +182603,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1327), + Line: int(1333), Column: int(11), }, End: ast.Location{ - Line: int(1327), + Line: int(1333), Column: int(16), }, File: p1, @@ -181592,7 +182620,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: nil, }, Value: false, @@ -181610,11 +182638,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(11), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(23), }, File: p1, @@ -181627,7 +182655,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -181645,17 +182673,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(17), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(31), }, File: p1, }, Fodder: nil, - Ctx: p14373, + Ctx: p14447, FreeVars: ast.Identifiers{ "aux", "fields", @@ -181673,11 +182701,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(21), }, End: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(22), }, File: p1, @@ -181692,11 +182720,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(24), }, End: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(25), }, File: p1, @@ -181711,11 +182739,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(27), }, End: ast.Location{ - Line: int(1329), + Line: int(1335), Column: int(28), }, File: p1, @@ -181729,11 +182757,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(13), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(31), }, File: p1, @@ -181746,7 +182774,7 @@ var _StdAst = &ast.DesugaredObject{ 
Comment: []string{}, }, }, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", "aux", @@ -181761,17 +182789,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(16), }, End: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "i", "lfields", @@ -181782,17 +182810,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(16), }, End: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(17), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "i", }, @@ -181806,17 +182834,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(21), }, End: ast.Location{ - Line: int(1330), + Line: int(1336), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "lfields", }, @@ -181830,11 +182858,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1331), + Line: int(1337), Column: int(15), }, End: ast.Location{ - Line: int(1331), + Line: int(1337), Column: int(19), }, File: p1, @@ -181847,7 +182875,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14378, + Ctx: p14452, FreeVars: nil, }, Value: true, @@ -181865,17 +182893,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(18), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", "aux", @@ -181889,17 +182917,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(21), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(54), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", "b", @@ -181917,17 +182945,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(31), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(40), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14395, + Ctx: p14469, FreeVars: ast.Identifiers{ "fields", "i", @@ -181938,17 +182966,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(31), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(37), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14395, + Ctx: p14469, FreeVars: ast.Identifiers{ "fields", }, @@ -181961,17 +182989,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(38), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(39), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14395, + Ctx: p14469, FreeVars: ast.Identifiers{ "i", }, @@ -181986,11 +183014,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ 
FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(27), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(40), }, File: p1, @@ -182002,17 +183030,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(42), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(54), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", "b", @@ -182024,17 +183052,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(42), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(46), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", "f", @@ -182045,17 +183073,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(42), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(43), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", }, @@ -182068,17 +183096,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(44), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(45), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "f", }, @@ -182095,17 +183123,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(50), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(54), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "b", "f", @@ -182116,17 +183144,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(50), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(51), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "b", }, @@ -182139,17 +183167,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(52), }, End: ast.Location{ - Line: int(1332), + Line: int(1338), Column: int(53), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "f", }, @@ -182167,11 +183195,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1333), + Line: int(1339), Column: int(15), }, End: ast.Location{ - Line: int(1333), + Line: int(1339), Column: int(20), }, File: p1, @@ -182184,7 +183212,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14378, + Ctx: p14452, FreeVars: nil, }, Value: false, @@ -182202,17 +183230,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(15), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "a", "aux", @@ -182225,11 +183253,11 @@ var 
_StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(15), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(18), }, File: p1, @@ -182242,7 +183270,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14378, + Ctx: p14452, FreeVars: ast.Identifiers{ "aux", }, @@ -182258,17 +183286,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(19), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(20), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14425, + Ctx: p14499, FreeVars: ast.Identifiers{ "a", }, @@ -182283,17 +183311,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(22), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(23), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14425, + Ctx: p14499, FreeVars: ast.Identifiers{ "b", }, @@ -182308,17 +183336,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(25), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14425, + Ctx: p14499, FreeVars: ast.Identifiers{ "i", }, @@ -182328,17 +183356,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(25), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14425, + Ctx: p14499, FreeVars: ast.Identifiers{ "i", }, @@ -182352,17 +183380,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(29), }, End: ast.Location{ - Line: int(1335), + Line: int(1341), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14425, + Ctx: p14499, FreeVars: nil, }, OriginalString: "1", @@ -182402,17 +183430,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(11), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(23), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "aux", @@ -182424,11 +183452,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(11), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(14), }, File: p1, @@ -182441,7 +183469,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "aux", }, @@ -182457,17 +183485,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(15), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(16), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14441, + Ctx: p14515, FreeVars: ast.Identifiers{ "a", }, @@ -182482,17 +183510,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1336), + Line: int(1342), Column: 
int(18), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14441, + Ctx: p14515, FreeVars: ast.Identifiers{ "b", }, @@ -182507,17 +183535,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(21), }, End: ast.Location{ - Line: int(1336), + Line: int(1342), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14441, + Ctx: p14515, FreeVars: nil, }, OriginalString: "0", @@ -182549,17 +183577,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(9), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(34), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "a", "b", @@ -182571,17 +183599,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(9), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14119, + Ctx: p14193, FreeVars: ast.Identifiers{ "std", }, @@ -182591,11 +183619,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(9), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(12), }, File: p1, @@ -182651,17 +183679,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(29), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14457, + Ctx: p14531, FreeVars: ast.Identifiers{ "a", }, @@ -182676,17 +183704,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(32), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14457, + Ctx: p14531, FreeVars: ast.Identifiers{ "b", }, @@ -182713,11 +183741,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1304), + Line: int(1310), Column: int(3), }, End: ast.Location{ - Line: int(1338), + Line: int(1344), Column: int(34), }, File: p1, @@ -182779,11 +183807,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1341), + Line: int(1347), Column: int(15), }, End: ast.Location{ - Line: int(1341), + Line: int(1347), Column: int(16), }, File: p1, @@ -182798,11 +183826,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1341), + Line: int(1347), Column: int(18), }, End: ast.Location{ - Line: int(1341), + Line: int(1347), Column: int(19), }, File: p1, @@ -182816,11 +183844,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(5), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(80), }, File: p1, @@ -182833,7 +183861,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14467, + Ctx: p14541, FreeVars: ast.Identifiers{ "f", "r", @@ -182850,17 +183878,17 
@@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(17), }, End: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(34), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14471, + Ctx: p14545, FreeVars: ast.Identifiers{ "f", "std", @@ -182871,17 +183899,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(17), }, End: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14471, + Ctx: p14545, FreeVars: ast.Identifiers{ "std", }, @@ -182891,11 +183919,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(17), }, End: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(20), }, File: p1, @@ -182944,17 +183972,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(27), }, End: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14480, + Ctx: p14554, FreeVars: ast.Identifiers{ "f", }, @@ -182969,17 +183997,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(30), }, End: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14480, + Ctx: p14554, FreeVars: nil, }, Value: "/", @@ -183002,11 +184030,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(11), }, End: ast.Location{ - Line: int(1342), + Line: int(1348), Column: int(34), }, File: p1, @@ -183018,17 +184046,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(5), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(80), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14467, + Ctx: p14541, FreeVars: ast.Identifiers{ "arr", "r", @@ -183040,17 +184068,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(5), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(13), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14467, + Ctx: p14541, FreeVars: ast.Identifiers{ "std", }, @@ -183060,11 +184088,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(5), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(8), }, File: p1, @@ -183120,17 +184148,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(14), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(17), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14493, + Ctx: p14567, FreeVars: nil, }, Value: "/", @@ -183146,17 +184174,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(19), }, End: ast.Location{ - Line: 
int(1343), + Line: int(1349), Column: int(79), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14493, + Ctx: p14567, FreeVars: ast.Identifiers{ "arr", "r", @@ -183168,17 +184196,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(19), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(73), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14493, + Ctx: p14567, FreeVars: ast.Identifiers{ "arr", "std", @@ -183189,17 +184217,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(19), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(32), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14493, + Ctx: p14567, FreeVars: ast.Identifiers{ "std", }, @@ -183209,11 +184237,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(19), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(22), }, File: p1, @@ -183262,17 +184290,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(33), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(52), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14505, + Ctx: p14579, FreeVars: ast.Identifiers{ "arr", "std", @@ -183283,17 +184311,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(33), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(48), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14505, + Ctx: p14579, FreeVars: ast.Identifiers{ "arr", "std", @@ -183304,17 +184332,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(33), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(43), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14505, + Ctx: p14579, FreeVars: ast.Identifiers{ "std", }, @@ -183324,11 +184352,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(33), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(36), }, File: p1, @@ -183377,17 +184405,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(44), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(47), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14516, + Ctx: p14590, FreeVars: ast.Identifiers{ "arr", }, @@ -183411,17 +184439,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(51), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(52), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14505, + Ctx: p14579, FreeVars: nil, }, OriginalString: "1", @@ -183435,17 +184463,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(54), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(72), }, File: p1, }, Fodder: 
ast.Fodder{}, - Ctx: p14505, + Ctx: p14579, FreeVars: ast.Identifiers{ "arr", }, @@ -183461,11 +184489,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(63), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(64), }, File: p1, @@ -183479,17 +184507,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(66), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(72), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14523, + Ctx: p14597, FreeVars: ast.Identifiers{ "arr", "i", @@ -183500,17 +184528,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(66), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(69), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14523, + Ctx: p14597, FreeVars: ast.Identifiers{ "arr", }, @@ -183523,17 +184551,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(70), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(71), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14523, + Ctx: p14597, FreeVars: ast.Identifiers{ "i", }, @@ -183561,17 +184589,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(76), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(79), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14493, + Ctx: p14567, FreeVars: ast.Identifiers{ "r", }, @@ -183583,17 +184611,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(77), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(78), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14533, + Ctx: p14607, FreeVars: ast.Identifiers{ "r", }, @@ -183623,11 +184651,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1341), + Line: int(1347), Column: int(3), }, End: ast.Location{ - Line: int(1343), + Line: int(1349), Column: int(80), }, File: p1, @@ -183690,11 +184718,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1345), + Line: int(1351), Column: int(9), }, End: ast.Location{ - Line: int(1345), + Line: int(1351), Column: int(10), }, File: p1, @@ -183708,11 +184736,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1346), + Line: int(1352), Column: int(5), }, End: ast.Location{ - Line: int(1362), + Line: int(1368), Column: int(8), }, File: p1, @@ -183725,7 +184753,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "$", "a", @@ -183742,17 +184770,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1346), + Line: int(1352), Column: int(11), }, End: ast.Location{ - Line: int(1354), + Line: int(1360), Column: int(13), }, File: p1, }, Fodder: nil, - Ctx: p14545, + Ctx: p14619, FreeVars: ast.Identifiers{ "std", }, @@ -183768,11 +184796,11 @@ var _StdAst = 
&ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1346), + Line: int(1352), Column: int(21), }, End: ast.Location{ - Line: int(1346), + Line: int(1352), Column: int(22), }, File: p1, @@ -183786,11 +184814,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(7), }, End: ast.Location{ - Line: int(1354), + Line: int(1360), Column: int(13), }, File: p1, @@ -183803,7 +184831,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -183814,17 +184842,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(10), }, End: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", }, @@ -183834,17 +184862,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(10), }, End: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(11), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", }, @@ -183858,17 +184886,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(15), }, End: ast.Location{ - Line: int(1347), + Line: int(1353), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: nil, }, }, @@ -183879,11 +184907,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1348), + Line: int(1354), Column: int(9), }, End: ast.Location{ - Line: int(1348), + Line: int(1354), Column: int(14), }, File: p1, @@ -183896,7 +184924,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14550, + Ctx: p14624, FreeVars: nil, }, Value: false, @@ -183914,17 +184942,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(12), }, End: ast.Location{ - Line: int(1354), + Line: int(1360), Column: int(13), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -183935,17 +184963,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(15), }, End: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(29), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -183956,17 +184984,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(15), }, End: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "std", }, @@ -183976,11 +185004,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(15), }, End: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(18), }, File: p1, @@ -184029,17 
+185057,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(27), }, End: ast.Location{ - Line: int(1349), + Line: int(1355), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14571, + Ctx: p14645, FreeVars: ast.Identifiers{ "b", }, @@ -184062,17 +185090,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(9), }, End: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -184083,17 +185111,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(9), }, End: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -184104,17 +185132,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(9), }, End: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "std", }, @@ -184124,11 +185152,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(9), }, End: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(12), }, File: p1, @@ -184184,17 +185212,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(20), }, End: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(21), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14585, + Ctx: p14659, FreeVars: ast.Identifiers{ "b", }, @@ -184218,17 +185246,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(25), }, End: ast.Location{ - Line: int(1350), + Line: int(1356), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: nil, }, OriginalString: "0", @@ -184247,17 +185275,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(12), }, End: ast.Location{ - Line: int(1354), + Line: int(1360), Column: int(13), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -184268,17 +185296,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(15), }, End: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -184289,17 +185317,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(15), }, End: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(27), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "std", }, @@ 
-184309,11 +185337,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(15), }, End: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(18), }, File: p1, @@ -184362,17 +185390,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(28), }, End: ast.Location{ - Line: int(1351), + Line: int(1357), Column: int(29), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14600, + Ctx: p14674, FreeVars: ast.Identifiers{ "b", }, @@ -184395,17 +185423,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(9), }, End: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -184416,17 +185444,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(9), }, End: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "b", "std", @@ -184437,17 +185465,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(9), }, End: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: ast.Identifiers{ "std", }, @@ -184457,11 +185485,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(9), }, End: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(12), }, File: p1, @@ -184517,17 +185545,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(20), }, End: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(21), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14614, + Ctx: p14688, FreeVars: ast.Identifiers{ "b", }, @@ -184551,17 +185579,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(25), }, End: ast.Location{ - Line: int(1352), + Line: int(1358), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14550, + Ctx: p14624, FreeVars: nil, }, OriginalString: "0", @@ -184580,11 +185608,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1354), + Line: int(1360), Column: int(9), }, End: ast.Location{ - Line: int(1354), + Line: int(1360), Column: int(13), }, File: p1, @@ -184597,7 +185625,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14550, + Ctx: p14624, FreeVars: nil, }, Value: true, @@ -184627,11 +185655,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(5), }, End: ast.Location{ - Line: int(1362), + Line: int(1368), Column: int(8), }, File: p1, @@ -184644,7 +185672,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14541, + Ctx: p14615, FreeVars: 
ast.Identifiers{ "$", "a", @@ -184657,17 +185685,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(8), }, End: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "a", "std", @@ -184678,17 +185706,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(8), }, End: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "std", }, @@ -184698,11 +185726,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(8), }, End: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(11), }, File: p1, @@ -184751,17 +185779,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(20), }, End: ast.Location{ - Line: int(1355), + Line: int(1361), Column: int(21), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14632, + Ctx: p14706, FreeVars: ast.Identifiers{ "a", }, @@ -184784,14 +185812,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1362), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1362), + Column: int(57), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -184949,17 +185977,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(35), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(56), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "$", "isContent", @@ -184971,17 +185999,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(35), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(44), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "isContent", }, @@ -184997,17 +186025,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(45), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(55), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14653, + Ctx: p14727, FreeVars: ast.Identifiers{ "$", "x", @@ -185018,17 +186046,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(45), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(52), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14653, + Ctx: p14727, FreeVars: ast.Identifiers{ "$", }, @@ -185038,11 +186066,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(45), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(46), }, File: p1, @@ -185091,17 +186119,17 @@ var _StdAst = &ast.DesugaredObject{ 
LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(53), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(54), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14662, + Ctx: p14736, FreeVars: ast.Identifiers{ "x", }, @@ -185157,17 +186185,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(8), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(20), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14668, + Ctx: p14742, FreeVars: ast.Identifiers{ "std", "x", @@ -185178,17 +186206,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(8), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(17), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14668, + Ctx: p14742, FreeVars: ast.Identifiers{ "std", }, @@ -185198,11 +186226,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(8), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(11), }, File: p1, @@ -185251,17 +186279,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(18), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14677, + Ctx: p14751, FreeVars: ast.Identifiers{ "x", }, @@ -185317,17 +186345,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(30), }, End: ast.Location{ - Line: int(1356), + Line: int(1362), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "a", }, @@ -185357,17 +186385,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(10), }, End: ast.Location{ - Line: int(1362), + Line: int(1368), Column: int(8), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "$", "a", @@ -185380,17 +186408,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(13), }, End: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(28), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "a", "std", @@ -185401,17 +186429,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(13), }, End: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(25), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "std", }, @@ -185421,11 +186449,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(13), }, End: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(16), }, File: p1, @@ -185474,17 +186502,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1357), + Line: 
int(1363), Column: int(26), }, End: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(27), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14694, + Ctx: p14768, FreeVars: ast.Identifiers{ "a", }, @@ -185507,14 +186535,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1363), + Column: int(34), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1367), + Column: int(6), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -185603,14 +186631,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1363), + Column: int(34), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1367), + Column: int(6), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -185770,17 +186798,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(10), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(36), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "a", "isContent", @@ -185793,17 +186821,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(10), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "isContent", }, @@ -185819,17 +186847,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(20), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(35), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14723, + Ctx: p14797, FreeVars: ast.Identifiers{ "a", "std", @@ -185841,17 +186869,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(20), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(29), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14723, + Ctx: p14797, FreeVars: ast.Identifiers{ "std", }, @@ -185861,11 +186889,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(20), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(23), }, File: p1, @@ -185914,17 +186942,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(30), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(34), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14732, + Ctx: p14806, FreeVars: ast.Identifiers{ "a", "x", @@ -185935,17 +186963,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(30), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14732, + Ctx: p14806, FreeVars: ast.Identifiers{ "a", }, @@ -185958,17 +186986,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1360), + Line: int(1366), Column: 
int(32), }, End: ast.Location{ - Line: int(1360), + Line: int(1366), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14732, + Ctx: p14806, FreeVars: ast.Identifiers{ "x", }, @@ -186028,17 +187056,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1357), + Line: int(1363), Column: int(34), }, End: ast.Location{ - Line: int(1361), + Line: int(1367), Column: int(6), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "$", "a", @@ -186054,17 +187082,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(8), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(9), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "x", }, @@ -186076,17 +187104,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(12), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(25), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14747, + Ctx: p14821, FreeVars: ast.Identifiers{ "$", "a", @@ -186098,17 +187126,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(12), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14747, + Ctx: p14821, FreeVars: ast.Identifiers{ "$", }, @@ -186118,11 +187146,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(12), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(13), }, File: p1, @@ -186171,17 +187199,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(20), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(24), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14756, + Ctx: p14830, FreeVars: ast.Identifiers{ "a", "x", @@ -186192,17 +187220,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(20), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(21), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14756, + Ctx: p14830, FreeVars: ast.Identifiers{ "a", }, @@ -186215,17 +187243,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(22), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(23), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14756, + Ctx: p14830, FreeVars: ast.Identifiers{ "x", }, @@ -186249,11 +187277,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(7), }, End: ast.Location{ - Line: int(1358), + Line: int(1364), Column: int(25), }, File: p1, @@ -186301,17 +187329,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(16), }, End: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(35), }, File: p1, }, 
Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "a", "std", @@ -186322,17 +187350,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(16), }, End: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(32), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "std", }, @@ -186342,11 +187370,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(16), }, End: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(19), }, File: p1, @@ -186395,17 +187423,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(33), }, End: ast.Location{ - Line: int(1359), + Line: int(1365), Column: int(34), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14772, + Ctx: p14846, FreeVars: ast.Identifiers{ "a", }, @@ -186448,11 +187476,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1362), + Line: int(1368), Column: int(7), }, End: ast.Location{ - Line: int(1362), + Line: int(1368), Column: int(8), }, File: p1, @@ -186465,7 +187493,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14541, + Ctx: p14615, FreeVars: ast.Identifiers{ "a", }, @@ -186480,11 +187508,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1345), + Line: int(1351), Column: int(3), }, End: ast.Location{ - Line: int(1362), + Line: int(1368), Column: int(8), }, File: p1, @@ -186546,11 +187574,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1364), + Line: int(1370), Column: int(14), }, End: ast.Location{ - Line: int(1364), + Line: int(1370), Column: int(17), }, File: p1, @@ -186565,11 +187593,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1364), + Line: int(1370), Column: int(19), }, End: ast.Location{ - Line: int(1364), + Line: int(1370), Column: int(22), }, File: p1, @@ -186583,11 +187611,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(5), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, @@ -186600,7 +187628,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186612,17 +187640,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(8), }, End: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186634,17 +187662,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(9), }, End: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186655,17 +187683,17 @@ var _StdAst = 
&ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(9), }, End: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(21), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", }, @@ -186675,11 +187703,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(9), }, End: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(12), }, File: p1, @@ -186728,17 +187756,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(22), }, End: ast.Location{ - Line: int(1365), + Line: int(1371), Column: int(25), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14796, + Ctx: p14870, FreeVars: ast.Identifiers{ "pat", }, @@ -186762,11 +187790,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(7), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(82), }, File: p1, @@ -186779,7 +187807,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186790,17 +187818,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(13), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(82), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186811,17 +187839,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(13), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(66), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: nil, }, Value: "findSubstr first parameter should be a string, got ", @@ -186836,17 +187864,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(69), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(82), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186857,17 +187885,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(69), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(77), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", }, @@ -186877,11 +187905,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(69), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(72), }, File: p1, @@ -186930,17 +187958,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(78), }, End: ast.Location{ - Line: int(1366), + Line: int(1372), Column: int(81), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14813, + Ctx: p14887, FreeVars: ast.Identifiers{ "pat", }, @@ -186972,17 
+188000,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(10), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -186994,17 +188022,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(13), }, End: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", "str", @@ -187016,17 +188044,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(14), }, End: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(31), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", "str", @@ -187037,17 +188065,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(14), }, End: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(26), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", }, @@ -187057,11 +188085,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(14), }, End: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(17), }, File: p1, @@ -187110,17 +188138,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(27), }, End: ast.Location{ - Line: int(1367), + Line: int(1373), Column: int(30), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14829, + Ctx: p14903, FreeVars: ast.Identifiers{ "str", }, @@ -187144,11 +188172,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(7), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(83), }, File: p1, @@ -187161,7 +188189,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", "str", @@ -187172,17 +188200,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(13), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(83), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", "str", @@ -187193,17 +188221,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(13), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(67), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: nil, }, Value: "findSubstr second parameter should be a string, got ", @@ -187218,17 +188246,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(70), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), 
Column: int(83), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", "str", @@ -187239,17 +188267,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(70), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(78), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", }, @@ -187259,11 +188287,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(70), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(73), }, File: p1, @@ -187312,17 +188340,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(79), }, End: ast.Location{ - Line: int(1368), + Line: int(1374), Column: int(82), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14846, + Ctx: p14920, FreeVars: ast.Identifiers{ "str", }, @@ -187354,11 +188382,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(7), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, @@ -187371,7 +188399,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "std", @@ -187388,17 +188416,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(23), }, End: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(38), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14854, + Ctx: p14928, FreeVars: ast.Identifiers{ "pat", "std", @@ -187409,17 +188437,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(23), }, End: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14854, + Ctx: p14928, FreeVars: ast.Identifiers{ "std", }, @@ -187429,11 +188457,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(23), }, End: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(26), }, File: p1, @@ -187482,17 +188510,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(34), }, End: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(37), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14863, + Ctx: p14937, FreeVars: ast.Identifiers{ "pat", }, @@ -187514,11 +188542,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(13), }, End: ast.Location{ - Line: int(1370), + Line: int(1376), Column: int(38), }, File: p1, @@ -187530,11 +188558,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(7), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, @@ -187547,7 +188575,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: 
[]string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "pat_len", @@ -187565,17 +188593,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(23), }, End: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(38), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14870, + Ctx: p14944, FreeVars: ast.Identifiers{ "std", "str", @@ -187586,17 +188614,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(23), }, End: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14870, + Ctx: p14944, FreeVars: ast.Identifiers{ "std", }, @@ -187606,11 +188634,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(23), }, End: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(26), }, File: p1, @@ -187659,17 +188687,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(34), }, End: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(37), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14879, + Ctx: p14953, FreeVars: ast.Identifiers{ "str", }, @@ -187691,11 +188719,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(13), }, End: ast.Location{ - Line: int(1371), + Line: int(1377), Column: int(38), }, File: p1, @@ -187707,11 +188735,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(7), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, @@ -187724,7 +188752,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "pat_len", @@ -187738,17 +188766,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(10), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(59), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat_len", "str_len", @@ -187759,17 +188787,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(10), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(38), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat_len", "str_len", @@ -187780,17 +188808,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(10), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat_len", }, @@ -187800,17 +188828,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(10), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: 
int(17), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat_len", }, @@ -187824,17 +188852,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(21), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(22), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: nil, }, OriginalString: "0", @@ -187847,17 +188875,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(26), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(38), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "str_len", }, @@ -187867,17 +188895,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(26), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(33), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "str_len", }, @@ -187891,17 +188919,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(37), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(38), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: nil, }, OriginalString: "0", @@ -187915,17 +188943,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(42), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(59), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat_len", "str_len", @@ -187936,17 +188964,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(42), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(49), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat_len", }, @@ -187960,17 +188988,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(52), }, End: ast.Location{ - Line: int(1372), + Line: int(1378), Column: int(59), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "str_len", }, @@ -187985,11 +189013,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1373), + Line: int(1379), Column: int(9), }, End: ast.Location{ - Line: int(1373), + Line: int(1379), Column: int(11), }, File: p1, @@ -188002,7 +189030,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14783, + Ctx: p14857, FreeVars: nil, }, Elements: nil, @@ -188022,17 +189050,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(9), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "pat", "pat_len", @@ -188046,17 +189074,17 @@ var _StdAst = 
&ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(9), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(19), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14783, + Ctx: p14857, FreeVars: ast.Identifiers{ "std", }, @@ -188066,11 +189094,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(9), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(12), }, File: p1, @@ -188126,17 +189154,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(20), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(57), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14917, + Ctx: p14991, FreeVars: ast.Identifiers{ "pat", "pat_len", @@ -188155,11 +189183,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(29), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(30), }, File: p1, @@ -188173,17 +189201,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(32), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(57), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "i", "pat", @@ -188197,14 +189225,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1381), + Column: int(32), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1381), + Column: int(50), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -188293,17 +189321,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(32), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(35), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "str", }, @@ -188318,17 +189346,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(36), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(37), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "i", }, @@ -188343,17 +189371,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(38), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(49), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "i", "pat_len", @@ -188364,17 +189392,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(38), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(39), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "i", }, @@ -188388,17 +189416,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: 
int(1375), + Line: int(1381), Column: int(42), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(49), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "pat_len", }, @@ -188445,17 +189473,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(54), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(57), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14921, + Ctx: p14995, FreeVars: ast.Identifiers{ "pat", }, @@ -188472,17 +189500,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(59), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(90), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14917, + Ctx: p14991, FreeVars: ast.Identifiers{ "pat_len", "std", @@ -188494,17 +189522,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(59), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(68), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14917, + Ctx: p14991, FreeVars: ast.Identifiers{ "std", }, @@ -188514,11 +189542,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(59), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(62), }, File: p1, @@ -188567,17 +189595,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(69), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(70), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14953, + Ctx: p15027, FreeVars: nil, }, OriginalString: "0", @@ -188590,17 +189618,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(72), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(89), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14953, + Ctx: p15027, FreeVars: ast.Identifiers{ "pat_len", "str_len", @@ -188611,17 +189639,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(72), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(79), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14953, + Ctx: p15027, FreeVars: ast.Identifiers{ "str_len", }, @@ -188635,17 +189663,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(82), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(89), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14953, + Ctx: p15027, FreeVars: ast.Identifiers{ "pat_len", }, @@ -188683,11 +189711,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1364), + Line: int(1370), Column: int(3), }, End: ast.Location{ - Line: int(1375), + Line: int(1381), Column: int(91), }, File: p1, @@ -188749,11 +189777,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1377), + Line: int(1383), Column: int(8), }, End: 
ast.Location{ - Line: int(1377), + Line: int(1383), Column: int(13), }, File: p1, @@ -188768,11 +189796,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1377), + Line: int(1383), Column: int(15), }, End: ast.Location{ - Line: int(1377), + Line: int(1383), Column: int(18), }, File: p1, @@ -188786,11 +189814,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(5), }, End: ast.Location{ - Line: int(1381), + Line: int(1387), Column: int(81), }, File: p1, @@ -188803,7 +189831,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14966, + Ctx: p15040, FreeVars: ast.Identifiers{ "arr", "std", @@ -188815,17 +189843,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(8), }, End: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(25), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14966, + Ctx: p15040, FreeVars: ast.Identifiers{ "arr", "std", @@ -188837,17 +189865,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(9), }, End: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(25), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14966, + Ctx: p15040, FreeVars: ast.Identifiers{ "arr", "std", @@ -188858,17 +189886,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(9), }, End: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(20), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14966, + Ctx: p15040, FreeVars: ast.Identifiers{ "std", }, @@ -188878,11 +189906,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(9), }, End: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(12), }, File: p1, @@ -188931,17 +189959,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(21), }, End: ast.Location{ - Line: int(1378), + Line: int(1384), Column: int(24), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14979, + Ctx: p15053, FreeVars: ast.Identifiers{ "arr", }, @@ -188965,11 +189993,11 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1379), + Line: int(1385), Column: int(7), }, End: ast.Location{ - Line: int(1379), + Line: int(1385), Column: int(77), }, File: p1, @@ -188982,7 +190010,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p14966, + Ctx: p15040, FreeVars: ast.Identifiers{ "arr", "std", @@ -188993,17 +190021,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1379), + Line: int(1385), Column: int(13), }, End: ast.Location{ - Line: int(1379), + Line: int(1385), Column: int(77), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p14966, + Ctx: p15040, FreeVars: ast.Identifiers{ "arr", "std", @@ -189014,17 +190042,17 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1379), + Line: int(1385), Column: int(13), }, End: ast.Location{ - Line: int(1379), + 
[Remainder of the regenerated vendor/github.com/google/go-jsonnet/astgen/stdast.go hunks, continuing the var _StdAst = &ast.DesugaredObject{...} literal. The existing find field is renumbered from std.jsonnet lines 1379/1381 to 1385/1387 (the error string "find second parameter should be an array, got " and its surrounding nodes keep their shape, while the context pointers move from p14966/p14996 to p15040/p15070). The large @@ -189260,7 +190288,4956 @@ hunk then re-emits the std.filter(function(i) arr[i] == value, std.range(0, std.length(arr) - 1)) body of find (now line 1387) and adds the desugared object fields __compare (std.jsonnet lines 1391-1401), __compare_array (lines 1403-1415), __array_less (line 1417) and __array_greater (line 1418), all as machine-generated ast.* literals with freshly numbered contexts (p15083 through p15425).]
ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "__compare_array", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, + }, + FodderLeft: ast.Fodder{}, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1418), + Column: int(53), + }, + End: ast.Location{ + Line: int(1418), + Column: int(57), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15436, + FreeVars: ast.Identifiers{ + "arr1", + }, + }, + Id: "arr1", + }, + CommaFodder: ast.Fodder{}, + }, + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1418), + Column: int(59), + }, + End: ast.Location{ + Line: int(1418), + Column: int(63), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15436, + FreeVars: ast.Identifiers{ + "arr2", + }, + }, + Id: "arr2", + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: ast.Fodder{}, + TailStrictFodder: nil, + }, + OpFodder: ast.Fodder{}, + Op: ast.BinaryOp(12), + Right: &ast.LiteralNumber{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1418), + Column: int(68), + }, + End: ast.Location{ + Line: int(1418), + Column: int(69), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15425, + FreeVars: nil, + }, + OriginalString: "1", + }, + }, + }, + PlusSuper: false, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1418), + Column: int(3), + }, + End: ast.Location{ + Line: int(1418), + Column: int(69), + }, + File: p1, + }, + }, + ast.DesugaredObjectField{ + Hide: ast.ObjectFieldHide(0), + Name: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "__array_less_or_equal", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + Body: &ast.Function{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: p11, + FreeVars: ast.Identifiers{ + "std", + }, + }, + ParenLeftFodder: ast.Fodder{}, + Parameters: []ast.Parameter{ + ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "arr1", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: ast.Fodder{}, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(25), + }, + End: ast.Location{ + Line: int(1419), + Column: int(29), + }, + File: p1, + }, + }, + ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "arr2", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: nil, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(31), + }, + End: ast.Location{ + Line: int(1419), + Column: int(35), + }, + File: p1, + }, + }, + }, + TrailingComma: false, + 
ParenRightFodder: ast.Fodder{}, + Body: &ast.Binary{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(39), + }, + End: ast.Location{ + Line: int(1419), + Column: int(75), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15446, + FreeVars: ast.Identifiers{ + "arr1", + "arr2", + "std", + }, + }, + Left: &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(39), + }, + End: ast.Location{ + Line: int(1419), + Column: int(70), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15446, + FreeVars: ast.Identifiers{ + "arr1", + "arr2", + "std", + }, + }, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(39), + }, + End: ast.Location{ + Line: int(1419), + Column: int(58), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15446, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(39), + }, + End: ast.Location{ + Line: int(1419), + Column: int(42), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "__compare_array", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + RightBracketFodder: ast.Fodder{}, + Id: nil, + }, + FodderLeft: ast.Fodder{}, + Arguments: ast.Arguments{ + Positional: []ast.CommaSeparatedExpr{ + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(59), + }, + End: ast.Location{ + Line: int(1419), + Column: int(63), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15457, + FreeVars: ast.Identifiers{ + "arr1", + }, + }, + Id: "arr1", + }, + CommaFodder: ast.Fodder{}, + }, + ast.CommaSeparatedExpr{ + Expr: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(65), + }, + End: ast.Location{ + Line: int(1419), + Column: int(69), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15457, + FreeVars: ast.Identifiers{ + "arr2", + }, + }, + Id: "arr2", + }, + CommaFodder: nil, + }, + }, + Named: nil, + }, + TrailingComma: false, + TailStrict: false, + FodderRight: ast.Fodder{}, + TailStrictFodder: nil, + }, + OpFodder: ast.Fodder{}, + Op: ast.BinaryOp(10), + Right: &ast.LiteralNumber{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(74), + }, + End: ast.Location{ + Line: int(1419), + Column: int(75), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15446, + FreeVars: nil, + }, + OriginalString: "0", + }, + }, + }, + PlusSuper: false, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1419), + Column: int(3), + }, + End: ast.Location{ + Line: int(1419), + Column: int(75), + }, + File: p1, + }, + }, + ast.DesugaredObjectField{ 
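// Descriptive note on the surrounding generated literal (the _StdAst value in
// astgen/stdast.go is machine-generated from the Jsonnet standard library):
// the DesugaredObjectField entries in this region encode the std helpers
// "__array_less", "__array_greater", "__array_less_or_equal" and
// "__array_greater_or_equal". Each one simply calls
// std.__compare_array(arr1, arr2) and tests the resulting -1 / 0 / 1 against
// the appropriate bound, which is what gives arrays an ordering in std.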
+ Hide: ast.ObjectFieldHide(0), + Name: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "__array_greater_or_equal", + Kind: ast.LiteralStringKind(1), + BlockIndent: "", + BlockTermIndent: "", + }, + Body: &ast.Function{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: p11, + FreeVars: ast.Identifiers{ + "std", + }, + }, + ParenLeftFodder: ast.Fodder{}, + Parameters: []ast.Parameter{ + ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "arr1", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: ast.Fodder{}, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(28), + }, + End: ast.Location{ + Line: int(1420), + Column: int(32), + }, + File: p1, + }, + }, + ast.Parameter{ + NameFodder: ast.Fodder{}, + Name: "arr2", + EqFodder: nil, + DefaultArg: nil, + CommaFodder: nil, + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(34), + }, + End: ast.Location{ + Line: int(1420), + Column: int(38), + }, + File: p1, + }, + }, + }, + TrailingComma: false, + ParenRightFodder: ast.Fodder{}, + Body: &ast.Binary{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(42), + }, + End: ast.Location{ + Line: int(1420), + Column: int(78), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15467, + FreeVars: ast.Identifiers{ + "arr1", + "arr2", + "std", + }, + }, + Left: &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(42), + }, + End: ast.Location{ + Line: int(1420), + Column: int(73), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15467, + FreeVars: ast.Identifiers{ + "arr1", + "arr2", + "std", + }, + }, + Target: &ast.Index{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(42), + }, + End: ast.Location{ + Line: int(1420), + Column: int(61), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15467, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Target: &ast.Var{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(42), + }, + End: ast.Location{ + Line: int(1420), + Column: int(45), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: nil, + FreeVars: ast.Identifiers{ + "std", + }, + }, + Id: "std", + }, + LeftBracketFodder: ast.Fodder{}, + Index: &ast.LiteralString{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(0), + Column: int(0), + }, + End: ast.Location{ + Line: int(0), + Column: int(0), + }, + File: nil, + }, + Fodder: nil, + Ctx: nil, + FreeVars: nil, + }, + Value: "__compare_array", Kind: ast.LiteralStringKind(1), BlockIndent: "", BlockTermIndent: "", @@ -189272,467 +195249,52 @@ var _StdAst = &ast.DesugaredObject{ Arguments: ast.Arguments{ Positional: []ast.CommaSeparatedExpr{ ast.CommaSeparatedExpr{ - Expr: &ast.Function{ + Expr: &ast.Var{ NodeBase: ast.NodeBase{ LocRange: 
ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1381), - Column: int(18), + Line: int(1420), + Column: int(62), }, End: ast.Location{ - Line: int(1381), - Column: int(45), + Line: int(1420), + Column: int(66), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15009, + Ctx: p15478, FreeVars: ast.Identifiers{ - "arr", - "value", - }, - }, - ParenLeftFodder: ast.Fodder{}, - Parameters: []ast.Parameter{ - ast.Parameter{ - NameFodder: ast.Fodder{}, - Name: "i", - EqFodder: nil, - DefaultArg: nil, - CommaFodder: nil, - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(27), - }, - End: ast.Location{ - Line: int(1381), - Column: int(28), - }, - File: p1, - }, - }, - }, - TrailingComma: false, - ParenRightFodder: ast.Fodder{}, - Body: &ast.Binary{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(30), - }, - End: ast.Location{ - Line: int(1381), - Column: int(45), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15013, - FreeVars: ast.Identifiers{ - "arr", - "i", - "value", - }, - }, - Left: &ast.Index{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(30), - }, - End: ast.Location{ - Line: int(1381), - Column: int(36), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15013, - FreeVars: ast.Identifiers{ - "arr", - "i", - }, - }, - Target: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(30), - }, - End: ast.Location{ - Line: int(1381), - Column: int(33), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15013, - FreeVars: ast.Identifiers{ - "arr", - }, - }, - Id: "arr", - }, - LeftBracketFodder: ast.Fodder{}, - Index: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(34), - }, - End: ast.Location{ - Line: int(1381), - Column: int(35), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15013, - FreeVars: ast.Identifiers{ - "i", - }, - }, - Id: "i", - }, - RightBracketFodder: ast.Fodder{}, - Id: nil, - }, - OpFodder: ast.Fodder{}, - Op: ast.BinaryOp(12), - Right: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(40), - }, - End: ast.Location{ - Line: int(1381), - Column: int(45), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15013, - FreeVars: ast.Identifiers{ - "value", - }, - }, - Id: "value", + "arr1", }, }, + Id: "arr1", }, CommaFodder: ast.Fodder{}, }, ast.CommaSeparatedExpr{ - Expr: &ast.Apply{ + Expr: &ast.Var{ NodeBase: ast.NodeBase{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1381), - Column: int(47), + Line: int(1420), + Column: int(68), }, End: ast.Location{ - Line: int(1381), - Column: int(80), + Line: int(1420), + Column: int(72), }, File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15009, + Ctx: p15478, FreeVars: ast.Identifiers{ - "arr", - "std", + "arr2", }, }, - Target: &ast.Index{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(47), - }, - End: ast.Location{ - Line: int(1381), - Column: int(56), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15009, - FreeVars: ast.Identifiers{ - "std", - }, - }, - Target: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: 
ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(47), - }, - End: ast.Location{ - Line: int(1381), - Column: int(50), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: nil, - FreeVars: ast.Identifiers{ - "std", - }, - }, - Id: "std", - }, - LeftBracketFodder: ast.Fodder{}, - Index: &ast.LiteralString{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(0), - Column: int(0), - }, - End: ast.Location{ - Line: int(0), - Column: int(0), - }, - File: nil, - }, - Fodder: nil, - Ctx: nil, - FreeVars: nil, - }, - Value: "range", - Kind: ast.LiteralStringKind(1), - BlockIndent: "", - BlockTermIndent: "", - }, - RightBracketFodder: ast.Fodder{}, - Id: nil, - }, - FodderLeft: ast.Fodder{}, - Arguments: ast.Arguments{ - Positional: []ast.CommaSeparatedExpr{ - ast.CommaSeparatedExpr{ - Expr: &ast.LiteralNumber{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(57), - }, - End: ast.Location{ - Line: int(1381), - Column: int(58), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15032, - FreeVars: nil, - }, - OriginalString: "0", - }, - CommaFodder: ast.Fodder{}, - }, - ast.CommaSeparatedExpr{ - Expr: &ast.Binary{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(60), - }, - End: ast.Location{ - Line: int(1381), - Column: int(79), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15032, - FreeVars: ast.Identifiers{ - "arr", - "std", - }, - }, - Left: &ast.Apply{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(60), - }, - End: ast.Location{ - Line: int(1381), - Column: int(75), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15032, - FreeVars: ast.Identifiers{ - "arr", - "std", - }, - }, - Target: &ast.Index{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(60), - }, - End: ast.Location{ - Line: int(1381), - Column: int(70), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15032, - FreeVars: ast.Identifiers{ - "std", - }, - }, - Target: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(60), - }, - End: ast.Location{ - Line: int(1381), - Column: int(63), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: nil, - FreeVars: ast.Identifiers{ - "std", - }, - }, - Id: "std", - }, - LeftBracketFodder: ast.Fodder{}, - Index: &ast.LiteralString{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(0), - Column: int(0), - }, - End: ast.Location{ - Line: int(0), - Column: int(0), - }, - File: nil, - }, - Fodder: nil, - Ctx: nil, - FreeVars: nil, - }, - Value: "length", - Kind: ast.LiteralStringKind(1), - BlockIndent: "", - BlockTermIndent: "", - }, - RightBracketFodder: ast.Fodder{}, - Id: nil, - }, - FodderLeft: ast.Fodder{}, - Arguments: ast.Arguments{ - Positional: []ast.CommaSeparatedExpr{ - ast.CommaSeparatedExpr{ - Expr: &ast.Var{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(71), - }, - End: ast.Location{ - Line: int(1381), - Column: int(74), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15044, - FreeVars: ast.Identifiers{ - "arr", - }, - }, - Id: 
"arr", - }, - CommaFodder: nil, - }, - }, - Named: nil, - }, - TrailingComma: false, - TailStrict: false, - FodderRight: ast.Fodder{}, - TailStrictFodder: nil, - }, - OpFodder: ast.Fodder{}, - Op: ast.BinaryOp(4), - Right: &ast.LiteralNumber{ - NodeBase: ast.NodeBase{ - LocRange: ast.LocationRange{ - FileName: "", - Begin: ast.Location{ - Line: int(1381), - Column: int(78), - }, - End: ast.Location{ - Line: int(1381), - Column: int(79), - }, - File: p1, - }, - Fodder: ast.Fodder{}, - Ctx: p15032, - FreeVars: nil, - }, - OriginalString: "1", - }, - }, - CommaFodder: nil, - }, - }, - Named: nil, - }, - TrailingComma: false, - TailStrict: false, - FodderRight: ast.Fodder{}, - TailStrictFodder: nil, + Id: "arr2", }, CommaFodder: nil, }, @@ -189744,18 +195306,40 @@ var _StdAst = &ast.DesugaredObject{ FodderRight: ast.Fodder{}, TailStrictFodder: nil, }, + OpFodder: ast.Fodder{}, + Op: ast.BinaryOp(8), + Right: &ast.LiteralNumber{ + NodeBase: ast.NodeBase{ + LocRange: ast.LocationRange{ + FileName: "", + Begin: ast.Location{ + Line: int(1420), + Column: int(77), + }, + End: ast.Location{ + Line: int(1420), + Column: int(78), + }, + File: p1, + }, + Fodder: ast.Fodder{}, + Ctx: p15467, + FreeVars: nil, + }, + OriginalString: "0", + }, }, }, PlusSuper: false, LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(1377), + Line: int(1420), Column: int(3), }, End: ast.Location{ - Line: int(1381), - Column: int(81), + Line: int(1420), + Column: int(78), }, File: p1, }, @@ -189781,7 +195365,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15049, + Ctx: p15485, FreeVars: nil, }, }, @@ -189819,7 +195403,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15051, + Ctx: p15487, FreeVars: nil, }, ParenLeftFodder: ast.Fodder{}, @@ -189861,7 +195445,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15054, + Ctx: p15490, FreeVars: ast.Identifiers{ "x", }, @@ -189903,7 +195487,7 @@ var _StdAst = &ast.DesugaredObject{ File: nil, }, Fodder: nil, - Ctx: p15057, + Ctx: p15493, FreeVars: ast.Identifiers{ "std", }, @@ -189988,7 +195572,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15063, + Ctx: p15499, FreeVars: ast.Identifiers{ "base", }, @@ -190008,7 +195592,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15066, + Ctx: p15502, FreeVars: ast.Identifiers{ "base", }, @@ -190028,7 +195612,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15069, + Ctx: p15505, FreeVars: ast.Identifiers{ "base", }, @@ -190052,7 +195636,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15072, + Ctx: p15508, FreeVars: nil, }, OriginalString: "0", @@ -190075,7 +195659,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15074, + Ctx: p15510, FreeVars: ast.Identifiers{ "base", }, @@ -190095,7 +195679,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15077, + Ctx: p15513, FreeVars: ast.Identifiers{ "base", }, @@ -190119,7 +195703,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15080, + Ctx: p15516, FreeVars: nil, }, OriginalString: "16", @@ -190157,7 +195741,7 @@ var _StdAst = &ast.DesugaredObject{ }, }, }, - Ctx: p15084, + Ctx: p15520, FreeVars: ast.Identifiers{ "base", "std", @@ -190184,7 +195768,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: 
p15088, + Ctx: p15524, FreeVars: ast.Identifiers{ "std", }, @@ -190204,7 +195788,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15091, + Ctx: p15527, FreeVars: ast.Identifiers{ "std", }, @@ -190277,7 +195861,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15098, + Ctx: p15534, FreeVars: nil, }, Value: "0", @@ -190333,7 +195917,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15101, + Ctx: p15537, FreeVars: ast.Identifiers{ "base", "std", @@ -190361,7 +195945,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15105, + Ctx: p15541, FreeVars: ast.Identifiers{ "std", }, @@ -190381,7 +195965,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15108, + Ctx: p15544, FreeVars: ast.Identifiers{ "std", }, @@ -190454,7 +196038,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15115, + Ctx: p15551, FreeVars: nil, }, Value: "A", @@ -190510,7 +196094,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15118, + Ctx: p15554, FreeVars: ast.Identifiers{ "base", "std", @@ -190539,7 +196123,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15122, + Ctx: p15558, FreeVars: ast.Identifiers{ "std", }, @@ -190559,7 +196143,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15125, + Ctx: p15561, FreeVars: ast.Identifiers{ "std", }, @@ -190632,7 +196216,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15132, + Ctx: p15568, FreeVars: nil, }, Value: "a", @@ -190688,7 +196272,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15135, + Ctx: p15571, FreeVars: ast.Identifiers{ "base", "lower_a_code", @@ -190718,7 +196302,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: nil, - Ctx: p15139, + Ctx: p15575, FreeVars: ast.Identifiers{ "base", "lower_a_code", @@ -190793,7 +196377,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15144, + Ctx: p15580, FreeVars: ast.Identifiers{ "aggregate", "base", @@ -190825,7 +196409,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15148, + Ctx: p15584, FreeVars: ast.Identifiers{ "char", "std", @@ -190846,7 +196430,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15151, + Ctx: p15587, FreeVars: ast.Identifiers{ "std", }, @@ -190919,7 +196503,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15158, + Ctx: p15594, FreeVars: ast.Identifiers{ "char", }, @@ -190974,7 +196558,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15162, + Ctx: p15598, FreeVars: ast.Identifiers{ "aggregate", "base", @@ -191006,7 +196590,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15166, + Ctx: p15602, FreeVars: ast.Identifiers{ "code", "lower_a_code", @@ -191029,7 +196613,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15169, + Ctx: p15605, FreeVars: ast.Identifiers{ "code", "lower_a_code", @@ -191050,7 +196634,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15172, + Ctx: p15608, FreeVars: ast.Identifiers{ "code", }, @@ -191074,7 +196658,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15175, + Ctx: p15611, FreeVars: ast.Identifiers{ "lower_a_code", }, @@ -191098,7 
+196682,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15178, + Ctx: p15614, FreeVars: ast.Identifiers{ "code", "lower_a_code", @@ -191119,7 +196703,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15181, + Ctx: p15617, FreeVars: ast.Identifiers{ "code", "lower_a_code", @@ -191147,7 +196731,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15185, + Ctx: p15621, FreeVars: ast.Identifiers{ "code", }, @@ -191171,7 +196755,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15188, + Ctx: p15624, FreeVars: ast.Identifiers{ "lower_a_code", }, @@ -191196,7 +196780,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15191, + Ctx: p15627, FreeVars: nil, }, OriginalString: "10", @@ -191225,7 +196809,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15194, + Ctx: p15630, FreeVars: ast.Identifiers{ "code", "upper_a_code", @@ -191247,7 +196831,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15197, + Ctx: p15633, FreeVars: ast.Identifiers{ "code", "upper_a_code", @@ -191268,7 +196852,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15200, + Ctx: p15636, FreeVars: ast.Identifiers{ "code", }, @@ -191292,7 +196876,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15203, + Ctx: p15639, FreeVars: ast.Identifiers{ "upper_a_code", }, @@ -191316,7 +196900,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15206, + Ctx: p15642, FreeVars: ast.Identifiers{ "code", "upper_a_code", @@ -191337,7 +196921,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15209, + Ctx: p15645, FreeVars: ast.Identifiers{ "code", "upper_a_code", @@ -191365,7 +196949,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15213, + Ctx: p15649, FreeVars: ast.Identifiers{ "code", }, @@ -191389,7 +196973,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15216, + Ctx: p15652, FreeVars: ast.Identifiers{ "upper_a_code", }, @@ -191414,7 +196998,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15219, + Ctx: p15655, FreeVars: nil, }, OriginalString: "10", @@ -191443,7 +197027,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15222, + Ctx: p15658, FreeVars: ast.Identifiers{ "code", "zero_code", @@ -191471,7 +197055,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15226, + Ctx: p15662, FreeVars: ast.Identifiers{ "code", }, @@ -191495,7 +197079,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15229, + Ctx: p15665, FreeVars: ast.Identifiers{ "zero_code", }, @@ -191560,7 +197144,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15234, + Ctx: p15670, FreeVars: ast.Identifiers{ "base", "digit", @@ -191581,7 +197165,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15237, + Ctx: p15673, FreeVars: ast.Identifiers{ "digit", }, @@ -191601,7 +197185,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15240, + Ctx: p15676, FreeVars: ast.Identifiers{ "digit", }, @@ -191625,7 +197209,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15243, + Ctx: p15679, FreeVars: nil, }, OriginalString: 
"0", @@ -191648,7 +197232,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15245, + Ctx: p15681, FreeVars: ast.Identifiers{ "base", "digit", @@ -191669,7 +197253,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15248, + Ctx: p15684, FreeVars: ast.Identifiers{ "digit", }, @@ -191693,7 +197277,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15251, + Ctx: p15687, FreeVars: ast.Identifiers{ "base", }, @@ -191718,7 +197302,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15254, + Ctx: p15690, FreeVars: ast.Identifiers{ "aggregate", "base", @@ -191740,7 +197324,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15257, + Ctx: p15693, FreeVars: ast.Identifiers{ "aggregate", "base", @@ -191768,7 +197352,7 @@ var _StdAst = &ast.DesugaredObject{ Comment: []string{}, }, }, - Ctx: p15261, + Ctx: p15697, FreeVars: ast.Identifiers{ "base", }, @@ -191792,7 +197376,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15264, + Ctx: p15700, FreeVars: ast.Identifiers{ "aggregate", }, @@ -191817,7 +197401,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15267, + Ctx: p15703, FreeVars: ast.Identifiers{ "digit", }, @@ -191831,14 +197415,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(90), + Column: int(7), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(91), + Column: int(31), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -191853,14 +197437,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(90), + Column: int(43), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(90), + Column: int(86), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -191958,7 +197542,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15280, + Ctx: p15716, FreeVars: nil, }, Value: "%s is not a base %d integer", @@ -191984,7 +197568,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15282, + Ctx: p15718, FreeVars: ast.Identifiers{ "base", "str", @@ -192007,7 +197591,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15286, + Ctx: p15722, FreeVars: ast.Identifiers{ "str", }, @@ -192032,7 +197616,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15289, + Ctx: p15725, FreeVars: ast.Identifiers{ "base", }, @@ -192091,7 +197675,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15292, + Ctx: p15728, FreeVars: ast.Identifiers{ "addDigit", "std", @@ -192113,7 +197697,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15295, + Ctx: p15731, FreeVars: ast.Identifiers{ "std", }, @@ -192193,7 +197777,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15303, + Ctx: p15739, FreeVars: ast.Identifiers{ "addDigit", }, @@ -192218,7 +197802,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15306, + Ctx: p15742, FreeVars: ast.Identifiers{ "std", "str", @@ -192239,7 +197823,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15309, + Ctx: p15745, FreeVars: ast.Identifiers{ "std", 
}, @@ -192312,7 +197896,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15316, + Ctx: p15752, FreeVars: ast.Identifiers{ "str", }, @@ -192347,7 +197931,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15319, + Ctx: p15755, FreeVars: nil, }, OriginalString: "0", @@ -192372,14 +197956,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(77), + Column: int(5), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(92), + Column: int(49), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -192393,14 +197977,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(77), + Column: int(37), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(77), + Column: int(69), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -192497,7 +198081,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15331, + Ctx: p15767, FreeVars: nil, }, Value: "integer base %d invalid", @@ -192523,7 +198107,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15333, + Ctx: p15769, FreeVars: ast.Identifiers{ "base", }, @@ -192577,7 +198161,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15336, + Ctx: p15772, FreeVars: nil, }, Value: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", @@ -192609,14 +198193,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1091), + Column: int(22), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1091), + Column: int(72), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -192703,14 +198287,14 @@ var _StdAst = &ast.DesugaredObject{ LocRange: ast.LocationRange{ FileName: "", Begin: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1091), + Column: int(22), }, End: ast.Location{ - Line: int(0), - Column: int(0), + Line: int(1091), + Column: int(72), }, - File: nil, + File: p1, }, Fodder: nil, Ctx: nil, @@ -192874,7 +198458,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15360, + Ctx: p15796, FreeVars: ast.Identifiers{ "base64_table", "i", @@ -192899,7 +198483,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15364, + Ctx: p15800, FreeVars: ast.Identifiers{ "base64_table", "i", @@ -192920,7 +198504,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15367, + Ctx: p15803, FreeVars: ast.Identifiers{ "base64_table", }, @@ -192943,7 +198527,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15370, + Ctx: p15806, FreeVars: ast.Identifiers{ "i", }, @@ -192968,7 +198552,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15373, + Ctx: p15809, FreeVars: ast.Identifiers{ "i", }, @@ -193017,7 +198601,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15376, + Ctx: p15812, FreeVars: ast.Identifiers{ "std", }, @@ -193037,7 +198621,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15379, + Ctx: p15815, FreeVars: ast.Identifiers{ "std", }, @@ -193110,7 +198694,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - 
Ctx: p15386, + Ctx: p15822, FreeVars: nil, }, OriginalString: "0", @@ -193133,7 +198717,7 @@ var _StdAst = &ast.DesugaredObject{ File: p1, }, Fodder: ast.Fodder{}, - Ctx: p15388, + Ctx: p15824, FreeVars: nil, }, OriginalString: "63", diff --git a/vendor/github.com/google/go-jsonnet/builtins.go b/vendor/github.com/google/go-jsonnet/builtins.go index 0dba2dc..f4bb5eb 100644 --- a/vendor/github.com/google/go-jsonnet/builtins.go +++ b/vendor/github.com/google/go-jsonnet/builtins.go @@ -33,11 +33,11 @@ import ( "github.com/google/go-jsonnet/ast" ) -func builtinPlus(i *interpreter, trace traceElement, x, y value) (value, error) { +func builtinPlus(i *interpreter, x, y value) (value, error) { // TODO(sbarzowski) perhaps a more elegant way to dispatch switch right := y.(type) { case valueString: - left, err := builtinToString(i, trace, x) + left, err := builtinToString(i, x) if err != nil { return nil, err } @@ -46,13 +46,13 @@ func builtinPlus(i *interpreter, trace traceElement, x, y value) (value, error) } switch left := x.(type) { case *valueNumber: - right, err := i.getNumber(y, trace) + right, err := i.getNumber(y) if err != nil { return nil, err } - return makeDoubleCheck(i, trace, left.value+right.value) + return makeDoubleCheck(i, left.value+right.value) case valueString: - right, err := builtinToString(i, trace, y) + right, err := builtinToString(i, y) if err != nil { return nil, err } @@ -62,119 +62,153 @@ func builtinPlus(i *interpreter, trace traceElement, x, y value) (value, error) case *valueObject: return makeValueExtendedObject(left, right), nil default: - return nil, i.typeErrorSpecific(y, &valueObject{}, trace) + return nil, i.typeErrorSpecific(y, &valueObject{}) } case *valueArray: - right, err := i.getArray(y, trace) + right, err := i.getArray(y) if err != nil { return nil, err } return concatArrays(left, right), nil default: - return nil, i.typeErrorGeneral(x, trace) + return nil, i.typeErrorGeneral(x) } } -func builtinMinus(i *interpreter, trace traceElement, xv, yv value) (value, error) { - x, err := i.getNumber(xv, trace) +func builtinMinus(i *interpreter, xv, yv value) (value, error) { + x, err := i.getNumber(xv) if err != nil { return nil, err } - y, err := i.getNumber(yv, trace) + y, err := i.getNumber(yv) if err != nil { return nil, err } - return makeDoubleCheck(i, trace, x.value-y.value) + return makeDoubleCheck(i, x.value-y.value) } -func builtinMult(i *interpreter, trace traceElement, xv, yv value) (value, error) { - x, err := i.getNumber(xv, trace) +func builtinMult(i *interpreter, xv, yv value) (value, error) { + x, err := i.getNumber(xv) if err != nil { return nil, err } - y, err := i.getNumber(yv, trace) + y, err := i.getNumber(yv) if err != nil { return nil, err } - return makeDoubleCheck(i, trace, x.value*y.value) + return makeDoubleCheck(i, x.value*y.value) } -func builtinDiv(i *interpreter, trace traceElement, xv, yv value) (value, error) { - x, err := i.getNumber(xv, trace) +func builtinDiv(i *interpreter, xv, yv value) (value, error) { + x, err := i.getNumber(xv) if err != nil { return nil, err } - y, err := i.getNumber(yv, trace) + y, err := i.getNumber(yv) if err != nil { return nil, err } if y.value == 0 { - return nil, i.Error("Division by zero.", trace) + return nil, i.Error("Division by zero.") } - return makeDoubleCheck(i, trace, x.value/y.value) + return makeDoubleCheck(i, x.value/y.value) } -func builtinModulo(i *interpreter, trace traceElement, xv, yv value) (value, error) { - x, err := i.getNumber(xv, trace) +func builtinModulo(i *interpreter, 
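// Illustrative, stand-alone sketch (hypothetical simplified types, not the
// real go-jsonnet API): the builtins.go hunks in this diff all apply the same
// mechanical refactor — the explicit `trace traceElement` parameter disappears
// from the builtin signatures and from interpreter helpers such as getNumber
// and Error, because the interpreter now tracks the current trace itself
// (builtinTrace further down reads i.stack.currentTrace instead of a threaded
// argument). The shape of the change, reduced to a toy divide builtin:

type toyInterp struct{ currentTrace string }

type toyRuntimeError struct{ msg, at string }

func (e toyRuntimeError) Error() string { return e.msg + " (at " + e.at + ")" }

// before: every caller had to thread the trace element through
func toyDivWithTrace(i *toyInterp, trace string, x, y float64) (float64, error) {
	if y == 0 {
		return 0, toyRuntimeError{msg: "Division by zero.", at: trace}
	}
	return x / y, nil
}

// after: the builtin receives only the values it operates on
func toyDiv(i *toyInterp, x, y float64) (float64, error) {
	if y == 0 {
		return 0, toyRuntimeError{msg: "Division by zero.", at: i.currentTrace}
	}
	return x / y, nil
}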
xv, yv value) (value, error) { + x, err := i.getNumber(xv) if err != nil { return nil, err } - y, err := i.getNumber(yv, trace) + y, err := i.getNumber(yv) if err != nil { return nil, err } if y.value == 0 { - return nil, i.Error("Division by zero.", trace) + return nil, i.Error("Division by zero.") } - return makeDoubleCheck(i, trace, math.Mod(x.value, y.value)) + return makeDoubleCheck(i, math.Mod(x.value, y.value)) } -func valueLess(i *interpreter, trace traceElement, x, yv value) (bool, error) { +func valueCmp(i *interpreter, x, y value) (int, error) { switch left := x.(type) { case *valueNumber: - right, err := i.getNumber(yv, trace) + right, err := i.getNumber(y) if err != nil { - return false, err + return 0, err } - return left.value < right.value, nil + return float64Cmp(left.value, right.value), nil case valueString: - right, err := i.getString(yv, trace) + right, err := i.getString(y) if err != nil { - return false, err + return 0, err + } + return stringCmp(left, right), nil + case *valueArray: + right, err := i.getArray(y) + if err != nil { + return 0, err } - return stringLessThan(left, right), nil + return arrayCmp(i, left, right) default: - return false, i.typeErrorGeneral(x, trace) + return 0, i.typeErrorGeneral(x) } } -func builtinLess(i *interpreter, trace traceElement, x, yv value) (value, error) { - b, err := valueLess(i, trace, x, yv) - return makeValueBoolean(b), err +func arrayCmp(i *interpreter, x, y *valueArray) (int, error) { + for index := 0; index < minInt(x.length(), y.length()); index++ { + left, err := x.index(i, index) + if err != nil { + return 0, err + } + right, err := y.index(i, index) + if err != nil { + return 0, err + } + cmp, err := valueCmp(i, left, right) + if err != nil { + return 0, err + } + if cmp != 0 { + return cmp, nil + } + } + return intCmp(x.length(), y.length()), nil +} + +func builtinLess(i *interpreter, x, y value) (value, error) { + r, err := valueCmp(i, x, y) + if err != nil { + return nil, err + } + return makeValueBoolean(r == -1), nil } -func builtinGreater(i *interpreter, trace traceElement, x, y value) (value, error) { - return builtinLess(i, trace, y, x) +func builtinGreater(i *interpreter, x, y value) (value, error) { + r, err := valueCmp(i, x, y) + if err != nil { + return nil, err + } + return makeValueBoolean(r == 1), nil } -func builtinGreaterEq(i *interpreter, trace traceElement, x, y value) (value, error) { - res, err := builtinLess(i, trace, x, y) +func builtinGreaterEq(i *interpreter, x, y value) (value, error) { + r, err := valueCmp(i, x, y) if err != nil { return nil, err } - return res.(*valueBoolean).not(), nil + return makeValueBoolean(r >= 0), nil } -func builtinLessEq(i *interpreter, trace traceElement, x, y value) (value, error) { - res, err := builtinGreater(i, trace, x, y) +func builtinLessEq(i *interpreter, x, y value) (value, error) { + r, err := valueCmp(i, x, y) if err != nil { return nil, err } - return res.(*valueBoolean).not(), nil + return makeValueBoolean(r <= 0), nil } -func builtinLength(i *interpreter, trace traceElement, x value) (value, error) { +func builtinLength(i *interpreter, x value) (value, error) { var num int switch x := x.(type) { case *valueObject: @@ -190,29 +224,30 @@ func builtinLength(i *interpreter, trace traceElement, x value) (value, error) { } } default: - return nil, i.typeErrorGeneral(x, trace) + return nil, i.typeErrorGeneral(x) } return makeValueNumber(float64(num)), nil } -func builtinToString(i *interpreter, trace traceElement, x value) (value, error) { +func 
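// Illustrative, stand-alone sketch (plain Go slices and hypothetical helper
// names rather than go-jsonnet values): the change above replaces the boolean
// valueLess with a three-way valueCmp, which is what allows arrays to be
// ordered lexicographically and lets <, <=, > and >= all be derived from a
// single comparison result.

// threeWay returns -1, 0 or 1, mirroring the convention valueCmp uses.
func threeWay(a, b float64) int {
	switch {
	case a < b:
		return -1
	case a > b:
		return 1
	default:
		return 0
	}
}

// cmpSlices compares element by element and then falls back to comparing
// lengths, the same structure as arrayCmp in the hunk above.
func cmpSlices(x, y []float64) int {
	n := len(x)
	if len(y) < n {
		n = len(y)
	}
	for i := 0; i < n; i++ {
		if c := threeWay(x[i], y[i]); c != 0 {
			return c
		}
	}
	return threeWay(float64(len(x)), float64(len(y)))
}

// The relational builtins then reduce to sign tests on one comparison:
// less is cmp == -1, greater is cmp == 1, lessEq is cmp <= 0, greaterEq is cmp >= 0.
func slicesLess(x, y []float64) bool { return cmpSlices(x, y) == -1 }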
builtinToString(i *interpreter, x value) (value, error) { switch x := x.(type) { case valueString: return x, nil } var buf bytes.Buffer - err := i.manifestAndSerializeJSON(&buf, trace, x, false, "") + err := i.manifestAndSerializeJSON(&buf, x, false, "") if err != nil { return nil, err } return makeValueString(buf.String()), nil } -func builtinTrace(i *interpreter, trace traceElement, x value, y value) (value, error) { - xStr, err := i.getString(x, trace) +func builtinTrace(i *interpreter, x value, y value) (value, error) { + xStr, err := i.getString(x) if err != nil { return nil, err } + trace := i.stack.currentTrace filename := trace.loc.FileName line := trace.loc.Begin.Line fmt.Fprintf( @@ -230,12 +265,12 @@ type astMakeArrayElement struct { index int } -func builtinMakeArray(i *interpreter, trace traceElement, szv, funcv value) (value, error) { - sz, err := i.getInt(szv, trace) +func builtinMakeArray(i *interpreter, szv, funcv value) (value, error) { + sz, err := i.getInt(szv) if err != nil { return nil, err } - fun, err := i.getFunction(funcv, trace) + fun, err := i.getFunction(funcv) if err != nil { return nil, err } @@ -254,8 +289,8 @@ func builtinMakeArray(i *interpreter, trace traceElement, szv, funcv value) (val return makeValueArray(elems), nil } -func builtinFlatMap(i *interpreter, trace traceElement, funcv, arrv value) (value, error) { - fun, err := i.getFunction(funcv, trace) +func builtinFlatMap(i *interpreter, funcv, arrv value) (value, error) { + fun, err := i.getFunction(funcv) if err != nil { return nil, err } @@ -267,11 +302,11 @@ func builtinFlatMap(i *interpreter, trace traceElement, funcv, arrv value) (valu // TODO(sbarzowski) verify that it actually helps elems := make([]*cachedThunk, 0, num) for counter := 0; counter < num; counter++ { - returnedValue, err := fun.call(i, trace, args(arrv.elements[counter])) + returnedValue, err := fun.call(i, args(arrv.elements[counter])) if err != nil { return nil, err } - returned, err := i.getArray(returnedValue, trace) + returned, err := i.getArray(returnedValue) if err != nil { return nil, err } @@ -281,11 +316,11 @@ func builtinFlatMap(i *interpreter, trace traceElement, funcv, arrv value) (valu case valueString: var str strings.Builder for _, elem := range arrv.getRunes() { - returnedValue, err := fun.call(i, trace, args(readyThunk(makeValueString(string(elem))))) + returnedValue, err := fun.call(i, args(readyThunk(makeValueString(string(elem))))) if err != nil { return nil, err } - returned, err := i.getString(returnedValue, trace) + returned, err := i.getString(returnedValue) if err != nil { return nil, err } @@ -293,15 +328,15 @@ func builtinFlatMap(i *interpreter, trace traceElement, funcv, arrv value) (valu } return makeValueString(str.String()), nil default: - return nil, i.Error("std.flatMap second param must be array / string, got "+arrv.getType().name, trace) + return nil, i.Error("std.flatMap second param must be array / string, got " + arrv.getType().name) } } -func joinArrays(i *interpreter, trace traceElement, sep *valueArray, arr *valueArray) (value, error) { +func joinArrays(i *interpreter, sep *valueArray, arr *valueArray) (value, error) { result := make([]*cachedThunk, 0, arr.length()) first := true for _, elem := range arr.elements { - elemValue, err := i.evaluatePV(elem, trace) + elemValue, err := i.evaluatePV(elem) if err != nil { return nil, err } @@ -314,7 +349,7 @@ func joinArrays(i *interpreter, trace traceElement, sep *valueArray, arr *valueA } result = append(result, v.elements...) 
default: - return nil, i.typeErrorSpecific(elemValue, &valueArray{}, trace) + return nil, i.typeErrorSpecific(elemValue, &valueArray{}) } first = false @@ -322,11 +357,11 @@ func joinArrays(i *interpreter, trace traceElement, sep *valueArray, arr *valueA return makeValueArray(result), nil } -func joinStrings(i *interpreter, trace traceElement, sep valueString, arr *valueArray) (value, error) { +func joinStrings(i *interpreter, sep valueString, arr *valueArray) (value, error) { result := make([]rune, 0, arr.length()) first := true for _, elem := range arr.elements { - elemValue, err := i.evaluatePV(elem, trace) + elemValue, err := i.evaluatePV(elem) if err != nil { return nil, err } @@ -339,30 +374,30 @@ func joinStrings(i *interpreter, trace traceElement, sep valueString, arr *value } result = append(result, v.getRunes()...) default: - return nil, i.typeErrorSpecific(elemValue, emptyString(), trace) + return nil, i.typeErrorSpecific(elemValue, emptyString()) } first = false } return makeStringFromRunes(result), nil } -func builtinJoin(i *interpreter, trace traceElement, sep, arrv value) (value, error) { - arr, err := i.getArray(arrv, trace) +func builtinJoin(i *interpreter, sep, arrv value) (value, error) { + arr, err := i.getArray(arrv) if err != nil { return nil, err } switch sep := sep.(type) { case valueString: - return joinStrings(i, trace, sep, arr) + return joinStrings(i, sep, arr) case *valueArray: - return joinArrays(i, trace, sep, arr) + return joinArrays(i, sep, arr) default: - return nil, i.Error("join first parameter should be string or array, got "+sep.getType().name, trace) + return nil, i.Error("join first parameter should be string or array, got " + sep.getType().name) } } -func builtinReverse(i *interpreter, trace traceElement, arrv value) (value, error) { - arr, err := i.getArray(arrv, trace) +func builtinReverse(i *interpreter, arrv value) (value, error) { + arr, err := i.getArray(arrv) if err != nil { return nil, err } @@ -378,12 +413,12 @@ func builtinReverse(i *interpreter, trace traceElement, arrv value) (value, erro return makeValueArray(reversedArray), nil } -func builtinFilter(i *interpreter, trace traceElement, funcv, arrv value) (value, error) { - arr, err := i.getArray(arrv, trace) +func builtinFilter(i *interpreter, funcv, arrv value) (value, error) { + arr, err := i.getArray(arrv) if err != nil { return nil, err } - fun, err := i.getFunction(funcv, trace) + fun, err := i.getFunction(funcv) if err != nil { return nil, err } @@ -393,11 +428,11 @@ func builtinFilter(i *interpreter, trace traceElement, funcv, arrv value) (value // TODO(sbarzowski) verify that it actually helps elems := make([]*cachedThunk, 0, num) for counter := 0; counter < num; counter++ { - includedValue, err := fun.call(i, trace, args(arr.elements[counter])) + includedValue, err := fun.call(i, args(arr.elements[counter])) if err != nil { return nil, err } - included, err := i.getBoolean(includedValue, trace) + included, err := i.getBoolean(includedValue) if err != nil { return nil, err } @@ -410,7 +445,6 @@ func builtinFilter(i *interpreter, trace traceElement, funcv, arrv value) (value type sortData struct { i *interpreter - trace traceElement thunks []*cachedThunk keys []value err error @@ -421,12 +455,12 @@ func (d *sortData) Len() int { } func (d *sortData) Less(i, j int) bool { - b, err := valueLess(d.i, d.trace, d.keys[i], d.keys[j]) + r, err := valueCmp(d.i, d.keys[i], d.keys[j]) if err != nil { d.err = err panic("Error while comparing elements") } - return b + return r == -1 } func 
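// Illustrative, stand-alone sketch (plain Go strings, hypothetical name):
// joinArrays and joinStrings above interleave the separator with a `first`
// flag — the separator is appended before every element after the first one.
// The string flavour of that pattern, roughly:

func joinParts(sep string, parts []string) string {
	out := make([]rune, 0)
	first := true
	for _, p := range parts {
		if !first {
			out = append(out, []rune(sep)...)
		}
		out = append(out, []rune(p)...)
		first = false
	}
	return string(out)
}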
(d *sortData) Swap(i, j int) { @@ -446,26 +480,26 @@ func (d *sortData) Sort() (err error) { return } -func builtinSort(i *interpreter, trace traceElement, arguments []value) (value, error) { +func builtinSort(i *interpreter, arguments []value) (value, error) { arrv := arguments[0] keyFv := arguments[1] - arr, err := i.getArray(arrv, trace) + arr, err := i.getArray(arrv) if err != nil { return nil, err } - keyF, err := i.getFunction(keyFv, trace) + keyF, err := i.getFunction(keyFv) if err != nil { return nil, err } num := arr.length() - data := sortData{i: i, trace: trace, thunks: make([]*cachedThunk, num), keys: make([]value, num)} + data := sortData{i: i, thunks: make([]*cachedThunk, num), keys: make([]value, num)} for counter := 0; counter < num; counter++ { var err error data.thunks[counter] = arr.elements[counter] - data.keys[counter], err = keyF.call(i, trace, args(arr.elements[counter])) + data.keys[counter], err = keyF.call(i, args(arr.elements[counter])) if err != nil { return nil, err } @@ -479,12 +513,12 @@ func builtinSort(i *interpreter, trace traceElement, arguments []value) (value, return makeValueArray(data.thunks), nil } -func builtinRange(i *interpreter, trace traceElement, fromv, tov value) (value, error) { - from, err := i.getInt(fromv, trace) +func builtinRange(i *interpreter, fromv, tov value) (value, error) { + from, err := i.getInt(fromv) if err != nil { return nil, err } - to, err := i.getInt(tov, trace) + to, err := i.getInt(tov) if err != nil { return nil, err } @@ -495,16 +529,16 @@ func builtinRange(i *interpreter, trace traceElement, fromv, tov value) (value, return makeValueArray(elems), nil } -func builtinNegation(i *interpreter, trace traceElement, x value) (value, error) { - b, err := i.getBoolean(x, trace) +func builtinNegation(i *interpreter, x value) (value, error) { + b, err := i.getBoolean(x) if err != nil { return nil, err } return makeValueBoolean(!b.value), nil } -func builtinBitNeg(i *interpreter, trace traceElement, x value) (value, error) { - n, err := i.getNumber(x, trace) +func builtinBitNeg(i *interpreter, x value) (value, error) { + n, err := i.getNumber(x) if err != nil { return nil, err } @@ -512,12 +546,12 @@ func builtinBitNeg(i *interpreter, trace traceElement, x value) (value, error) { return int64ToValue(^intValue), nil } -func builtinIdentity(i *interpreter, trace traceElement, x value) (value, error) { +func builtinIdentity(i *interpreter, x value) (value, error) { return x, nil } -func builtinUnaryPlus(i *interpreter, trace traceElement, x value) (value, error) { - n, err := i.getNumber(x, trace) +func builtinUnaryPlus(i *interpreter, x value) (value, error) { + n, err := i.getNumber(x) if err != nil { return nil, err } @@ -525,8 +559,8 @@ func builtinUnaryPlus(i *interpreter, trace traceElement, x value) (value, error return makeValueNumber(n.value), nil } -func builtinUnaryMinus(i *interpreter, trace traceElement, x value) (value, error) { - n, err := i.getNumber(x, trace) +func builtinUnaryMinus(i *interpreter, x value) (value, error) { + n, err := i.getNumber(x) if err != nil { return nil, err } @@ -535,25 +569,25 @@ func builtinUnaryMinus(i *interpreter, trace traceElement, x value) (value, erro // TODO(sbarzowski) since we have a builtin implementation of equals it's no longer really // needed and we should deprecate it eventually -func primitiveEquals(i *interpreter, trace traceElement, x, y value) (value, error) { +func primitiveEquals(i *interpreter, x, y value) (value, error) { if x.getType() != y.getType() { return 
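// Illustrative, stand-alone sketch (plain Go types, hypothetical names):
// builtinSort above calls the key function once per element up front and then
// sorts the cached thunks and their precomputed keys side by side through
// sort.Interface, so keys are never recomputed while sorting; sortData.Less
// reuses the three-way valueCmp and records any comparison error (Less itself
// cannot return one) so Sort can surface it afterwards. A simplified version
// of that shape:

type byKey struct {
	items []string // stands in for the cached element thunks
	keys  []int    // stands in for the precomputed sort keys
}

func (b byKey) Len() int           { return len(b.items) }
func (b byKey) Less(i, j int) bool { return b.keys[i] < b.keys[j] }
func (b byKey) Swap(i, j int) {
	b.items[i], b.items[j] = b.items[j], b.items[i]
	b.keys[i], b.keys[j] = b.keys[j], b.keys[i]
}

// Usage: populate keys[i] from items[i] once, then hand byKey{...} to a
// sort.Interface-based sorter such as sort.Sort or sort.Stable.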
makeValueBoolean(false), nil } switch left := x.(type) { case *valueBoolean: - right, err := i.getBoolean(y, trace) + right, err := i.getBoolean(y) if err != nil { return nil, err } return makeValueBoolean(left.value == right.value), nil case *valueNumber: - right, err := i.getNumber(y, trace) + right, err := i.getNumber(y) if err != nil { return nil, err } return makeValueBoolean(left.value == right.value), nil case valueString: - right, err := i.getString(y, trace) + right, err := i.getString(y) if err != nil { return nil, err } @@ -561,34 +595,33 @@ func primitiveEquals(i *interpreter, trace traceElement, x, y value) (value, err case *valueNull: return makeValueBoolean(true), nil case *valueFunction: - return nil, i.Error("Cannot test equality of functions", trace) + return nil, i.Error("Cannot test equality of functions") default: return nil, i.Error( - "primitiveEquals operates on primitive types, got "+x.getType().name, - trace, + "primitiveEquals operates on primitive types, got " + x.getType().name, ) } } -func rawEquals(i *interpreter, trace traceElement, x, y value) (bool, error) { +func rawEquals(i *interpreter, x, y value) (bool, error) { if x.getType() != y.getType() { return false, nil } switch left := x.(type) { case *valueBoolean: - right, err := i.getBoolean(y, trace) + right, err := i.getBoolean(y) if err != nil { return false, err } return left.value == right.value, nil case *valueNumber: - right, err := i.getNumber(y, trace) + right, err := i.getNumber(y) if err != nil { return false, err } return left.value == right.value, nil case valueString: - right, err := i.getString(y, trace) + right, err := i.getString(y) if err != nil { return false, err } @@ -596,7 +629,7 @@ func rawEquals(i *interpreter, trace traceElement, x, y value) (bool, error) { case *valueNull: return true, nil case *valueArray: - right, err := i.getArray(y, trace) + right, err := i.getArray(y) if err != nil { return false, err } @@ -604,15 +637,15 @@ func rawEquals(i *interpreter, trace traceElement, x, y value) (bool, error) { return false, nil } for j := range left.elements { - leftElem, err := i.evaluatePV(left.elements[j], trace) + leftElem, err := i.evaluatePV(left.elements[j]) if err != nil { return false, err } - rightElem, err := i.evaluatePV(right.elements[j], trace) + rightElem, err := i.evaluatePV(right.elements[j]) if err != nil { return false, err } - eq, err := rawEquals(i, trace, leftElem, rightElem) + eq, err := rawEquals(i, leftElem, rightElem) if err != nil { return false, err } @@ -622,7 +655,7 @@ func rawEquals(i *interpreter, trace traceElement, x, y value) (bool, error) { } return true, nil case *valueObject: - right, err := i.getObject(y, trace) + right, err := i.getObject(y) if err != nil { return false, err } @@ -640,15 +673,15 @@ func rawEquals(i *interpreter, trace traceElement, x, y value) (bool, error) { } for j := range leftFields { fieldName := leftFields[j] - leftField, err := left.index(i, trace, fieldName) + leftField, err := left.index(i, fieldName) if err != nil { return false, err } - rightField, err := right.index(i, trace, fieldName) + rightField, err := right.index(i, fieldName) if err != nil { return false, err } - eq, err := rawEquals(i, trace, leftField, rightField) + eq, err := rawEquals(i, leftField, rightField) if err != nil { return false, err } @@ -658,33 +691,33 @@ func rawEquals(i *interpreter, trace traceElement, x, y value) (bool, error) { } return true, nil case *valueFunction: - return false, i.Error("Cannot test equality of functions", trace) + 
return false, i.Error("Cannot test equality of functions") } panic(fmt.Sprintf("Unhandled case in equals %#+v %#+v", x, y)) } -func builtinEquals(i *interpreter, trace traceElement, x, y value) (value, error) { - eq, err := rawEquals(i, trace, x, y) +func builtinEquals(i *interpreter, x, y value) (value, error) { + eq, err := rawEquals(i, x, y) if err != nil { return nil, err } return makeValueBoolean(eq), nil } -func builtinNotEquals(i *interpreter, trace traceElement, x, y value) (value, error) { - eq, err := rawEquals(i, trace, x, y) +func builtinNotEquals(i *interpreter, x, y value) (value, error) { + eq, err := rawEquals(i, x, y) if err != nil { return nil, err } return makeValueBoolean(!eq), nil } -func builtinType(i *interpreter, trace traceElement, x value) (value, error) { +func builtinType(i *interpreter, x value) (value, error) { return makeValueString(x.getType().name), nil } -func builtinMd5(i *interpreter, trace traceElement, x value) (value, error) { - str, err := i.getString(x, trace) +func builtinMd5(i *interpreter, x value) (value, error) { + str, err := i.getString(x) if err != nil { return nil, err } @@ -692,7 +725,7 @@ func builtinMd5(i *interpreter, trace traceElement, x value) (value, error) { return makeValueString(hex.EncodeToString(hash[:])), nil } -func builtinBase64(i *interpreter, trace traceElement, input value) (value, error) { +func builtinBase64(i *interpreter, input value) (value, error) { var byteArr []byte var sanityCheck = func(v int) (string, bool) { @@ -706,7 +739,7 @@ func builtinBase64(i *interpreter, trace traceElement, input value) (value, erro switch input.(type) { case valueString: - vStr, err := i.getString(input, trace) + vStr, err := i.getString(input) if err != nil { return nil, err } @@ -716,47 +749,47 @@ func builtinBase64(i *interpreter, trace traceElement, input value) (value, erro n := int(r) msg, ok := sanityCheck(n) if !ok { - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } } byteArr = []byte(str) case *valueArray: - vArr, err := i.getArray(input, trace) + vArr, err := i.getArray(input) if err != nil { return nil, err } for _, cThunk := range vArr.elements { - cTv, err := cThunk.getValue(i, trace) + cTv, err := cThunk.getValue(i) if err != nil { return nil, err } - vInt, err := i.getInt(cTv, trace) + vInt, err := i.getInt(cTv) if err != nil { msg := fmt.Sprintf("base64 encountered a non-integer value in the array, got %s", cTv.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } msg, ok := sanityCheck(vInt) if !ok { - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } byteArr = append(byteArr, byte(vInt)) } default: msg := fmt.Sprintf("base64 can only base64 encode strings / arrays of single bytes, got %s", input.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } sEnc := base64.StdEncoding.EncodeToString(byteArr) return makeValueString(sEnc), nil } -func builtinEncodeUTF8(i *interpreter, trace traceElement, x value) (value, error) { - str, err := i.getString(x, trace) +func builtinEncodeUTF8(i *interpreter, x value) (value, error) { + str, err := i.getString(x) if err != nil { return nil, err } @@ -768,19 +801,19 @@ func builtinEncodeUTF8(i *interpreter, trace traceElement, x value) (value, 
erro return makeValueArray(elems), nil } -func builtinDecodeUTF8(i *interpreter, trace traceElement, x value) (value, error) { - arr, err := i.getArray(x, trace) +func builtinDecodeUTF8(i *interpreter, x value) (value, error) { + arr, err := i.getArray(x) if err != nil { return nil, err } bs := make([]byte, len(arr.elements)) // it will be longer if characters fall outside of ASCII for pos := range arr.elements { - v, err := i.evaluateInt(arr.elements[pos], trace) + v, err := i.evaluateInt(arr.elements[pos]) if err != nil { return nil, err } if v < 0 || v > 255 { - return nil, i.Error(fmt.Sprintf("Bytes must be integers in range [0, 255], got %d", v), trace) + return nil, i.Error(fmt.Sprintf("Bytes must be integers in range [0, 255], got %d", v)) } bs[pos] = byte(v) } @@ -791,47 +824,47 @@ func builtinDecodeUTF8(i *interpreter, trace traceElement, x value) (value, erro // https://en.wikipedia.org/wiki/Unicode#Architecture_and_terminology const codepointMax = 0x10FFFF -func builtinChar(i *interpreter, trace traceElement, x value) (value, error) { - n, err := i.getNumber(x, trace) +func builtinChar(i *interpreter, x value) (value, error) { + n, err := i.getNumber(x) if err != nil { return nil, err } if n.value > codepointMax { - return nil, i.Error(fmt.Sprintf("Invalid unicode codepoint, got %v", n.value), trace) + return nil, i.Error(fmt.Sprintf("Invalid unicode codepoint, got %v", n.value)) } else if n.value < 0 { - return nil, i.Error(fmt.Sprintf("Codepoints must be >= 0, got %v", n.value), trace) + return nil, i.Error(fmt.Sprintf("Codepoints must be >= 0, got %v", n.value)) } return makeValueString(string(rune(n.value))), nil } -func builtinCodepoint(i *interpreter, trace traceElement, x value) (value, error) { - str, err := i.getString(x, trace) +func builtinCodepoint(i *interpreter, x value) (value, error) { + str, err := i.getString(x) if err != nil { return nil, err } if str.length() != 1 { - return nil, i.Error(fmt.Sprintf("codepoint takes a string of length 1, got length %v", str.length()), trace) + return nil, i.Error(fmt.Sprintf("codepoint takes a string of length 1, got length %v", str.length())) } return makeValueNumber(float64(str.getRunes()[0])), nil } -func makeDoubleCheck(i *interpreter, trace traceElement, x float64) (value, error) { +func makeDoubleCheck(i *interpreter, x float64) (value, error) { if math.IsNaN(x) { - return nil, i.Error("Not a number", trace) + return nil, i.Error("Not a number") } if math.IsInf(x, 0) { - return nil, i.Error("Overflow", trace) + return nil, i.Error("Overflow") } return makeValueNumber(x), nil } -func liftNumeric(f func(float64) float64) func(*interpreter, traceElement, value) (value, error) { - return func(i *interpreter, trace traceElement, x value) (value, error) { - n, err := i.getNumber(x, trace) +func liftNumeric(f func(float64) float64) func(*interpreter, value) (value, error) { + return func(i *interpreter, x value) (value, error) { + n, err := i.getNumber(x) if err != nil { return nil, err } - return makeDoubleCheck(i, trace, f(n.value)) + return makeDoubleCheck(i, f(n.value)) } } @@ -861,28 +894,28 @@ var builtinExponent = liftNumeric(func(f float64) float64 { return float64(exponent) }) -func liftBitwise(f func(int64, int64) int64, positiveRightArg bool) func(*interpreter, traceElement, value, value) (value, error) { - return func(i *interpreter, trace traceElement, xv, yv value) (value, error) { - x, err := i.getNumber(xv, trace) +func liftBitwise(f func(int64, int64) int64, positiveRightArg bool) func(*interpreter, value, 
value) (value, error) { + return func(i *interpreter, xv, yv value) (value, error) { + x, err := i.getNumber(xv) if err != nil { return nil, err } - y, err := i.getNumber(yv, trace) + y, err := i.getNumber(yv) if err != nil { return nil, err } if x.value < math.MinInt64 || x.value > math.MaxInt64 { msg := fmt.Sprintf("Bitwise operator argument %v outside of range [%v, %v]", x.value, int64(math.MinInt64), int64(math.MaxInt64)) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } if y.value < math.MinInt64 || y.value > math.MaxInt64 { msg := fmt.Sprintf("Bitwise operator argument %v outside of range [%v, %v]", y.value, int64(math.MinInt64), int64(math.MaxInt64)) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } if positiveRightArg && y.value < 0 { - return nil, makeRuntimeError("Shift by negative exponent.", i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError("Shift by negative exponent.", i.getCurrentStackTrace()) } - return makeDoubleCheck(i, trace, float64(f(int64(x.value), int64(y.value)))) + return makeDoubleCheck(i, float64(f(int64(x.value), int64(y.value)))) } } @@ -892,12 +925,12 @@ var builtinBitwiseAnd = liftBitwise(func(x, y int64) int64 { return x & y }, fal var builtinBitwiseOr = liftBitwise(func(x, y int64) int64 { return x | y }, false) var builtinBitwiseXor = liftBitwise(func(x, y int64) int64 { return x ^ y }, false) -func builtinObjectFieldsEx(i *interpreter, trace traceElement, objv, includeHiddenV value) (value, error) { - obj, err := i.getObject(objv, trace) +func builtinObjectFieldsEx(i *interpreter, objv, includeHiddenV value) (value, error) { + obj, err := i.getObject(objv) if err != nil { return nil, err } - includeHidden, err := i.getBoolean(includeHiddenV, trace) + includeHidden, err := i.getBoolean(includeHiddenV) if err != nil { return nil, err } @@ -910,16 +943,16 @@ func builtinObjectFieldsEx(i *interpreter, trace traceElement, objv, includeHidd return makeValueArray(elems), nil } -func builtinObjectHasEx(i *interpreter, trace traceElement, objv value, fnamev value, includeHiddenV value) (value, error) { - obj, err := i.getObject(objv, trace) +func builtinObjectHasEx(i *interpreter, objv value, fnamev value, includeHiddenV value) (value, error) { + obj, err := i.getObject(objv) if err != nil { return nil, err } - fname, err := i.getString(fnamev, trace) + fname, err := i.getString(fnamev) if err != nil { return nil, err } - includeHidden, err := i.getBoolean(includeHiddenV, trace) + includeHidden, err := i.getBoolean(includeHiddenV) if err != nil { return nil, err } @@ -928,52 +961,52 @@ func builtinObjectHasEx(i *interpreter, trace traceElement, objv value, fnamev v return makeValueBoolean(hasField), nil } -func builtinPow(i *interpreter, trace traceElement, basev value, expv value) (value, error) { - base, err := i.getNumber(basev, trace) +func builtinPow(i *interpreter, basev value, expv value) (value, error) { + base, err := i.getNumber(basev) if err != nil { return nil, err } - exp, err := i.getNumber(expv, trace) + exp, err := i.getNumber(expv) if err != nil { return nil, err } - return makeDoubleCheck(i, trace, math.Pow(base.value, exp.value)) + return makeDoubleCheck(i, math.Pow(base.value, exp.value)) } -func builtinSubstr(i *interpreter, trace traceElement, inputStr, inputFrom, inputLen value) (value, error) { - strV, err := i.getString(inputStr, trace) +func builtinSubstr(i 
*interpreter, inputStr, inputFrom, inputLen value) (value, error) { + strV, err := i.getString(inputStr) if err != nil { msg := fmt.Sprintf("substr first parameter should be a string, got %s", inputStr.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } - fromV, err := i.getNumber(inputFrom, trace) + fromV, err := i.getNumber(inputFrom) if err != nil { msg := fmt.Sprintf("substr second parameter should be a number, got %s", inputFrom.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } if math.Mod(fromV.value, 1) != 0 { msg := fmt.Sprintf("substr second parameter should be an integer, got %f", fromV.value) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } - lenV, err := i.getNumber(inputLen, trace) + lenV, err := i.getNumber(inputLen) if err != nil { msg := fmt.Sprintf("substr third parameter should be a number, got %s", inputLen.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } - lenInt, err := i.getInt(lenV, trace) + lenInt, err := i.getInt(lenV) if err != nil { msg := fmt.Sprintf("substr third parameter should be an integer, got %f", lenV.value) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } if lenInt < 0 { msg := fmt.Sprintf("substr third parameter should be greater than zero, got %d", lenInt) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } fromInt := int(fromV.value) @@ -993,26 +1026,26 @@ func builtinSubstr(i *interpreter, trace traceElement, inputStr, inputFrom, inpu return makeValueString(string(runes[fromInt:endIndex])), nil } -func builtinSplitLimit(i *interpreter, trace traceElement, strv, cv, maxSplitsV value) (value, error) { - str, err := i.getString(strv, trace) +func builtinSplitLimit(i *interpreter, strv, cv, maxSplitsV value) (value, error) { + str, err := i.getString(strv) if err != nil { return nil, err } - c, err := i.getString(cv, trace) + c, err := i.getString(cv) if err != nil { return nil, err } - maxSplits, err := i.getInt(maxSplitsV, trace) + maxSplits, err := i.getInt(maxSplitsV) if err != nil { return nil, err } if maxSplits < -1 { - return nil, i.Error(fmt.Sprintf("std.splitLimit third parameter should be -1 or non-negative, got %v", maxSplits), trace) + return nil, i.Error(fmt.Sprintf("std.splitLimit third parameter should be -1 or non-negative, got %v", maxSplits)) } sStr := str.getGoString() sC := c.getGoString() if len(sC) != 1 { - return nil, i.Error(fmt.Sprintf("std.splitLimit second parameter should have length 1, got %v", len(sC)), trace) + return nil, i.Error(fmt.Sprintf("std.splitLimit second parameter should have length 1, got %v", len(sC))) } // the convention is slightly different from strings.splitN in Go (the meaning of non-negative values is shifted by one) @@ -1030,16 +1063,16 @@ func builtinSplitLimit(i *interpreter, trace traceElement, strv, cv, maxSplitsV return makeValueArray(res), nil } -func builtinStrReplace(i *interpreter, trace traceElement, strv, fromv, tov value) (value, error) { - str, err := i.getString(strv, trace) +func builtinStrReplace(i *interpreter, strv, fromv, tov value) (value, error) { + str, err 
:= i.getString(strv) if err != nil { return nil, err } - from, err := i.getString(fromv, trace) + from, err := i.getString(fromv) if err != nil { return nil, err } - to, err := i.getString(tov, trace) + to, err := i.getString(tov) if err != nil { return nil, err } @@ -1047,34 +1080,34 @@ func builtinStrReplace(i *interpreter, trace traceElement, strv, fromv, tov valu sFrom := from.getGoString() sTo := to.getGoString() if len(sFrom) == 0 { - return nil, i.Error("'from' string must not be zero length.", trace) + return nil, i.Error("'from' string must not be zero length.") } return makeValueString(strings.Replace(sStr, sFrom, sTo, -1)), nil } -func base64DecodeGoBytes(i *interpreter, trace traceElement, str string) ([]byte, error) { +func base64DecodeGoBytes(i *interpreter, str string) ([]byte, error) { strLen := len(str) if strLen%4 != 0 { msg := fmt.Sprintf("input string appears not to be a base64 encoded string. Wrong length found (%d)", strLen) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } decodedBytes, err := base64.StdEncoding.DecodeString(str) if err != nil { - return nil, i.Error(fmt.Sprintf("failed to decode: %s", err), trace) + return nil, i.Error(fmt.Sprintf("failed to decode: %s", err)) } return decodedBytes, nil } -func builtinBase64DecodeBytes(i *interpreter, trace traceElement, input value) (value, error) { - vStr, err := i.getString(input, trace) +func builtinBase64DecodeBytes(i *interpreter, input value) (value, error) { + vStr, err := i.getString(input) if err != nil { msg := fmt.Sprintf("base64DecodeBytes requires a string, got %s", input.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } - decodedBytes, err := base64DecodeGoBytes(i, trace, vStr.getGoString()) + decodedBytes, err := base64DecodeGoBytes(i, vStr.getGoString()) if err != nil { return nil, err } @@ -1087,14 +1120,14 @@ func builtinBase64DecodeBytes(i *interpreter, trace traceElement, input value) ( return makeValueArray(res), nil } -func builtinBase64Decode(i *interpreter, trace traceElement, input value) (value, error) { - vStr, err := i.getString(input, trace) +func builtinBase64Decode(i *interpreter, input value) (value, error) { + vStr, err := i.getString(input) if err != nil { msg := fmt.Sprintf("base64DecodeBytes requires a string, got %s", input.getType().name) - return nil, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError(msg, i.getCurrentStackTrace()) } - decodedBytes, err := base64DecodeGoBytes(i, trace, vStr.getGoString()) + decodedBytes, err := base64DecodeGoBytes(i, vStr.getGoString()) if err != nil { return nil, err } @@ -1102,37 +1135,32 @@ func builtinBase64Decode(i *interpreter, trace traceElement, input value) (value return makeValueString(string(decodedBytes)), nil } -func builtinUglyObjectFlatMerge(i *interpreter, trace traceElement, x value) (value, error) { +func builtinUglyObjectFlatMerge(i *interpreter, x value) (value, error) { // TODO(sbarzowski) consider keeping comprehensions in AST // It will probably be way less hacky, with better error messages and better performance - objarr, err := i.getArray(x, trace) + objarr, err := i.getArray(x) if err != nil { return nil, err } newFields := make(simpleObjectFieldMap) - var anyObj *simpleObject for _, elem := range objarr.elements { - obj, err := i.evaluateObject(elem, trace) + obj, err := i.evaluateObject(elem) if err != nil { 
return nil, err } + // starts getting ugly - we mess with object internals simpleObj := obj.uncached.(*simpleObject) + + if len(simpleObj.locals) > 0 { + panic("Locals should have been desugared in object comprehension.") + } + // there is only one field, really for fieldName, fieldVal := range simpleObj.fields { if _, alreadyExists := newFields[fieldName]; alreadyExists { - return nil, i.Error(duplicateFieldNameErrMsg(fieldName), trace) - } - - // Here is the tricky part. Each field in a comprehension has different - // upValues, because for example in {[v]: v for v in ["x", "y", "z"] }, - // the v is different for each field. - // Yet, even though upValues are field-specific, they are shadowed by object locals, - // so we need to make holes to let them pass through - upValues := simpleObj.upValues - for _, l := range simpleObj.locals { - delete(upValues, l.name) + return nil, i.Error(duplicateFieldNameErrMsg(fieldName)) } newFields[fieldName] = simpleObjectField{ @@ -1143,29 +1171,18 @@ func builtinUglyObjectFlatMerge(i *interpreter, trace traceElement, x value) (va }, } } - anyObj = simpleObj - } - - var locals []objectLocal - var localUpValues bindingFrame - if len(objarr.elements) > 0 { - // another ugliness - we just take the locals of our last object, - // we assume that the locals are the same for each of merged objects - locals = anyObj.locals - // note that there are already holes for object locals - localUpValues = anyObj.upValues } return makeValueSimpleObject( - localUpValues, + nil, newFields, []unboundField{}, // No asserts allowed - locals, + nil, ), nil } -func builtinParseJSON(i *interpreter, trace traceElement, str value) (value, error) { - sval, err := i.getString(str, trace) +func builtinParseJSON(i *interpreter, str value) (value, error) { + sval, err := i.getString(str) if err != nil { return nil, err } @@ -1173,9 +1190,9 @@ func builtinParseJSON(i *interpreter, trace traceElement, str value) (value, err var parsedJSON interface{} err = json.Unmarshal([]byte(s), &parsedJSON) if err != nil { - return nil, i.Error(fmt.Sprintf("failed to parse JSON: %v", err.Error()), trace) + return nil, i.Error(fmt.Sprintf("failed to parse JSON: %v", err.Error())) } - return jsonToValue(i, trace, parsedJSON) + return jsonToValue(i, parsedJSON) } func jsonEncode(v interface{}) (string, error) { @@ -1194,8 +1211,8 @@ func jsonEncode(v interface{}) (string, error) { // These should ideally be unified // For backwards compatibility reasons, we are manually marshalling to json so we can control formatting // In the future, it might be apt to use a library [pretty-printing] function -func builtinManifestJSONEx(i *interpreter, trace traceElement, obj, indent value) (value, error) { - vindent, err := i.getString(indent, trace) +func builtinManifestJSONEx(i *interpreter, obj, indent value) (value, error) { + vindent, err := i.getString(indent) if err != nil { return nil, err } @@ -1217,7 +1234,7 @@ func builtinManifestJSONEx(i *interpreter, trace traceElement, obj, indent value case valueString: jStr, err := jsonEncode(v.getGoString()) if err != nil { - return "", i.Error(fmt.Sprintf("failed to marshal valueString to JSON: %v", err.Error()), trace) + return "", i.Error(fmt.Sprintf("failed to marshal valueString to JSON: %v", err.Error())) } return jStr, nil case *valueNumber: @@ -1225,14 +1242,14 @@ func builtinManifestJSONEx(i *interpreter, trace traceElement, obj, indent value case *valueBoolean: return fmt.Sprintf("%t", v.value), nil case *valueFunction: - return "", 
i.Error(fmt.Sprintf("tried to manifest function at %s", path), trace) + return "", i.Error(fmt.Sprintf("tried to manifest function at %s", path)) case *valueArray: newIndent := cindent + sindent lines := []string{"[\n"} var arrayLines []string for aI, cThunk := range v.elements { - cTv, err := cThunk.getValue(i, trace) + cTv, err := cThunk.getValue(i) if err != nil { return "", err } @@ -1255,14 +1272,14 @@ func builtinManifestJSONEx(i *interpreter, trace traceElement, obj, indent value sort.Strings(fields) var objectLines []string for _, fieldName := range fields { - fieldValue, err := v.index(i, trace, fieldName) + fieldValue, err := v.index(i, fieldName) if err != nil { return "", err } fieldNameMarshalled, err := jsonEncode(fieldName) if err != nil { - return "", i.Error(fmt.Sprintf("failed to marshal object fieldname to JSON: %v", err.Error()), trace) + return "", i.Error(fmt.Sprintf("failed to marshal object fieldname to JSON: %v", err.Error())) } newPath := append(path, fieldName) @@ -1278,7 +1295,7 @@ func builtinManifestJSONEx(i *interpreter, trace traceElement, obj, indent value lines = append(lines, "\n"+cindent+"}") return strings.Join(lines, ""), nil default: - return "", i.Error(fmt.Sprintf("unknown type to marshal to JSON: %s", reflect.TypeOf(v)), trace) + return "", i.Error(fmt.Sprintf("unknown type to marshal to JSON: %s", reflect.TypeOf(v))) } } @@ -1290,20 +1307,20 @@ func builtinManifestJSONEx(i *interpreter, trace traceElement, obj, indent value return makeValueString(finalString), nil } -func builtinExtVar(i *interpreter, trace traceElement, name value) (value, error) { - str, err := i.getString(name, trace) +func builtinExtVar(i *interpreter, name value) (value, error) { + str, err := i.getString(name) if err != nil { return nil, err } index := str.getGoString() if pv, ok := i.extVars[index]; ok { - return i.evaluatePV(pv, trace) + return i.evaluatePV(pv) } - return nil, i.Error("Undefined external variable: "+string(index), trace) + return nil, i.Error("Undefined external variable: " + string(index)) } -func builtinNative(i *interpreter, trace traceElement, name value) (value, error) { - str, err := i.getString(name, trace) +func builtinNative(i *interpreter, name value) (value, error) { + str, err := i.getString(name) if err != nil { return nil, err } @@ -1344,7 +1361,7 @@ func flattenArgs(args callArguments, params []namedParameter, defaults []value) return flatArgs } -type unaryBuiltinFunc func(*interpreter, traceElement, value) (value, error) +type unaryBuiltinFunc func(*interpreter, value) (value, error) type unaryBuiltin struct { name ast.Identifier @@ -1352,19 +1369,14 @@ type unaryBuiltin struct { params ast.Identifiers } -func getBuiltinTrace(trace traceElement, name ast.Identifier) traceElement { - context := "builtin function <" + string(name) + ">" - return traceElement{loc: trace.loc, context: &context} -} - -func (b *unaryBuiltin) evalCall(args callArguments, i *interpreter, trace traceElement) (value, error) { +func (b *unaryBuiltin) evalCall(args callArguments, i *interpreter) (value, error) { flatArgs := flattenArgs(args, b.parameters(), []value{}) - builtinTrace := getBuiltinTrace(trace, b.name) - x, err := flatArgs[0].getValue(i, trace) + + x, err := flatArgs[0].getValue(i) if err != nil { return nil, err } - return b.function(i, builtinTrace, x) + return b.function(i, x) } func (b *unaryBuiltin) parameters() []namedParameter { @@ -1379,7 +1391,7 @@ func (b *unaryBuiltin) Name() ast.Identifier { return b.name } -type binaryBuiltinFunc 
func(*interpreter, traceElement, value, value) (value, error) +type binaryBuiltinFunc func(*interpreter, value, value) (value, error) type binaryBuiltin struct { name ast.Identifier @@ -1387,18 +1399,18 @@ type binaryBuiltin struct { params ast.Identifiers } -func (b *binaryBuiltin) evalCall(args callArguments, i *interpreter, trace traceElement) (value, error) { +func (b *binaryBuiltin) evalCall(args callArguments, i *interpreter) (value, error) { flatArgs := flattenArgs(args, b.parameters(), []value{}) - builtinTrace := getBuiltinTrace(trace, b.name) - x, err := flatArgs[0].getValue(i, trace) + + x, err := flatArgs[0].getValue(i) if err != nil { return nil, err } - y, err := flatArgs[1].getValue(i, trace) + y, err := flatArgs[1].getValue(i) if err != nil { return nil, err } - return b.function(i, builtinTrace, x, y) + return b.function(i, x, y) } func (b *binaryBuiltin) parameters() []namedParameter { @@ -1413,7 +1425,7 @@ func (b *binaryBuiltin) Name() ast.Identifier { return b.name } -type ternaryBuiltinFunc func(*interpreter, traceElement, value, value, value) (value, error) +type ternaryBuiltinFunc func(*interpreter, value, value, value) (value, error) type ternaryBuiltin struct { name ast.Identifier @@ -1421,22 +1433,22 @@ type ternaryBuiltin struct { params ast.Identifiers } -func (b *ternaryBuiltin) evalCall(args callArguments, i *interpreter, trace traceElement) (value, error) { +func (b *ternaryBuiltin) evalCall(args callArguments, i *interpreter) (value, error) { flatArgs := flattenArgs(args, b.parameters(), []value{}) - builtinTrace := getBuiltinTrace(trace, b.name) - x, err := flatArgs[0].getValue(i, trace) + + x, err := flatArgs[0].getValue(i) if err != nil { return nil, err } - y, err := flatArgs[1].getValue(i, trace) + y, err := flatArgs[1].getValue(i) if err != nil { return nil, err } - z, err := flatArgs[2].getValue(i, trace) + z, err := flatArgs[2].getValue(i) if err != nil { return nil, err } - return b.function(i, builtinTrace, x, y, z) + return b.function(i, x, y, z) } func (b *ternaryBuiltin) parameters() []namedParameter { @@ -1451,7 +1463,7 @@ func (b *ternaryBuiltin) Name() ast.Identifier { return b.name } -type generalBuiltinFunc func(*interpreter, traceElement, []value) (value, error) +type generalBuiltinFunc func(*interpreter, []value) (value, error) type generalBuiltinParameter struct { name ast.Identifier @@ -1497,18 +1509,17 @@ func (b *generalBuiltin) Name() ast.Identifier { return b.name } -func (b *generalBuiltin) evalCall(args callArguments, i *interpreter, trace traceElement) (value, error) { +func (b *generalBuiltin) evalCall(args callArguments, i *interpreter) (value, error) { flatArgs := flattenArgs(args, b.parameters(), b.defaultValues()) - builtinTrace := getBuiltinTrace(trace, b.name) values := make([]value, len(flatArgs)) for j := 0; j < len(values); j++ { var err error - values[j], err = flatArgs[j].getValue(i, trace) + values[j], err = flatArgs[j].getValue(i) if err != nil { return nil, err } } - return b.function(i, builtinTrace, values) + return b.function(i, values) } // End of builtin utils diff --git a/vendor/github.com/google/go-jsonnet/imports.go b/vendor/github.com/google/go-jsonnet/imports.go index 22a8790..e994e4f 100644 --- a/vendor/github.com/google/go-jsonnet/imports.go +++ b/vendor/github.com/google/go-jsonnet/imports.go @@ -131,10 +131,10 @@ func (cache *importCache) importAST(importedFrom, importedPath string) (ast.Node } // ImportString imports a string, caches it and then returns it. 
-func (cache *importCache) importString(importedFrom, importedPath string, i *interpreter, trace traceElement) (valueString, error) { +func (cache *importCache) importString(importedFrom, importedPath string, i *interpreter) (valueString, error) { data, _, err := cache.importData(importedFrom, importedPath) if err != nil { - return nil, i.Error(err.Error(), trace) + return nil, i.Error(err.Error()) } return makeValueString(data.String()), nil } @@ -158,10 +158,10 @@ func codeToPV(i *interpreter, filename string, code string) *cachedThunk { } // ImportCode imports code from a path. -func (cache *importCache) importCode(importedFrom, importedPath string, i *interpreter, trace traceElement) (value, error) { +func (cache *importCache) importCode(importedFrom, importedPath string, i *interpreter) (value, error) { node, foundAt, err := cache.importAST(importedFrom, importedPath) if err != nil { - return nil, i.Error(err.Error(), trace) + return nil, i.Error(err.Error()) } var pv potentialValue if cachedPV, isCached := cache.codeCache[foundAt]; !isCached { @@ -176,7 +176,7 @@ func (cache *importCache) importCode(importedFrom, importedPath string, i *inter } else { pv = cachedPV } - return i.evaluatePV(pv, trace) + return i.evaluatePV(pv) } // Concrete importers diff --git a/vendor/github.com/google/go-jsonnet/internal/program/desugarer.go b/vendor/github.com/google/go-jsonnet/internal/program/desugarer.go index e9c8dce..98c83f7 100644 --- a/vendor/github.com/google/go-jsonnet/internal/program/desugarer.go +++ b/vendor/github.com/google/go-jsonnet/internal/program/desugarer.go @@ -155,7 +155,7 @@ func buildAnd(left ast.Node, right ast.Node) ast.Node { } // inside is assumed to be already desugared (and cannot be desugared again) -func desugarForSpec(inside ast.Node, forSpec *ast.ForSpec, objLevel int) (ast.Node, error) { +func desugarForSpec(inside ast.Node, loc ast.LocationRange, forSpec *ast.ForSpec, objLevel int) (ast.Node, error) { var body ast.Node if len(forSpec.Conditions) > 0 { cond := forSpec.Conditions[0].Expr @@ -179,11 +179,11 @@ func desugarForSpec(inside ast.Node, forSpec *ast.ForSpec, objLevel int) (ast.No if err != nil { return nil, err } - current := buildStdCall("flatMap", function, forSpec.Expr) + current := buildStdCall("flatMap", loc, function, forSpec.Expr) if forSpec.Outer == nil { return current, nil } - return desugarForSpec(current, forSpec.Outer, objLevel) + return desugarForSpec(current, loc, forSpec.Outer, objLevel) } func wrapInArray(inside ast.Node) ast.Node { @@ -195,7 +195,7 @@ func desugarArrayComp(comp *ast.ArrayComp, objLevel int) (ast.Node, error) { if err != nil { return nil, err } - return desugarForSpec(wrapInArray(comp.Body), &comp.Spec, objLevel) + return desugarForSpec(wrapInArray(comp.Body), *comp.Loc(), &comp.Spec, objLevel) } func desugarObjectComp(comp *ast.ObjectComp, objLevel int) (ast.Node, error) { @@ -204,16 +204,30 @@ func desugarObjectComp(comp *ast.ObjectComp, objLevel int) (ast.Node, error) { return nil, err } + // Magic merging which follows doesn't support object locals, so we need + // to desugar them completely, i.e. put them inside the fields. The locals + // can be different for each field in a comprehension (unlike locals in + // "normal" objects which have a fixed value), so it's not even too wasteful. + if len(obj.Locals) > 0 { + field := &obj.Fields[0] + field.Body = &ast.Local{ + Body: field.Body, + Binds: obj.Locals, + // TODO(sbarzowski) should I set some NodeBase stuff here? 
+ } + obj.Locals = nil + } + if len(obj.Fields) != 1 { - panic("Too many fields in object comprehension, it should have been caught during parsing") + panic("Wrong number of fields in object comprehension, it should have been caught during parsing") } - desugaredArrayComp, err := desugarForSpec(wrapInArray(obj), &comp.Spec, objLevel) + desugaredArrayComp, err := desugarForSpec(wrapInArray(obj), *comp.Loc(), &comp.Spec, objLevel) if err != nil { return nil, err } - desugaredComp := buildStdCall("$objectFlatMerge", desugaredArrayComp) + desugaredComp := buildStdCall("$objectFlatMerge", *comp.Loc(), desugaredArrayComp) return desugaredComp, nil } @@ -231,7 +245,7 @@ func buildSimpleIndex(obj ast.Node, member ast.Identifier) ast.Node { } } -func buildStdCall(builtinName ast.Identifier, args ...ast.Node) ast.Node { +func buildStdCall(builtinName ast.Identifier, loc ast.LocationRange, args ...ast.Node) ast.Node { std := &ast.Var{Id: "std"} builtin := buildSimpleIndex(std, builtinName) positional := make([]ast.CommaSeparatedExpr, len(args)) @@ -239,6 +253,9 @@ func buildStdCall(builtinName ast.Identifier, args ...ast.Node) ast.Node { positional[i].Expr = args[i] } return &ast.Apply{ + NodeBase: ast.NodeBase{ + LocRange: loc, + }, Target: builtin, Arguments: ast.Arguments{Positional: positional}, } @@ -322,9 +339,14 @@ func desugar(astPtr *ast.Node, objLevel int) (err error) { node.Message = buildLiteralString("Assertion failed") } *astPtr = &ast.Conditional{ - Cond: node.Cond, - BranchTrue: node.Rest, - BranchFalse: &ast.Error{Expr: node.Message}, + Cond: node.Cond, + BranchTrue: node.Rest, + BranchFalse: &ast.Error{ + NodeBase: ast.NodeBase{ + LocRange: *node.Loc(), + }, + Expr: node.Message, + }, } err = desugar(astPtr, objLevel) if err != nil { @@ -336,9 +358,9 @@ func desugar(astPtr *ast.Node, objLevel int) (err error) { if funcname, replaced := desugaredBop[node.Op]; replaced { if node.Op == ast.BopIn { // reversed order of arguments - *astPtr = buildStdCall(funcname, node.Right, node.Left) + *astPtr = buildStdCall(funcname, *node.Loc(), node.Right, node.Left) } else { - *astPtr = buildStdCall(funcname, node.Left, node.Right) + *astPtr = buildStdCall(funcname, *node.Loc(), node.Left, node.Right) } return desugar(astPtr, objLevel) } @@ -441,7 +463,7 @@ func desugar(astPtr *ast.Node, objLevel int) (err error) { if node.Step == nil { node.Step = &ast.LiteralNull{} } - *astPtr = buildStdCall("slice", node.Target, node.BeginIndex, node.EndIndex, node.Step) + *astPtr = buildStdCall("slice", *node.Loc(), node.Target, node.BeginIndex, node.EndIndex, node.Step) err = desugar(astPtr, objLevel) if err != nil { return diff --git a/vendor/github.com/google/go-jsonnet/interpreter.go b/vendor/github.com/google/go-jsonnet/interpreter.go index adfaf97..087223d 100644 --- a/vendor/github.com/google/go-jsonnet/interpreter.go +++ b/vendor/github.com/google/go-jsonnet/interpreter.go @@ -49,15 +49,15 @@ func makeEnvironment(upValues bindingFrame, sb selfBinding) environment { } } -func (i *interpreter) getCurrentStackTrace(additional traceElement) []traceFrame { +func (i *interpreter) getCurrentStackTrace() []traceFrame { var result []traceFrame for _, f := range i.stack.stack { if f.isCall { result = append(result, traceElementToTraceFrame(f.trace)) } } - if additional.loc != nil { - result = append(result, traceElementToTraceFrame(additional)) + if i.stack.currentTrace.loc != nil { + result = append(result, traceElementToTraceFrame(i.stack.currentTrace)) } return result } @@ -95,9 +95,10 @@ func dumpCallFrame(c 
*callFrame) string { } type callStack struct { - calls int - limit int - stack []*callFrame + calls int + limit int + stack []*callFrame + currentTrace traceElement } func dumpCallStack(c *callStack) string { @@ -123,6 +124,7 @@ func (s *callStack) popIfExists(whichFrame int) { if s.top().isCall { s.calls-- } + s.setCurrentTrace(s.stack[len(s.stack)-1].trace) s.stack = s.stack[:len(s.stack)-1] } } @@ -142,6 +144,17 @@ func (s *callStack) tailCallTrimStack() { } } +func (s *callStack) setCurrentTrace(trace traceElement) { + if s.currentTrace != (traceElement{}) { + panic("Tried to change the traceElement while the old one was still there.") + } + s.currentTrace = trace +} + +func (s *callStack) clearCurrentTrace() { + s.currentTrace = traceElement{} +} + type tailCallStatus int const ( @@ -149,20 +162,26 @@ const ( tailCall ) -func (s *callStack) newCall(trace traceElement, env environment, trimmable bool) { +func (s *callStack) newCall(env environment, trimmable bool) { + if s.currentTrace == (traceElement{}) { + panic("Saving empty traceElement on stack") + } s.stack = append(s.stack, &callFrame{ isCall: true, - trace: trace, + trace: s.currentTrace, env: env, trimmable: trimmable, }) + s.clearCurrentTrace() s.calls++ } func (s *callStack) newLocal(vars bindingFrame) { s.stack = append(s.stack, &callFrame{ - env: makeEnvironment(vars, selfBinding{}), + env: makeEnvironment(vars, selfBinding{}), + trace: s.currentTrace, }) + s.clearCurrentTrace() } // getSelfBinding resolves the self construct @@ -256,12 +275,12 @@ func addBindings(a, b bindingFrame) bindingFrame { return result } -func (i *interpreter) newCall(trace traceElement, env environment, trimmable bool) error { +func (i *interpreter) newCall(env environment, trimmable bool) error { s := &i.stack if s.calls >= s.limit { - return makeRuntimeError("max stack frames exceeded.", i.getCurrentStackTrace(trace)) + return makeRuntimeError("max stack frames exceeded.", i.getCurrentStackTrace()) } - s.newCall(trace, env, trimmable) + s.newCall(env, trimmable) return nil } @@ -270,6 +289,10 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { loc: a.Loc(), context: a.Context(), } + oldTrace := i.stack.currentTrace + i.stack.clearCurrentTrace() + i.stack.setCurrentTrace(trace) + defer func() { i.stack.clearCurrentTrace(); i.stack.setCurrentTrace(oldTrace) }() switch node := a.(type) { case *ast.Array: @@ -289,7 +312,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { if err != nil { return nil, err } - x, err := i.getBoolean(xv, trace) + x, err := i.getBoolean(xv) if err != nil { return nil, err } @@ -300,14 +323,14 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { if err != nil { return nil, err } - return i.getBoolean(yv, trace) + return i.getBoolean(yv) } else if node.Op == ast.BopOr { // Special case for shortcut semantics. xv, err := i.evaluate(node.Left, nonTailCall) if err != nil { return nil, err } - x, err := i.getBoolean(xv, trace) + x, err := i.getBoolean(xv) if err != nil { return nil, err } @@ -318,7 +341,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { if err != nil { return nil, err } - return i.getBoolean(yv, trace) + return i.getBoolean(yv) } else { left, err := i.evaluate(node.Left, nonTailCall) @@ -331,7 +354,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { } // TODO(dcunnin): The double dereference here is probably not necessary. 
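// --- Illustrative sketch (not part of the upstream go-jsonnet patch) ---------
// The interpreter hunks above replace the `trace traceElement` parameter that
// used to be threaded through every builtin with a `currentTrace` field on the
// call stack: evaluate() saves the previous trace, installs the node's trace,
// and restores it on exit, while getCurrentStackTrace() reads the field when an
// error is built. The standalone program below is a minimal, hypothetical
// sketch of that pattern under invented names; it is not the go-jsonnet API.
package main

import "fmt"

type traceElement struct{ loc string }

type callStack struct {
	frames       []traceElement // completed call frames
	currentTrace traceElement   // trace of the node currently being evaluated
}

// setCurrentTrace installs the trace for the node being evaluated.
func (s *callStack) setCurrentTrace(t traceElement) {
	if s.currentTrace != (traceElement{}) {
		panic("current trace already set")
	}
	s.currentTrace = t
}

// clearCurrentTrace removes the installed trace.
func (s *callStack) clearCurrentTrace() { s.currentTrace = traceElement{} }

// currentStackTrace needs no extra argument: the most recent location is
// always available on the stack itself.
func (s *callStack) currentStackTrace() []traceElement {
	result := append([]traceElement{}, s.frames...)
	if s.currentTrace != (traceElement{}) {
		result = append(result, s.currentTrace)
	}
	return result
}

// evaluate mimics the set/clear discipline: swap the trace in, restore the
// previous one on return so nested evaluations report correctly.
func (s *callStack) evaluate(loc string, body func() error) error {
	oldTrace := s.currentTrace
	s.clearCurrentTrace()
	s.setCurrentTrace(traceElement{loc: loc})
	defer func() { s.clearCurrentTrace(); s.setCurrentTrace(oldTrace) }()
	return body()
}

func main() {
	s := &callStack{}
	err := s.evaluate("example.jsonnet:3:5", func() error {
		// An error raised here can read the trace from the stack instead of
		// receiving it as a parameter.
		return fmt.Errorf("runtime error, stack: %v", s.currentStackTrace())
	})
	fmt.Println(err)
}
// ------------------------------------------------------------------------------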
builtin := bopBuiltins[node.Op] - return builtin.function(i, trace, left, right) + return builtin.function(i, left, right) } case *ast.Unary: @@ -342,7 +365,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { builtin := uopBuiltins[node.Op] - result, err := builtin.function(i, trace, value) + result, err := builtin.function(i, value) if err != nil { return nil, err } @@ -353,7 +376,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { if err != nil { return nil, err } - condBool, err := i.getBoolean(cond, trace) + condBool, err := i.getBoolean(cond) if err != nil { return nil, err } @@ -378,11 +401,11 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { // Omitted field. continue default: - return nil, i.Error(fmt.Sprintf("Field name must be string, got %v", fieldNameValue.getType().name), trace) + return nil, i.Error(fmt.Sprintf("Field name must be string, got %v", fieldNameValue.getType().name)) } if _, ok := fields[fieldName]; ok { - return nil, i.Error(duplicateFieldNameErrMsg(fieldName), trace) + return nil, i.Error(duplicateFieldNameErrMsg(fieldName)) } var f unboundField = &codeUnboundField{field.Body} if field.PlusSuper { @@ -408,16 +431,16 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { return nil, err } if msgVal.getType() != stringType { - msgVal, err = builtinToString(i, trace, msgVal) + msgVal, err = builtinToString(i, msgVal) if err != nil { return nil, err } } - msg, err := i.getString(msgVal, trace) + msg, err := i.getString(msgVal) if err != nil { return nil, err } - return nil, i.Error(msg.getGoString(), trace) + return nil, i.Error(msg.getGoString()) case *ast.Index: targetValue, err := i.evaluate(node.Target, nonTailCall) @@ -430,37 +453,37 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { } switch target := targetValue.(type) { case *valueObject: - indexString, err := i.getString(index, trace) + indexString, err := i.getString(index) if err != nil { return nil, err } - return target.index(i, trace, indexString.getGoString()) + return target.index(i, indexString.getGoString()) case *valueArray: - indexInt, err := i.getNumber(index, trace) + indexInt, err := i.getNumber(index) if err != nil { return nil, err } // TODO(https://github.com/google/jsonnet/issues/377): non-integer indexes should be an error - return target.index(i, trace, int(indexInt.value)) + return target.index(i, int(indexInt.value)) case valueString: - indexInt, err := i.getNumber(index, trace) + indexInt, err := i.getNumber(index) if err != nil { return nil, err } // TODO(https://github.com/google/jsonnet/issues/377): non-integer indexes should be an error - return target.index(i, trace, int(indexInt.value)) + return target.index(i, int(indexInt.value)) } - return nil, i.Error(fmt.Sprintf("Value non indexable: %v", reflect.TypeOf(targetValue)), trace) + return nil, i.Error(fmt.Sprintf("Value non indexable: %v", reflect.TypeOf(targetValue))) case *ast.Import: codePath := node.Loc().FileName - return i.importCache.importCode(codePath, node.File.Value, i, trace) + return i.importCache.importCode(codePath, node.File.Value, i) case *ast.ImportStr: codePath := node.Loc().FileName - return i.importCache.importString(codePath, node.File.Value, i, trace) + return i.importCache.importString(codePath, node.File.Value, i) case *ast.LiteralBoolean: return makeValueBoolean(node.Value), nil @@ -474,7 +497,7 @@ func (i *interpreter) evaluate(a ast.Node, tc 
tailCallStatus) (value, error) { // too large to fit in a double. num, err := strconv.ParseFloat(node.OriginalString, 64) if err != nil { - return nil, i.Error("overflow", trace) + return nil, i.Error("overflow") } return makeValueNumber(num), nil @@ -506,25 +529,25 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { case *ast.Var: foo := i.stack.lookUpVarOrPanic(node.Id) - return foo.getValue(i, trace) + return foo.getValue(i) case *ast.SuperIndex: index, err := i.evaluate(node.Index, nonTailCall) if err != nil { return nil, err } - indexStr, err := i.getString(index, trace) + indexStr, err := i.getString(index) if err != nil { return nil, err } - return objectIndex(i, trace, i.stack.getSelfBinding().super(), indexStr.getGoString()) + return objectIndex(i, i.stack.getSelfBinding().super(), indexStr.getGoString()) case *ast.InSuper: index, err := i.evaluate(node.Index, nonTailCall) if err != nil { return nil, err } - indexStr, err := i.getString(index, trace) + indexStr, err := i.getString(index) if err != nil { return nil, err } @@ -542,7 +565,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { if err != nil { return nil, err } - function, err := i.getFunction(target, trace) + function, err := i.getFunction(target) if err != nil { return nil, err } @@ -561,7 +584,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { for i, arg := range node.Arguments.Named { arguments.named[i] = namedCallArgument{name: arg.Name, pv: &cachedThunk{env: &argEnv, body: arg.Arg}} } - return i.evaluateTailCall(function, arguments, tc, trace) + return i.evaluateTailCall(function, arguments, tc) case *astMakeArrayElement: arguments := callArguments{ @@ -571,7 +594,7 @@ func (i *interpreter) evaluate(a ast.Node, tc tailCallStatus) (value, error) { }, }, } - return i.evaluateTailCall(node.function, arguments, tc, trace) + return i.evaluateTailCall(node.function, arguments, tc) default: panic(fmt.Sprintf("Executing this AST type not implemented: %v", reflect.TypeOf(a))) @@ -624,14 +647,20 @@ func unparseNumber(v float64) string { } // manifestJSON converts to standard JSON representation as in "encoding/json" package -func (i *interpreter) manifestJSON(trace traceElement, v value) (interface{}, error) { +func (i *interpreter) manifestJSON(v value) (interface{}, error) { + // TODO(sbarzowski) Add nice stack traces indicating the part of the code which + // evaluates to non-manifestable value (that might require passing context about + // the root value) + if i.stack.currentTrace == (traceElement{}) { + panic("manifesting JSON with empty traceElement") + } switch v := v.(type) { case *valueBoolean: return v.value, nil case *valueFunction: - return nil, makeRuntimeError("couldn't manifest function in JSON output.", i.getCurrentStackTrace(trace)) + return nil, makeRuntimeError("couldn't manifest function as JSON", i.getCurrentStackTrace()) case *valueNumber: return v.value, nil @@ -645,11 +674,11 @@ func (i *interpreter) manifestJSON(trace traceElement, v value) (interface{}, er case *valueArray: result := make([]interface{}, 0, len(v.elements)) for _, th := range v.elements { - elVal, err := i.evaluatePV(th, trace) + elVal, err := i.evaluatePV(th) if err != nil { return nil, err } - elem, err := i.manifestJSON(trace, elVal) + elem, err := i.manifestJSON(elVal) if err != nil { return nil, err } @@ -661,7 +690,7 @@ func (i *interpreter) manifestJSON(trace traceElement, v value) (interface{}, er fieldNames := objectFields(v, 
withoutHidden) sort.Strings(fieldNames) - err := checkAssertions(i, trace, v) + err := checkAssertions(i, v) if err != nil { return nil, err } @@ -669,12 +698,12 @@ func (i *interpreter) manifestJSON(trace traceElement, v value) (interface{}, er result := make(map[string]interface{}) for _, fieldName := range fieldNames { - fieldVal, err := v.index(i, trace, fieldName) + fieldVal, err := v.index(i, fieldName) if err != nil { return nil, err } - field, err := i.manifestJSON(trace, fieldVal) + field, err := i.manifestJSON(fieldVal) if err != nil { return nil, err } @@ -686,7 +715,7 @@ func (i *interpreter) manifestJSON(trace traceElement, v value) (interface{}, er default: return nil, makeRuntimeError( fmt.Sprintf("manifesting this value not implemented yet: %s", reflect.TypeOf(v)), - i.getCurrentStackTrace(trace), + i.getCurrentStackTrace(), ) } @@ -790,8 +819,8 @@ func serializeJSON(v interface{}, multiline bool, indent string, buf *bytes.Buff } func (i *interpreter) manifestAndSerializeJSON( - buf *bytes.Buffer, trace traceElement, v value, multiline bool, indent string) error { - manifested, err := i.manifestJSON(trace, v) + buf *bytes.Buffer, v value, multiline bool, indent string) error { + manifested, err := i.manifestJSON(v) if err != nil { return err } @@ -800,19 +829,19 @@ func (i *interpreter) manifestAndSerializeJSON( } // manifestString expects the value to be a string and returns it. -func (i *interpreter) manifestString(buf *bytes.Buffer, trace traceElement, v value) error { +func (i *interpreter) manifestString(buf *bytes.Buffer, v value) error { switch v := v.(type) { case valueString: buf.WriteString(v.getGoString()) return nil default: - return makeRuntimeError(fmt.Sprintf("expected string result, got: %s", v.getType().name), i.getCurrentStackTrace(trace)) + return makeRuntimeError(fmt.Sprintf("expected string result, got: %s", v.getType().name), i.getCurrentStackTrace()) } } -func (i *interpreter) manifestAndSerializeMulti(trace traceElement, v value, stringOutputMode bool) (r map[string]string, err error) { +func (i *interpreter) manifestAndSerializeMulti(v value, stringOutputMode bool) (r map[string]string, err error) { r = make(map[string]string) - json, err := i.manifestJSON(trace, v) + json, err := i.manifestJSON(v) if err != nil { return r, err } @@ -826,7 +855,7 @@ func (i *interpreter) manifestAndSerializeMulti(trace traceElement, v value, str default: msg := fmt.Sprintf("multi mode: top-level object's key %s has a value of type %T, "+ "should be a string", filename, val) - return r, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return r, makeRuntimeError(msg, i.getCurrentStackTrace()) } } else { var buf bytes.Buffer @@ -839,14 +868,14 @@ func (i *interpreter) manifestAndSerializeMulti(trace traceElement, v value, str msg := fmt.Sprintf("multi mode: top-level object was a %s, "+ "should be an object whose keys are filenames and values hold "+ "the JSON for that file.", v.getType().name) - return r, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return r, makeRuntimeError(msg, i.getCurrentStackTrace()) } return } -func (i *interpreter) manifestAndSerializeYAMLStream(trace traceElement, v value) (r []string, err error) { +func (i *interpreter) manifestAndSerializeYAMLStream(v value) (r []string, err error) { r = make([]string, 0) - json, err := i.manifestJSON(trace, v) + json, err := i.manifestJSON(v) if err != nil { return r, err } @@ -862,12 +891,12 @@ func (i *interpreter) manifestAndSerializeYAMLStream(trace traceElement, v value msg := 
fmt.Sprintf("stream mode: top-level object was a %s, "+ "should be an array whose elements hold "+ "the JSON for each document in the stream.", v.getType().name) - return r, makeRuntimeError(msg, i.getCurrentStackTrace(trace)) + return r, makeRuntimeError(msg, i.getCurrentStackTrace()) } return } -func jsonToValue(i *interpreter, trace traceElement, v interface{}) (value, error) { +func jsonToValue(i *interpreter, v interface{}) (value, error) { switch v := v.(type) { case nil: return &nullValue, nil @@ -875,7 +904,7 @@ func jsonToValue(i *interpreter, trace traceElement, v interface{}) (value, erro case []interface{}: elems := make([]*cachedThunk, len(v)) for counter, elem := range v { - val, err := jsonToValue(i, trace, elem) + val, err := jsonToValue(i, elem) if err != nil { return nil, err } @@ -886,12 +915,12 @@ func jsonToValue(i *interpreter, trace traceElement, v interface{}) (value, erro case bool: return makeValueBoolean(v), nil case float64: - return makeDoubleCheck(i, trace, v) + return makeDoubleCheck(i, v) case map[string]interface{}: fieldMap := map[string]value{} for name, f := range v { - val, err := jsonToValue(i, trace, f) + val, err := jsonToValue(i, f) if err != nil { return nil, err } @@ -903,12 +932,12 @@ func jsonToValue(i *interpreter, trace traceElement, v interface{}) (value, erro return makeValueString(v), nil default: - return nil, i.Error(fmt.Sprintf("Not a json type: %#+v", v), trace) + return nil, i.Error(fmt.Sprintf("Not a json type: %#+v", v)) } } -func (i *interpreter) EvalInCleanEnv(fromWhere traceElement, env *environment, ast ast.Node, trimmable bool) (value, error) { - err := i.newCall(fromWhere, *env, trimmable) +func (i *interpreter) EvalInCleanEnv(env *environment, ast ast.Node, trimmable bool) (value, error) { + err := i.newCall(*env, trimmable) if err != nil { return nil, err } @@ -921,56 +950,54 @@ func (i *interpreter) EvalInCleanEnv(fromWhere traceElement, env *environment, a return val, err } -func (i *interpreter) evaluatePV(ph potentialValue, trace traceElement) (value, error) { - return ph.getValue(i, trace) +func (i *interpreter) evaluatePV(ph potentialValue) (value, error) { + return ph.getValue(i) } -func (i *interpreter) evaluateTailCall(function *valueFunction, args callArguments, tc tailCallStatus, trace traceElement) (value, error) { +func (i *interpreter) evaluateTailCall(function *valueFunction, args callArguments, tc tailCallStatus) (value, error) { if tc == tailCall { i.stack.tailCallTrimStack() } - return function.call(i, trace, args) + return function.call(i, args) } -func (i *interpreter) Error(s string, trace traceElement) error { - err := makeRuntimeError(s, i.getCurrentStackTrace(trace)) +func (i *interpreter) Error(s string) error { + err := makeRuntimeError(s, i.getCurrentStackTrace()) return err } -func (i *interpreter) typeErrorSpecific(bad value, good value, trace traceElement) error { +func (i *interpreter) typeErrorSpecific(bad value, good value) error { return i.Error( fmt.Sprintf("Unexpected type %v, expected %v", bad.getType().name, good.getType().name), - trace, ) } -func (i *interpreter) typeErrorGeneral(bad value, trace traceElement) error { +func (i *interpreter) typeErrorGeneral(bad value) error { return i.Error( fmt.Sprintf("Unexpected type %v", bad.getType().name), - trace, ) } -func (i *interpreter) getNumber(val value, trace traceElement) (*valueNumber, error) { +func (i *interpreter) getNumber(val value) (*valueNumber, error) { switch v := val.(type) { case *valueNumber: return v, nil default: - return 
nil, i.typeErrorSpecific(val, &valueNumber{}, trace) + return nil, i.typeErrorSpecific(val, &valueNumber{}) } } //nolint:unused -func (i *interpreter) evaluateNumber(pv potentialValue, trace traceElement) (*valueNumber, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateNumber(pv potentialValue) (*valueNumber, error) { + v, err := i.evaluatePV(pv) if err != nil { return nil, err } - return i.getNumber(v, trace) + return i.getNumber(v) } -func (i *interpreter) getInt(val value, trace traceElement) (int, error) { - num, err := i.getNumber(val, trace) +func (i *interpreter) getInt(val value) (int, error) { + num, err := i.getNumber(val) if err != nil { return 0, err } @@ -978,128 +1005,128 @@ func (i *interpreter) getInt(val value, trace traceElement) (int, error) { // on any machine. And it's used only for indexing anyway. intNum := int(int32(num.value)) if float64(intNum) != num.value { - return 0, i.Error(fmt.Sprintf("Expected an integer, but got %v", num.value), trace) + return 0, i.Error(fmt.Sprintf("Expected an integer, but got %v", num.value)) } return intNum, nil } -func (i *interpreter) evaluateInt(pv potentialValue, trace traceElement) (int, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateInt(pv potentialValue) (int, error) { + v, err := i.evaluatePV(pv) if err != nil { return 0, err } - return i.getInt(v, trace) + return i.getInt(v) } //nolint:unused -func (i *interpreter) getInt64(val value, trace traceElement) (int64, error) { - num, err := i.getNumber(val, trace) +func (i *interpreter) getInt64(val value) (int64, error) { + num, err := i.getNumber(val) if err != nil { return 0, err } intNum := int64(num.value) if float64(intNum) != num.value { - return 0, i.Error(fmt.Sprintf("Expected an integer, but got %v", num.value), trace) + return 0, i.Error(fmt.Sprintf("Expected an integer, but got %v", num.value)) } return intNum, nil } //nolint:unused -func (i *interpreter) evaluateInt64(pv potentialValue, trace traceElement) (int64, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateInt64(pv potentialValue) (int64, error) { + v, err := i.evaluatePV(pv) if err != nil { return 0, err } - return i.getInt64(v, trace) + return i.getInt64(v) } -func (i *interpreter) getString(val value, trace traceElement) (valueString, error) { +func (i *interpreter) getString(val value) (valueString, error) { switch v := val.(type) { case valueString: return v, nil default: - return nil, i.typeErrorSpecific(val, emptyString(), trace) + return nil, i.typeErrorSpecific(val, emptyString()) } } //nolint:unused -func (i *interpreter) evaluateString(pv potentialValue, trace traceElement) (valueString, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateString(pv potentialValue) (valueString, error) { + v, err := i.evaluatePV(pv) if err != nil { return nil, err } - return i.getString(v, trace) + return i.getString(v) } -func (i *interpreter) getBoolean(val value, trace traceElement) (*valueBoolean, error) { +func (i *interpreter) getBoolean(val value) (*valueBoolean, error) { switch v := val.(type) { case *valueBoolean: return v, nil default: - return nil, i.typeErrorSpecific(val, &valueBoolean{}, trace) + return nil, i.typeErrorSpecific(val, &valueBoolean{}) } } //nolint:unused -func (i *interpreter) evaluateBoolean(pv potentialValue, trace traceElement) (*valueBoolean, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateBoolean(pv potentialValue) (*valueBoolean, error) { + v, err := 
i.evaluatePV(pv) if err != nil { return nil, err } - return i.getBoolean(v, trace) + return i.getBoolean(v) } -func (i *interpreter) getArray(val value, trace traceElement) (*valueArray, error) { +func (i *interpreter) getArray(val value) (*valueArray, error) { switch v := val.(type) { case *valueArray: return v, nil default: - return nil, i.typeErrorSpecific(val, &valueArray{}, trace) + return nil, i.typeErrorSpecific(val, &valueArray{}) } } //nolint:unused -func (i *interpreter) evaluateArray(pv potentialValue, trace traceElement) (*valueArray, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateArray(pv potentialValue) (*valueArray, error) { + v, err := i.evaluatePV(pv) if err != nil { return nil, err } - return i.getArray(v, trace) + return i.getArray(v) } -func (i *interpreter) getFunction(val value, trace traceElement) (*valueFunction, error) { +func (i *interpreter) getFunction(val value) (*valueFunction, error) { switch v := val.(type) { case *valueFunction: return v, nil default: - return nil, i.typeErrorSpecific(val, &valueFunction{}, trace) + return nil, i.typeErrorSpecific(val, &valueFunction{}) } } //nolint:unused -func (i *interpreter) evaluateFunction(pv potentialValue, trace traceElement) (*valueFunction, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateFunction(pv potentialValue) (*valueFunction, error) { + v, err := i.evaluatePV(pv) if err != nil { return nil, err } - return i.getFunction(v, trace) + return i.getFunction(v) } -func (i *interpreter) getObject(val value, trace traceElement) (*valueObject, error) { +func (i *interpreter) getObject(val value) (*valueObject, error) { switch v := val.(type) { case *valueObject: return v, nil default: - return nil, i.typeErrorSpecific(val, &valueObject{}, trace) + return nil, i.typeErrorSpecific(val, &valueObject{}) } } -func (i *interpreter) evaluateObject(pv potentialValue, trace traceElement) (*valueObject, error) { - v, err := i.evaluatePV(pv, trace) +func (i *interpreter) evaluateObject(pv potentialValue) (*valueObject, error) { + v, err := i.evaluatePV(pv) if err != nil { return nil, err } - return i.getObject(v, trace) + return i.getObject(v) } func buildStdObject(i *interpreter) (*valueObject, error) { @@ -1128,7 +1155,9 @@ func evaluateStd(i *interpreter) (value, error) { evalLoc := ast.MakeLocationRangeMessage("During evaluation of std") evalTrace := traceElement{loc: &evalLoc} node := astgen.StdAst - return i.EvalInCleanEnv(evalTrace, &beforeStdEnv, node, false) + i.stack.setCurrentTrace(evalTrace) + defer i.stack.clearCurrentTrace() + return i.EvalInCleanEnv(&beforeStdEnv, node, false) } func prepareExtVars(i *interpreter, ext vmExtMap, kind string) map[string]*cachedThunk { @@ -1188,7 +1217,9 @@ func evaluateAux(i *interpreter, node ast.Node, tla vmExtMap) (value, traceEleme loc: &evalLoc, } env := makeInitialEnv(node.Loc().FileName, i.baseStd) - result, err := i.EvalInCleanEnv(evalTrace, &env, node, false) + i.stack.setCurrentTrace(evalTrace) + result, err := i.EvalInCleanEnv(&env, node, false) + i.stack.clearCurrentTrace() if err != nil { return nil, traceElement{}, err } @@ -1199,11 +1230,13 @@ func evaluateAux(i *interpreter, node ast.Node, tla vmExtMap) (value, traceEleme for argName, pv := range toplevelArgMap { args.named = append(args.named, namedCallArgument{name: ast.Identifier(argName), pv: pv}) } - funcLoc := ast.MakeLocationRangeMessage("Top-level function") + funcLoc := ast.MakeLocationRangeMessage("Top-level function call") funcTrace := traceElement{ 
loc: &funcLoc, } - result, err = f.call(i, funcTrace, args) + i.stack.setCurrentTrace(funcTrace) + result, err = f.call(i, args) + i.stack.clearCurrentTrace() if err != nil { return nil, traceElement{}, err } @@ -1230,11 +1263,13 @@ func evaluate(node ast.Node, ext vmExtMap, tla vmExtMap, nativeFuncs map[string] } var buf bytes.Buffer + i.stack.setCurrentTrace(manifestationTrace) if stringOutputMode { - err = i.manifestString(&buf, manifestationTrace, result) + err = i.manifestString(&buf, result) } else { - err = i.manifestAndSerializeJSON(&buf, manifestationTrace, result, true, "") + err = i.manifestAndSerializeJSON(&buf, result, true, "") } + i.stack.clearCurrentTrace() if err != nil { return "", err } @@ -1256,7 +1291,10 @@ func evaluateMulti(node ast.Node, ext vmExtMap, tla vmExtMap, nativeFuncs map[st return nil, err } - return i.manifestAndSerializeMulti(manifestationTrace, result, stringOutputMode) + i.stack.setCurrentTrace(manifestationTrace) + manifested, err := i.manifestAndSerializeMulti(result, stringOutputMode) + i.stack.clearCurrentTrace() + return manifested, err } // TODO(sbarzowski) this function takes far too many arguments - build interpreter in vm instead @@ -1273,5 +1311,8 @@ func evaluateStream(node ast.Node, ext vmExtMap, tla vmExtMap, nativeFuncs map[s return nil, err } - return i.manifestAndSerializeYAMLStream(manifestationTrace, result) + i.stack.setCurrentTrace(manifestationTrace) + manifested, err := i.manifestAndSerializeYAMLStream(result) + i.stack.clearCurrentTrace() + return manifested, err } diff --git a/vendor/github.com/google/go-jsonnet/tests.sh b/vendor/github.com/google/go-jsonnet/tests.sh index 3a0abe2..a281864 100644 --- a/vendor/github.com/google/go-jsonnet/tests.sh +++ b/vendor/github.com/google/go-jsonnet/tests.sh @@ -3,6 +3,7 @@ set -e PYTHON_COMMAND=${PYTHON_COMMAND:=python} +JSONNET_CPP_DIR=${JSONNET_CPP_DIR:=$PWD/cpp-jsonnet} set -x @@ -19,6 +20,7 @@ else fi export IMPLEMENTATION=golang +export OVERRIDE_DIR="$PWD/testdata/cpp-tests-override/" go build ./cmd/jsonnet go build ./cmd/jsonnetfmt @@ -28,6 +30,5 @@ export DISABLE_ERROR_TESTS=true export JSONNETFMT_BIN="$PWD/jsonnetfmt" export JSONNET_BIN="$PWD/jsonnet" -git submodule update --recursive cpp-jsonnet -cd cpp-jsonnet +cd "$JSONNET_CPP_DIR" exec ./tests.sh diff --git a/vendor/github.com/google/go-jsonnet/thunks.go b/vendor/github.com/google/go-jsonnet/thunks.go index cb21769..1cd4175 100644 --- a/vendor/github.com/google/go-jsonnet/thunks.go +++ b/vendor/github.com/google/go-jsonnet/thunks.go @@ -32,7 +32,7 @@ type readyValue struct { content value } -func (rv *readyValue) evaluate(i *interpreter, trace traceElement, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error) { +func (rv *readyValue) evaluate(i *interpreter, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error) { return rv.content, nil } @@ -58,14 +58,14 @@ func readyThunk(content value) *cachedThunk { return &cachedThunk{content: content} } -func (t *cachedThunk) getValue(i *interpreter, trace traceElement) (value, error) { +func (t *cachedThunk) getValue(i *interpreter) (value, error) { if t.content != nil { return t.content, nil } if t.err != nil { return nil, t.err } - v, err := i.EvalInCleanEnv(trace, t.env, t.body, false) + v, err := i.EvalInCleanEnv(t.env, t.body, false) if err != nil { // TODO(sbarzowski) perhaps cache errors as well // may be necessary if we allow handling them in any way @@ -87,9 +87,9 @@ type codeUnboundField struct { body ast.Node } -func (f 
*codeUnboundField) evaluate(i *interpreter, trace traceElement, sb selfBinding, origBindings bindingFrame, fieldName string) (value, error) { +func (f *codeUnboundField) evaluate(i *interpreter, sb selfBinding, origBindings bindingFrame, fieldName string) (value, error) { env := makeEnvironment(origBindings, sb) - return i.EvalInCleanEnv(trace, &env, f.body, false) + return i.EvalInCleanEnv(&env, f.body, false) } // Provide additional bindings for a field. It shadows bindings from the object. @@ -99,7 +99,7 @@ type bindingsUnboundField struct { bindings bindingFrame } -func (f *bindingsUnboundField) evaluate(i *interpreter, trace traceElement, sb selfBinding, origBindings bindingFrame, fieldName string) (value, error) { +func (f *bindingsUnboundField) evaluate(i *interpreter, sb selfBinding, origBindings bindingFrame, fieldName string) (value, error) { upValues := make(bindingFrame) for variable, pvalue := range origBindings { upValues[variable] = pvalue @@ -107,7 +107,7 @@ func (f *bindingsUnboundField) evaluate(i *interpreter, trace traceElement, sb s for variable, pvalue := range f.bindings { upValues[variable] = pvalue } - return f.inner.evaluate(i, trace, sb, upValues, fieldName) + return f.inner.evaluate(i, sb, upValues, fieldName) } // plusSuperUnboundField represents a `field+: ...` that hasn't been bound to an object. @@ -115,19 +115,19 @@ type plusSuperUnboundField struct { inner unboundField } -func (f *plusSuperUnboundField) evaluate(i *interpreter, trace traceElement, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error) { - right, err := f.inner.evaluate(i, trace, sb, origBinding, fieldName) +func (f *plusSuperUnboundField) evaluate(i *interpreter, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error) { + right, err := f.inner.evaluate(i, sb, origBinding, fieldName) if err != nil { return nil, err } if !objectHasField(sb.super(), fieldName, withHidden) { return right, nil } - left, err := objectIndex(i, trace, sb.super(), fieldName) + left, err := objectIndex(i, sb.super(), fieldName) if err != nil { return nil, err } - return builtinPlus(i, trace, left, right) + return builtinPlus(i, left, right) } // evalCallables @@ -141,9 +141,9 @@ type closure struct { params []namedParameter } -func forceThunks(i *interpreter, trace traceElement, args *bindingFrame) error { +func forceThunks(i *interpreter, args *bindingFrame) error { for _, arg := range *args { - _, err := arg.getValue(i, trace) + _, err := arg.getValue(i) if err != nil { return err } @@ -151,7 +151,7 @@ func forceThunks(i *interpreter, trace traceElement, args *bindingFrame) error { return nil } -func (closure *closure) evalCall(arguments callArguments, i *interpreter, trace traceElement) (value, error) { +func (closure *closure) evalCall(arguments callArguments, i *interpreter) (value, error) { argThunks := make(bindingFrame) parameters := closure.parameters() for i, arg := range arguments.positional { @@ -175,7 +175,7 @@ func (closure *closure) evalCall(arguments callArguments, i *interpreter, trace } if arguments.tailstrict { - err := forceThunks(i, trace, &argThunks) + err := forceThunks(i, &argThunks) if err != nil { return nil, err } @@ -185,7 +185,7 @@ func (closure *closure) evalCall(arguments callArguments, i *interpreter, trace addBindings(closure.env.upValues, argThunks), closure.env.selfBinding, ) - return i.EvalInCleanEnv(trace, &calledEnvironment, closure.function.Body, arguments.tailstrict) + return i.EvalInCleanEnv(&calledEnvironment, closure.function.Body, 
arguments.tailstrict) } func (closure *closure) parameters() []namedParameter { @@ -220,15 +220,15 @@ type NativeFunction struct { } // evalCall evaluates a call to a NativeFunction and returns the result. -func (native *NativeFunction) evalCall(arguments callArguments, i *interpreter, trace traceElement) (value, error) { +func (native *NativeFunction) evalCall(arguments callArguments, i *interpreter) (value, error) { flatArgs := flattenArgs(arguments, native.parameters(), []value{}) nativeArgs := make([]interface{}, 0, len(flatArgs)) for _, arg := range flatArgs { - v, err := i.evaluatePV(arg, trace) + v, err := i.evaluatePV(arg) if err != nil { return nil, err } - json, err := i.manifestJSON(trace, v) + json, err := i.manifestJSON(v) if err != nil { return nil, err } @@ -236,9 +236,9 @@ func (native *NativeFunction) evalCall(arguments callArguments, i *interpreter, } resultJSON, err := native.Func(nativeArgs) if err != nil { - return nil, i.Error(err.Error(), trace) + return nil, i.Error(err.Error()) } - return jsonToValue(i, trace, resultJSON) + return jsonToValue(i, resultJSON) } // Parameters returns a NativeFunction's parameters. diff --git a/vendor/github.com/google/go-jsonnet/update_cpp_jsonnet.sh b/vendor/github.com/google/go-jsonnet/update_cpp_jsonnet.sh new file mode 100644 index 0000000..954d195 --- /dev/null +++ b/vendor/github.com/google/go-jsonnet/update_cpp_jsonnet.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Updates cpp-jsonnet repo and regenerates dependent files + +set -e +set -x + +cd cpp-jsonnet +git checkout master +git pull +cd .. +go run cmd/dumpstdlibast/dumpstdlibast.go cpp-jsonnet/stdlib/std.jsonnet > astgen/stdast.go + +set +x +echo +echo -e "\033[1mUpdate completed. Please check if any tests are broken and fix any encountered issues.\033[0m" diff --git a/vendor/github.com/google/go-jsonnet/util.go b/vendor/github.com/google/go-jsonnet/util.go new file mode 100644 index 0000000..91f24c7 --- /dev/null +++ b/vendor/github.com/google/go-jsonnet/util.go @@ -0,0 +1,38 @@ +package jsonnet + +func minInt(a, b int) int { + if a < b { + return a + } + return b +} + +func runeCmp(a, b rune) int { + if a < b { + return -1 + } else if a > b { + return 1 + } else { + return 0 + } +} + +func intCmp(a, b int) int { + if a < b { + return -1 + } else if a > b { + return 1 + } else { + return 0 + } +} + +func float64Cmp(a, b float64) int { + if a < b { + return -1 + } else if a > b { + return 1 + } else { + return 0 + } +} diff --git a/vendor/github.com/google/go-jsonnet/value.go b/vendor/github.com/google/go-jsonnet/value.go index ee299a8..7598a7d 100644 --- a/vendor/github.com/google/go-jsonnet/value.go +++ b/vendor/github.com/google/go-jsonnet/value.go @@ -58,7 +58,7 @@ var arrayType = &valueType{"array"} // TODO(sbarzowski) perhaps call it just "Thunk"? type potentialValue interface { // fromWhere keeps the information from where the evaluation was requested. 
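The interpreter and thunk hunks above all drop the explicit `trace traceElement` parameter: instead of threading the trace through every helper, the entry points (`evaluateStd`, `evaluateAux`, the manifestation calls) now record the current trace on the interpreter's stack via `setCurrentTrace`/`clearCurrentTrace`, and error constructors read it from there. Below is a minimal, self-contained sketch of that pattern; the types are simplified stand-ins, not go-jsonnet's real ones.

package main

import "fmt"

// traceElement and callStack are simplified stand-ins for the interpreter's
// internal types; only the set/clear-around-evaluation pattern is shown.
type traceElement struct{ loc string }

type callStack struct{ currentTrace traceElement }

func (s *callStack) setCurrentTrace(t traceElement) { s.currentTrace = t }
func (s *callStack) clearCurrentTrace()             { s.currentTrace = traceElement{} }

type interpreter struct{ stack callStack }

// Error no longer needs a trace argument: it reads the trace that the
// surrounding evaluation recorded on the stack.
func (i *interpreter) Error(msg string) error {
	return fmt.Errorf("%s (at %s)", msg, i.stack.currentTrace.loc)
}

// evaluate brackets an evaluation with the current trace, mirroring the
// setCurrentTrace/clearCurrentTrace calls added around EvalInCleanEnv.
func (i *interpreter) evaluate(loc string, f func() error) error {
	i.stack.setCurrentTrace(traceElement{loc: loc})
	defer i.stack.clearCurrentTrace()
	return f()
}

func main() {
	i := &interpreter{}
	err := i.evaluate("main.jsonnet:3:1", func() error {
		return i.Error("Unexpected type number")
	})
	fmt.Println(err)
}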
- getValue(i *interpreter, fromWhere traceElement) (value, error) + getValue(i *interpreter) (value, error) aPotentialValue() } @@ -78,7 +78,7 @@ type valueString interface { length() int getRunes() []rune getGoString() string - index(i *interpreter, trace traceElement, index int) (value, error) + index(i *interpreter, index int) (value, error) } // valueFlatString represents a string value, internally using a []rune for quick @@ -89,11 +89,11 @@ type valueFlatString struct { value []rune } -func (s *valueFlatString) index(i *interpreter, trace traceElement, index int) (value, error) { +func (s *valueFlatString) index(i *interpreter, index int) (value, error) { if 0 <= index && index < s.length() { return makeValueString(string(s.value[index])), nil } - return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, s.length()), trace) + return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, s.length())) } func (s *valueFlatString) getRunes() []rune { @@ -142,12 +142,12 @@ func (s *valueStringTree) flattenToLeft() { } } -func (s *valueStringTree) index(i *interpreter, trace traceElement, index int) (value, error) { +func (s *valueStringTree) index(i *interpreter, index int) (value, error) { if 0 <= index && index < s.len { s.flattenToLeft() - return s.left.index(i, trace, index) + return s.left.index(i, index) } - return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, s.length()), trace) + return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, s.length())) } func (s *valueStringTree) getRunes() []rune { @@ -199,7 +199,7 @@ func concatStrings(a, b valueString) valueString { } } -func stringLessThan(a, b valueString) bool { +func stringCmp(a, b valueString) int { runesA := a.getRunes() runesB := b.getRunes() var length int @@ -210,10 +210,10 @@ func stringLessThan(a, b valueString) bool { } for i := 0; i < length; i++ { if runesA[i] != runesB[i] { - return runesA[i] < runesB[i] + return runeCmp(runesA[i], runesB[i]) } } - return len(runesA) < len(runesB) + return intCmp(len(runesA), len(runesB)) } func stringEqual(a, b valueString) bool { @@ -247,10 +247,6 @@ func makeValueBoolean(v bool) *valueBoolean { return &valueBoolean{value: v} } -func (b *valueBoolean) not() *valueBoolean { - return makeValueBoolean(!b.value) -} - type valueNumber struct { valueBase value float64 @@ -294,11 +290,11 @@ type valueArray struct { elements []*cachedThunk } -func (arr *valueArray) index(i *interpreter, trace traceElement, index int) (value, error) { +func (arr *valueArray) index(i *interpreter, index int) (value, error) { if 0 <= index && index < arr.length() { - return i.evaluatePV(arr.elements[index], trace) + return i.evaluatePV(arr.elements[index]) } - return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, arr.length()), trace) + return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, arr.length())) } func (arr *valueArray) length() int { @@ -341,29 +337,29 @@ type valueFunction struct { // TODO(sbarzowski) better name? 
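Further down in value.go, the boolean stringLessThan is replaced by stringCmp, built on the runeCmp/intCmp helpers added in util.go above, so callers get the full -1/0/1 ordering in a single pass. A rough standalone equivalent over rune slices (the real code operates on the valueString interface):

package main

import "fmt"

func runeCmp(a, b rune) int {
	switch {
	case a < b:
		return -1
	case a > b:
		return 1
	}
	return 0
}

func intCmp(a, b int) int {
	switch {
	case a < b:
		return -1
	case a > b:
		return 1
	}
	return 0
}

// stringCmp compares rune-wise first, then falls back to length,
// yielding lexicographic order as a three-way result.
func stringCmp(a, b []rune) int {
	n := len(a)
	if len(b) < n {
		n = len(b)
	}
	for i := 0; i < n; i++ {
		if c := runeCmp(a[i], b[i]); c != 0 {
			return c
		}
	}
	return intCmp(len(a), len(b))
}

func main() {
	fmt.Println(stringCmp([]rune("abc"), []rune("abd"))) // -1
	fmt.Println(stringCmp([]rune("abc"), []rune("ab")))  // 1
	fmt.Println(stringCmp([]rune("abc"), []rune("abc"))) // 0
}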
type evalCallable interface { - evalCall(args callArguments, i *interpreter, trace traceElement) (value, error) + evalCall(args callArguments, i *interpreter) (value, error) parameters() []namedParameter } -func (f *valueFunction) call(i *interpreter, trace traceElement, args callArguments) (value, error) { - err := checkArguments(i, trace, args, f.parameters()) +func (f *valueFunction) call(i *interpreter, args callArguments) (value, error) { + err := checkArguments(i, args, f.parameters()) if err != nil { return nil, err } - return f.ec.evalCall(args, i, trace) + return f.ec.evalCall(args, i) } func (f *valueFunction) parameters() []namedParameter { return f.ec.parameters() } -func checkArguments(i *interpreter, trace traceElement, args callArguments, params []namedParameter) error { +func checkArguments(i *interpreter, args callArguments, params []namedParameter) error { numPassed := len(args.positional) maxExpected := len(params) if numPassed > maxExpected { - return i.Error(fmt.Sprintf("function expected %v positional argument(s), but got %v", maxExpected, numPassed), trace) + return i.Error(fmt.Sprintf("function expected %v positional argument(s), but got %v", maxExpected, numPassed)) } // Parameter names the function will accept. @@ -379,17 +375,17 @@ func checkArguments(i *interpreter, trace traceElement, args callArguments, para } for _, arg := range args.named { if _, present := received[arg.name]; present { - return i.Error(fmt.Sprintf("Argument %v already provided", arg.name), trace) + return i.Error(fmt.Sprintf("Argument %v already provided", arg.name)) } if _, present := accepted[arg.name]; !present { - return i.Error(fmt.Sprintf("function has no parameter %v", arg.name), trace) + return i.Error(fmt.Sprintf("function has no parameter %v", arg.name)) } received[arg.name] = true } for _, param := range params { if _, present := received[param.name]; !present && param.defaultArg == nil { - return i.Error(fmt.Sprintf("Missing argument: %v", param.name), trace) + return i.Error(fmt.Sprintf("Missing argument: %v", param.name)) } } @@ -496,8 +492,8 @@ func (*valueObject) getType() *valueType { return objectType } -func (obj *valueObject) index(i *interpreter, trace traceElement, field string) (value, error) { - return objectIndex(i, trace, objectBinding(obj), field) +func (obj *valueObject) index(i *interpreter, field string) (value, error) { + return objectIndex(i, objectBinding(obj), field) } func (obj *valueObject) assertionsChecked() bool { @@ -549,14 +545,14 @@ type simpleObject struct { locals []objectLocal } -func checkAssertionsHelper(i *interpreter, trace traceElement, obj *valueObject, curr uncachedObject, superDepth int) error { +func checkAssertionsHelper(i *interpreter, obj *valueObject, curr uncachedObject, superDepth int) error { switch curr := curr.(type) { case *extendedObject: - err := checkAssertionsHelper(i, trace, obj, curr.right, superDepth) + err := checkAssertionsHelper(i, obj, curr.right, superDepth) if err != nil { return err } - err = checkAssertionsHelper(i, trace, obj, curr.left, superDepth+curr.right.inheritanceSize()) + err = checkAssertionsHelper(i, obj, curr.left, superDepth+curr.right.inheritanceSize()) if err != nil { return err } @@ -565,7 +561,7 @@ func checkAssertionsHelper(i *interpreter, trace traceElement, obj *valueObject, for _, assert := range curr.asserts { sb := selfBinding{self: obj, superDepth: superDepth} fieldUpValues := prepareFieldUpvalues(sb, curr.upValues, curr.locals) - _, err := assert.evaluate(i, trace, sb, fieldUpValues, "") 
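checkArguments above validates a call site in three passes: too many positional arguments, duplicate or unknown named arguments, and required parameters left unfilled. A stripped-down sketch of the same checks over plain strings (hypothetical types, not the interpreter's):

package main

import (
	"errors"
	"fmt"
)

type param struct {
	name       string
	hasDefault bool
}

// checkArguments mirrors the three validations in the hunk above:
// positional overflow, duplicate/unknown named args, and missing args.
func checkArguments(positional int, named []string, params []param) error {
	if positional > len(params) {
		return fmt.Errorf("function expected %d positional argument(s), but got %d", len(params), positional)
	}

	accepted := map[string]bool{}
	received := map[string]bool{}
	for i, p := range params {
		accepted[p.name] = true
		if i < positional {
			received[p.name] = true
		}
	}

	for _, n := range named {
		if received[n] {
			return fmt.Errorf("argument %s already provided", n)
		}
		if !accepted[n] {
			return fmt.Errorf("function has no parameter %s", n)
		}
		received[n] = true
	}

	for _, p := range params {
		if !received[p.name] && !p.hasDefault {
			return errors.New("missing argument: " + p.name)
		}
	}
	return nil
}

func main() {
	params := []param{{name: "chart"}, {name: "version", hasDefault: true}}
	fmt.Println(checkArguments(1, []string{"version"}, params)) // <nil>
	fmt.Println(checkArguments(0, nil, params))                 // missing argument: chart
}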
+ _, err := assert.evaluate(i, sb, fieldUpValues, "") if err != nil { return err } @@ -578,13 +574,13 @@ func checkAssertionsHelper(i *interpreter, trace traceElement, obj *valueObject, } } -func checkAssertions(i *interpreter, trace traceElement, obj *valueObject) error { +func checkAssertions(i *interpreter, obj *valueObject) error { if !obj.assertionsChecked() { // Assertions may refer to the object that will normally // trigger checking of assertions, resulting in an endless recursion. // To avoid that, while we check them, we treat them as already passed. obj.setAssertionsCheckResult(errNoErrorInObjectInvariants) - obj.setAssertionsCheckResult(checkAssertionsHelper(i, trace, obj, obj.uncached, 0)) + obj.setAssertionsCheckResult(checkAssertionsHelper(i, obj, obj.uncached, 0)) } return obj.getAssertionsCheckResult() } @@ -614,7 +610,7 @@ type simpleObjectField struct { // unboundField is a field that doesn't know yet in which object it is. type unboundField interface { - evaluate(i *interpreter, trace traceElement, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error) + evaluate(i *interpreter, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error) } // extendedObject represents an object created through inheritance (left + right). @@ -703,18 +699,18 @@ func prepareFieldUpvalues(sb selfBinding, upValues bindingFrame, locals []object return newUpValues } -func objectIndex(i *interpreter, trace traceElement, sb selfBinding, fieldName string) (value, error) { - err := checkAssertions(i, trace, sb.self) +func objectIndex(i *interpreter, sb selfBinding, fieldName string) (value, error) { + err := checkAssertions(i, sb.self) if err != nil { return nil, err } if sb.superDepth >= sb.self.uncached.inheritanceSize() { - return nil, i.Error("Attempt to use super when there is no super class.", trace) + return nil, i.Error("Attempt to use super when there is no super class.") } found, field, upValues, locals, foundAt := findField(sb.self.uncached, sb.superDepth, fieldName) if !found { - return nil, i.Error(fmt.Sprintf("Field does not exist: %s", fieldName), trace) + return nil, i.Error(fmt.Sprintf("Field does not exist: %s", fieldName)) } if val, ok := sb.self.cache[objectCacheKey{field: fieldName, depth: foundAt}]; ok { @@ -724,7 +720,7 @@ func objectIndex(i *interpreter, trace traceElement, sb selfBinding, fieldName s fieldSelfBinding := selfBinding{self: sb.self, superDepth: foundAt} fieldUpValues := prepareFieldUpvalues(fieldSelfBinding, upValues, locals) - val, err := field.field.evaluate(i, trace, fieldSelfBinding, fieldUpValues, fieldName) + val, err := field.field.evaluate(i, fieldSelfBinding, fieldUpValues, fieldName) if err == nil { sb.self.cache[objectCacheKey{field: fieldName, depth: foundAt}] = val diff --git a/vendor/github.com/google/go-jsonnet/vm.go b/vendor/github.com/google/go-jsonnet/vm.go index 9150f36..4ba94d4 100644 --- a/vendor/github.com/google/go-jsonnet/vm.go +++ b/vendor/github.com/google/go-jsonnet/vm.go @@ -130,7 +130,7 @@ const ( ) // version is the current gojsonnet's version -const version = "v0.16.0" +const version = "v0.17.0" // Evaluate evaluates a Jsonnet program given by an Abstract Syntax Tree // and returns serialized JSON as string. 
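checkAssertions above guards against endless recursion by recording a provisional "passed" result before evaluating the object's assertions, so an assertion that indexes the same object does not re-trigger the check; the real result overwrites it afterwards. A toy illustration of that memoize-before-recursing idea, using hypothetical simplified types:

package main

import "fmt"

type object struct {
	checked bool
	result  error
	asserts []func(o *object) error
}

// checkAssertions marks the object as already checked before running its
// assertions, so assertions that look back into the object don't recurse.
func checkAssertions(o *object) error {
	if !o.checked {
		o.checked = true
		o.result = nil // provisional "no error" while checking
		for _, a := range o.asserts {
			if err := a(o); err != nil {
				o.result = err
				break
			}
		}
	}
	return o.result
}

func main() {
	o := &object{}
	// An assertion that indexes the object again; without the provisional
	// result this would recurse forever.
	o.asserts = []func(*object) error{
		func(o *object) error { return checkAssertions(o) },
	}
	fmt.Println(checkAssertions(o)) // <nil>
}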
diff --git a/vendor/github.com/google/uuid/README.md b/vendor/github.com/google/uuid/README.md index 9d92c11..f765a46 100644 --- a/vendor/github.com/google/uuid/README.md +++ b/vendor/github.com/google/uuid/README.md @@ -16,4 +16,4 @@ change is the ability to represent an invalid UUID (vs a NIL UUID). Full `go doc` style documentation for the package can be viewed online without installing this package by using the GoDoc site here: -http://godoc.org/github.com/google/uuid +http://pkg.go.dev/github.com/google/uuid diff --git a/vendor/github.com/google/uuid/marshal.go b/vendor/github.com/google/uuid/marshal.go index 7f9e0c6..14bd340 100644 --- a/vendor/github.com/google/uuid/marshal.go +++ b/vendor/github.com/google/uuid/marshal.go @@ -16,10 +16,11 @@ func (uuid UUID) MarshalText() ([]byte, error) { // UnmarshalText implements encoding.TextUnmarshaler. func (uuid *UUID) UnmarshalText(data []byte) error { id, err := ParseBytes(data) - if err == nil { - *uuid = id + if err != nil { + return err } - return err + *uuid = id + return nil } // MarshalBinary implements encoding.BinaryMarshaler. diff --git a/vendor/github.com/google/uuid/version1.go b/vendor/github.com/google/uuid/version1.go index 199a1ac..4631096 100644 --- a/vendor/github.com/google/uuid/version1.go +++ b/vendor/github.com/google/uuid/version1.go @@ -17,12 +17,6 @@ import ( // // In most cases, New should be used. func NewUUID() (UUID, error) { - nodeMu.Lock() - if nodeID == zeroID { - setNodeInterface("") - } - nodeMu.Unlock() - var uuid UUID now, seq, err := GetTime() if err != nil { @@ -38,7 +32,13 @@ func NewUUID() (UUID, error) { binary.BigEndian.PutUint16(uuid[4:], timeMid) binary.BigEndian.PutUint16(uuid[6:], timeHi) binary.BigEndian.PutUint16(uuid[8:], seq) + + nodeMu.Lock() + if nodeID == zeroID { + setNodeInterface("") + } copy(uuid[10:], nodeID[:]) + nodeMu.Unlock() return uuid, nil } diff --git a/vendor/github.com/google/uuid/version4.go b/vendor/github.com/google/uuid/version4.go index 84af91c..c110465 100644 --- a/vendor/github.com/google/uuid/version4.go +++ b/vendor/github.com/google/uuid/version4.go @@ -27,8 +27,13 @@ func New() UUID { // equivalent to the odds of creating a few tens of trillions of UUIDs in a // year and having one duplicate. func NewRandom() (UUID, error) { + return NewRandomFromReader(rander) +} + +// NewRandomFromReader returns a UUID based on bytes read from a given io.Reader. 
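The uuid hunk here adds NewRandomFromReader (its body continues just below), which lets callers supply the randomness source instead of the package-level rander. One handy use is reproducible IDs in tests; a small usage sketch, assuming the vendored github.com/google/uuid:

package main

import (
	"fmt"
	"math/rand"

	"github.com/google/uuid"
)

func main() {
	// math/rand.Rand implements io.Reader, so a fixed seed yields a
	// deterministic (but NOT cryptographically secure) UUID stream.
	r := rand.New(rand.NewSource(42))

	id, err := uuid.NewRandomFromReader(r)
	if err != nil {
		panic(err)
	}
	fmt.Println(id) // same value on every run with this seed
}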
+func NewRandomFromReader(r io.Reader) (UUID, error) { var uuid UUID - _, err := io.ReadFull(rander, uuid[:]) + _, err := io.ReadFull(r, uuid[:]) if err != nil { return Nil, err } diff --git a/vendor/github.com/grafana/tanka/pkg/helm/charts.go b/vendor/github.com/grafana/tanka/pkg/helm/charts.go index 22f3c19..d85e8f3 100644 --- a/vendor/github.com/grafana/tanka/pkg/helm/charts.go +++ b/vendor/github.com/grafana/tanka/pkg/helm/charts.go @@ -3,7 +3,6 @@ package helm import ( "errors" "fmt" - "io/ioutil" "log" "os" "path/filepath" @@ -24,7 +23,7 @@ func LoadChartfile(projectRoot string) (*Charts, error) { // open chartfile chartfile := filepath.Join(abs, Filename) - data, err := ioutil.ReadFile(chartfile) + data, err := os.ReadFile(chartfile) if err != nil { return nil, err } @@ -68,6 +67,11 @@ type Charts struct { Helm Helm } +// chartManifest represents a Helm chart's Chart.yaml +type chartManifest struct { + Version string `yaml:"version"` +} + // ChartDir returns the directory pulled charts are saved in func (c Charts) ChartDir() string { return filepath.Join(c.projectRoot, c.Manifest.Directory) @@ -86,14 +90,45 @@ func (c Charts) Vendor() error { return err } - log.Println("Syncing Repositories ...") - if err := c.Helm.RepoUpdate(Opts{Repositories: c.Manifest.Repositories}); err != nil { - return err - } - + repositoriesUpdated := false log.Println("Pulling Charts ...") for _, r := range c.Manifest.Requires { - err := c.Helm.Pull(r.Chart, r.Version.String(), PullOpts{ + chartName := parseReqName(r.Chart) + chartPath := filepath.Join(dir, chartName) + + _, err := os.Stat(chartPath) + if err == nil { + chartManifestPath := filepath.Join(chartPath, "Chart.yaml") + chartManifestBytes, err := os.ReadFile(chartManifestPath) + if err != nil { + return fmt.Errorf("reading chart manifest: %w", err) + } + var chartYAML chartManifest + if err := yaml.Unmarshal(chartManifestBytes, &chartYAML); err != nil { + return fmt.Errorf("unmarshalling chart manifest: %w", err) + } + + if chartYAML.Version == r.Version.String() { + log.Printf(" %s@%s exists", r.Chart, r.Version.String()) + continue + } else { + log.Printf("Removing %s@%s", r.Chart, r.Version.String()) + if err := os.RemoveAll(chartPath); err != nil { + return err + } + } + } else if !os.IsNotExist(err) { + return err + } + + if !repositoriesUpdated { + log.Println("Syncing Repositories ...") + if err := c.Helm.RepoUpdate(Opts{Repositories: c.Manifest.Repositories}); err != nil { + return err + } + repositoriesUpdated = true + } + err = c.Helm.Pull(r.Chart, r.Version.String(), PullOpts{ Destination: dir, Opts: Opts{Repositories: c.Manifest.Repositories}, }) @@ -101,7 +136,7 @@ func (c Charts) Vendor() error { return err } - log.Printf(" %s@%s", r.Chart, r.Version.String()) + log.Printf(" %s@%s downloaded", r.Chart, r.Version.String()) } return nil @@ -109,13 +144,9 @@ func (c Charts) Vendor() error { // Add adds every Chart in reqs to the Manifest after validation, and runs // Vendor afterwards -func (c Charts) Add(reqs []string) error { +func (c *Charts) Add(reqs []string) error { log.Printf("Adding %v Charts ...", len(reqs)) - skip := func(s string, err error) { - log.Printf(" Skipping %s: %s.", s, err) - } - // parse new charts, append in memory added := 0 for _, s := range reqs { @@ -150,12 +181,37 @@ func (c Charts) Add(reqs []string) error { return c.Vendor() } +func (c *Charts) AddRepos(repos ...Repo) error { + added := 0 + for _, r := range repos { + if c.Manifest.Repositories.Has(r) { + skip(r.Name, fmt.Errorf("already exists")) + 
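The new Vendor logic above only pulls a chart when the vendored copy is missing or its Chart.yaml version differs from the requirement, and it defers `helm repo update` until the first pull is actually needed. A condensed sketch of that decision, with a hypothetical readChartVersion helper standing in for the yaml unmarshalling:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// readChartVersion is a hypothetical stand-in for reading the `version:`
// field of <chartPath>/Chart.yaml, as chartManifest does above.
func readChartVersion(chartPath string) (string, error) {
	// a real implementation would os.ReadFile Chart.yaml and yaml.Unmarshal it
	return "", nil
}

// needsPull reports whether the chart at dir/name must be (re)pulled to
// satisfy wantVersion. A stale copy is expected to be removed by the caller
// before pulling, as the Vendor hunk above does with os.RemoveAll.
func needsPull(dir, name, wantVersion string) (bool, error) {
	chartPath := filepath.Join(dir, name)

	if _, err := os.Stat(chartPath); os.IsNotExist(err) {
		return true, nil // not vendored yet
	} else if err != nil {
		return false, err
	}

	have, err := readChartVersion(chartPath)
	if err != nil {
		return false, err
	}
	// pull only when the vendored version differs from the requirement
	return have != wantVersion, nil
}

func main() {
	pull, err := needsPull("charts", "grafana", "1.2.3")
	fmt.Println(pull, err)
}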
continue + } + + c.Manifest.Repositories = append(c.Manifest.Repositories, r) + added++ + log.Println(" OK:", r.Name) + } + + // write out + if err := write(c.Manifest, c.ManifestFile()); err != nil { + return err + } + + if added != len(repos) { + return fmt.Errorf("%v Repo(s) were skipped. Please check above logs for details", len(repos)-added) + } + + return nil +} + func InitChartfile(path string) (*Charts, error) { c := Chartfile{ Version: Version, Repositories: []Repo{{ Name: "stable", - URL: "https://kubernetes-charts.storage.googleapis.com", + URL: "https://charts.helm.sh/stable", }}, Requires: make(Requirements, 0), } @@ -174,7 +230,7 @@ func write(c Chartfile, dest string) error { return err } - return ioutil.WriteFile(dest, data, 0644) + return os.WriteFile(dest, data, 0644) } var chartExp = regexp.MustCompile(`\w+\/.+@.+`) @@ -199,3 +255,14 @@ func parseReq(s string) (*Requirement, error) { Version: *ver, }, nil } + +// parseReqName parses a name from a string of the format `repo/name` +func parseReqName(s string) string { + elems := strings.Split(s, "/") + name := elems[1] + return name +} + +func skip(s string, err error) { + log.Printf(" Skipping %s: %s.", s, err) +} diff --git a/vendor/github.com/grafana/tanka/pkg/helm/helm.go b/vendor/github.com/grafana/tanka/pkg/helm/helm.go index 300b2a4..db5b54a 100644 --- a/vendor/github.com/grafana/tanka/pkg/helm/helm.go +++ b/vendor/github.com/grafana/tanka/pkg/helm/helm.go @@ -4,7 +4,6 @@ import ( "bytes" "encoding/json" "fmt" - "io/ioutil" "os" "os/exec" @@ -106,7 +105,7 @@ func writeRepoTmpFile(r []Repo) (string, error) { "repositories": r, } - f, err := ioutil.TempFile("", "charts-repos") + f, err := os.CreateTemp("", "charts-repos") if err != nil { return "", err } diff --git a/vendor/github.com/grafana/tanka/pkg/helm/jsonnet.go b/vendor/github.com/grafana/tanka/pkg/helm/jsonnet.go index ced9a8c..3a03596 100644 --- a/vendor/github.com/grafana/tanka/pkg/helm/jsonnet.go +++ b/vendor/github.com/grafana/tanka/pkg/helm/jsonnet.go @@ -1,14 +1,11 @@ package helm import ( - "bytes" "encoding/json" "fmt" "os" "path/filepath" - "text/template" - "github.com/Masterminds/sprig/v3" "github.com/google/go-jsonnet" "github.com/google/go-jsonnet/ast" "github.com/grafana/tanka/pkg/kubernetes/manifest" @@ -71,7 +68,7 @@ func NativeFunc(h Helm) *jsonnet.NativeFunction { } // convert list to map - out, err := listAsMap(list, opts.NameFormat) + out, err := manifest.ListAsMap(list, opts.NameFormat) if err != nil { return nil, err } @@ -98,43 +95,3 @@ func parseOpts(data interface{}) (*JsonnetOpts, error) { return &opts, nil } - -func listAsMap(list manifest.List, nameFormat string) (map[string]interface{}, error) { - if nameFormat == "" { - nameFormat = DefaultNameFormat - } - - tmpl, err := template.New(""). - Funcs(sprig.TxtFuncMap()). - Parse(nameFormat) - if err != nil { - return nil, fmt.Errorf("Parsing name format: %w", err) - } - - out := make(map[string]interface{}) - for _, m := range list { - var buf bytes.Buffer - if err := tmpl.Execute(&buf, m); err != nil { - return nil, err - } - name := buf.String() - - if _, ok := out[name]; ok { - return nil, ErrorDuplicateName{name: name, format: nameFormat} - } - out[name] = map[string]interface{}(m) - } - - return out, nil -} - -// ErrorDuplicateName means two resources share the same name using the given -// nameFormat. -type ErrorDuplicateName struct { - name string - format string -} - -func (e ErrorDuplicateName) Error() string { - return fmt.Sprintf("Two resources share the same name '%s'. 
Please adapt the name template '%s'. See https://tanka.dev/helm#two-resources-share-the-same-name", e.name, e.format) -} diff --git a/vendor/github.com/grafana/tanka/pkg/helm/spec.go b/vendor/github.com/grafana/tanka/pkg/helm/spec.go index 23b7992..ab1feab 100644 --- a/vendor/github.com/grafana/tanka/pkg/helm/spec.go +++ b/vendor/github.com/grafana/tanka/pkg/helm/spec.go @@ -23,7 +23,7 @@ type Chartfile struct { Version uint `json:"version"` // Repositories to source from - Repositories []Repo `json:"repositories"` + Repositories Repos `json:"repositories"` // Requires lists Charts expected to be present in the charts folder Requires Requirements `json:"requires"` @@ -43,6 +43,19 @@ type Repo struct { Password string `json:"password,omitempty"` } +type Repos []Repo + +// Has reports whether 'repo' is already part of the repositories +func (r Repos) Has(repo Repo) bool { + for _, x := range r { + if x == repo { + return true + } + } + + return false +} + // Requirement describes a single required Helm Chart. // Both, Chart and Version are required type Requirement struct { diff --git a/vendor/github.com/grafana/tanka/pkg/helm/template.go b/vendor/github.com/grafana/tanka/pkg/helm/template.go index 336a672..bf64c49 100644 --- a/vendor/github.com/grafana/tanka/pkg/helm/template.go +++ b/vendor/github.com/grafana/tanka/pkg/helm/template.go @@ -4,7 +4,6 @@ import ( "bytes" "io" "os" - "strings" "github.com/grafana/tanka/pkg/kubernetes/manifest" "github.com/pkg/errors" @@ -67,8 +66,12 @@ type TemplateOpts struct { // IncludeCRDs specifies whether CustomResourceDefinitions are included in // the template output IncludeCRDs bool + // Kubernetes version used for Capabilities.KubeVersion + KubeVersion string // Namespace scope for this request Namespace string + // NoHooks specifies whether hooks should be excluded from the template output + NoHooks bool } // Flags returns all options apart from Values as their respective `helm @@ -76,8 +79,7 @@ type TemplateOpts struct { func (t TemplateOpts) Flags() []string { var flags []string - if t.APIVersions != nil { - value := strings.Join(t.APIVersions, ",") + for _, value := range t.APIVersions { flags = append(flags, "--api-versions="+value) } @@ -85,6 +87,14 @@ func (t TemplateOpts) Flags() []string { flags = append(flags, "--include-crds") } + if t.KubeVersion != "" { + flags = append(flags, "--kube-version="+t.KubeVersion) + } + + if t.NoHooks { + flags = append(flags, "--no-hooks") + } + if t.Namespace != "" { flags = append(flags, "--namespace="+t.Namespace) } diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/eval.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/eval.go index b1ba4b1..33e2eb0 100644 --- a/vendor/github.com/grafana/tanka/pkg/jsonnet/eval.go +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/eval.go @@ -1,7 +1,8 @@ package jsonnet import ( - "path/filepath" + "os" + "regexp" jsonnet "github.com/google/go-jsonnet" "github.com/pkg/errors" @@ -28,10 +29,48 @@ func (i *InjectedCode) Set(key, value string) { // Opts are additional properties for the Jsonnet VM type Opts struct { + MaxStack int ExtCode InjectedCode TLACode InjectedCode ImportPaths []string - EvalPattern string + EvalScript string + CachePath string + + CachePathRegexes []*regexp.Regexp +} + +// PathIsCached determines if a given path is matched by any of the configured cached path regexes +// If no path regexes are defined, all paths are matched +func (opts Opts) PathIsCached(path string) bool { + for _, regex := range opts.CachePathRegexes { + if 
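The template.go hunk above extends TemplateOpts with KubeVersion and NoHooks, and emits one `--api-versions` flag per entry instead of a comma-joined value. A rough, simplified sketch of how those options translate into `helm template` arguments, with a usage example:

package main

import "fmt"

type TemplateOpts struct {
	APIVersions []string
	IncludeCRDs bool
	KubeVersion string
	Namespace   string
	NoHooks     bool
}

// Flags renders the options as helm CLI flags.
func (t TemplateOpts) Flags() []string {
	var flags []string
	for _, v := range t.APIVersions {
		flags = append(flags, "--api-versions="+v)
	}
	if t.IncludeCRDs {
		flags = append(flags, "--include-crds")
	}
	if t.KubeVersion != "" {
		flags = append(flags, "--kube-version="+t.KubeVersion)
	}
	if t.NoHooks {
		flags = append(flags, "--no-hooks")
	}
	if t.Namespace != "" {
		flags = append(flags, "--namespace="+t.Namespace)
	}
	return flags
}

func main() {
	opts := TemplateOpts{
		APIVersions: []string{"monitoring.coreos.com/v1"},
		IncludeCRDs: true,
		KubeVersion: "1.21",
		Namespace:   "default",
	}
	fmt.Println(opts.Flags())
}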
regex.MatchString(path) { + return true + } + } + return len(opts.CachePathRegexes) == 0 +} + +// Clone returns a deep copy of Opts +func (o Opts) Clone() Opts { + extCode, tlaCode := InjectedCode{}, InjectedCode{} + + for k, v := range o.ExtCode { + extCode[k] = v + } + + for k, v := range o.TLACode { + tlaCode[k] = v + } + + return Opts{ + TLACode: tlaCode, + ExtCode: extCode, + ImportPaths: append([]string{}, o.ImportPaths...), + EvalScript: o.EvalScript, + + CachePath: o.CachePath, + CachePathRegexes: o.CachePathRegexes, + } } // MakeVM returns a Jsonnet VM with some extensions of Tanka, including: @@ -53,6 +92,10 @@ func MakeVM(opts Opts) *jsonnet.VM { vm.NativeFunction(nf) } + if opts.MaxStack > 0 { + vm.MaxStack = opts.MaxStack + } + return vm } @@ -60,23 +103,62 @@ func MakeVM(opts Opts) *jsonnet.VM { // result in JSON form. It disregards opts.ImportPaths in favor of automatically // resolving these according to the specified file. func EvaluateFile(jsonnetFile string, opts Opts) (string, error) { - jpath, _, _, err := jpath.Resolve(filepath.Dir(jsonnetFile)) + evalFunc := func(vm *jsonnet.VM) (string, error) { + return vm.EvaluateFile(jsonnetFile) + } + data, err := os.ReadFile(jsonnetFile) if err != nil { - return "", errors.Wrap(err, "resolving import paths") + return "", err } - opts.ImportPaths = jpath - - vm := MakeVM(opts) - return vm.EvaluateFile(jsonnetFile) + return evaluateSnippet(evalFunc, jsonnetFile, string(data), opts) } // Evaluate renders the given jsonnet into a string -func Evaluate(filename, data string, opts Opts) (string, error) { - jpath, _, _, err := jpath.Resolve(filepath.Dir(filename)) +// If cache options are given, a hash from the data will be computed and +// the resulting string will be cached for future retrieval +func Evaluate(path, data string, opts Opts) (string, error) { + evalFunc := func(vm *jsonnet.VM) (string, error) { + return vm.EvaluateAnonymousSnippet(path, data) + } + return evaluateSnippet(evalFunc, path, data, opts) +} + +type evalFunc func(vm *jsonnet.VM) (string, error) + +func evaluateSnippet(evalFunc evalFunc, path, data string, opts Opts) (string, error) { + var cache *FileEvalCache + if opts.CachePath != "" && opts.PathIsCached(path) { + cache = NewFileEvalCache(opts.CachePath) + } + + // Create VM + jpath, _, _, err := jpath.Resolve(path) if err != nil { return "", errors.Wrap(err, "resolving import paths") } opts.ImportPaths = jpath vm := MakeVM(opts) - return vm.EvaluateAnonymousSnippet(filename, data) + + var hash string + if cache != nil { + if hash, err = getSnippetHash(vm, path, data); err != nil { + return "", err + } + if v, err := cache.Get(hash); err != nil { + return "", err + } else if v != "" { + return v, nil + } + } + + content, err := evalFunc(vm) + if err != nil { + return "", err + } + + if cache != nil { + return content, cache.Store(hash, content) + } + + return content, nil } diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/evalcache.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/evalcache.go new file mode 100644 index 0000000..89195e8 --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/evalcache.go @@ -0,0 +1,48 @@ +package jsonnet + +import ( + "os" + "path/filepath" +) + +// FileEvalCache is an evaluation cache that stores its data on the local filesystem +type FileEvalCache struct { + Directory string +} + +func NewFileEvalCache(cachePath string) *FileEvalCache { + return &FileEvalCache{ + Directory: cachePath, + } +} + +func (c *FileEvalCache) cachePath(hash string) (string, error) 
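evaluateSnippet above wires caching around the VM call: when a CachePath is configured and the path matches, it hashes the snippet (plus, in the real code, its transitive imports), returns a cache hit if present, and otherwise evaluates and stores the result. A minimal sketch of that flow with a hypothetical Cache interface shaped like FileEvalCache's Get/Store (the concrete file-backed cache continues below):

package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// Cache is a hypothetical interface matching FileEvalCache's Get/Store shape.
type Cache interface {
	Get(hash string) (string, error)
	Store(hash, content string) error
}

type memCache map[string]string

func (m memCache) Get(h string) (string, error) { return m[h], nil }
func (m memCache) Store(h, c string) error      { m[h] = c; return nil }

// hashOf is a simplified content hash; the real getSnippetHash also folds in
// every transitively imported file.
func hashOf(data string) string {
	sum := sha256.Sum256([]byte(data))
	return base64.URLEncoding.EncodeToString(sum[:])
}

// evaluateCached checks the cache first and only falls back to eval on a miss.
func evaluateCached(cache Cache, data string, eval func(string) (string, error)) (string, error) {
	hash := hashOf(data)
	if v, err := cache.Get(hash); err != nil {
		return "", err
	} else if v != "" {
		return v, nil // cache hit
	}
	out, err := eval(data)
	if err != nil {
		return "", err
	}
	return out, cache.Store(hash, out)
}

func main() {
	cache := memCache{}
	eval := func(s string) (string, error) { return fmt.Sprintf("{\"len\": %d}", len(s)), nil }
	out, _ := evaluateCached(cache, "{ a: 1 }", eval)
	fmt.Println(out)
	out, _ = evaluateCached(cache, "{ a: 1 }", eval) // served from cache
	fmt.Println(out)
}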
{ + return filepath.Abs(filepath.Join(c.Directory, hash+".json")) +} + +func (c *FileEvalCache) Get(hash string) (string, error) { + cachePath, err := c.cachePath(hash) + if err != nil { + return "", err + } + + if bytes, err := os.ReadFile(cachePath); err == nil { + return string(bytes), err + } else if !os.IsNotExist(err) { + return "", err + } + return "", nil +} + +func (c *FileEvalCache) Store(hash, content string) error { + if err := os.MkdirAll(c.Directory, os.ModePerm); err != nil { + return err + } + + cachePath, err := c.cachePath(hash) + if err != nil { + return err + } + + return os.WriteFile(cachePath, []byte(content), 0644) +} diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/imports.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/imports.go index 99d6353..543fb95 100644 --- a/vendor/github.com/grafana/tanka/pkg/jsonnet/imports.go +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/imports.go @@ -1,9 +1,13 @@ package jsonnet import ( - "io/ioutil" + "crypto/sha256" + "encoding/base64" + "fmt" + "os" "path/filepath" "sort" + "sync" jsonnet "github.com/google/go-jsonnet" "github.com/google/go-jsonnet/ast" @@ -26,9 +30,12 @@ func TransitiveImports(dir string) ([]string, error) { return nil, err } - mainFile := filepath.Join(dir, "main.jsonnet") + entrypoint, err := jpath.Entrypoint(dir) + if err != nil { + return nil, err + } - sonnet, err := ioutil.ReadFile(mainFile) + sonnet, err := os.ReadFile(entrypoint) if err != nil { return nil, errors.Wrap(err, "opening file") } @@ -44,13 +51,13 @@ func TransitiveImports(dir string) ([]string, error) { vm.NativeFunction(nf) } - node, err := jsonnet.SnippetToAST("main.jsonnet", string(sonnet)) + node, err := jsonnet.SnippetToAST(filepath.Base(entrypoint), string(sonnet)) if err != nil { return nil, errors.Wrap(err, "creating Jsonnet AST") } imports := make(map[string]bool) - if err = importRecursive(imports, vm, node, "main.jsonnet"); err != nil { + if err = importRecursive(imports, vm, node, filepath.Base(entrypoint)); err != nil { return nil, err } @@ -65,10 +72,13 @@ func TransitiveImports(dir string) ([]string, error) { paths = append(paths, p) } - paths = append(paths, mainFile) + paths = append(paths, entrypoint) for i := range paths { paths[i], _ = filepath.Rel(rootDir, paths[i]) + + // Normalize path separators for windows + paths[i] = filepath.ToSlash(paths[i]) } sort.Strings(paths) @@ -86,7 +96,7 @@ func importRecursive(list map[string]bool, vm *jsonnet.VM, node ast.Node, curren contents, foundAt, err := vm.ImportAST(currentPath, p) if err != nil { - return errors.Wrap(err, "importing jsonnet") + return fmt.Errorf("importing '%s' from '%s': %w", p, currentPath, err) } abs, _ := filepath.Abs(foundAt) @@ -127,6 +137,44 @@ func importRecursive(list map[string]bool, vm *jsonnet.VM, node ast.Node, curren return nil } +var fileHashes sync.Map + +// getSnippetHash takes a jsonnet snippet and calculates a hash from its content +// and the content of all of its dependencies. 
+// File hashes are cached in-memory to optimize multiple executions of this function in a process +func getSnippetHash(vm *jsonnet.VM, path, data string) (string, error) { + node, _ := jsonnet.SnippetToAST(path, data) + result := map[string]bool{} + if err := importRecursive(result, vm, node, path); err != nil { + return "", err + } + fileNames := []string{} + for file := range result { + fileNames = append(fileNames, file) + } + sort.Strings(fileNames) + + fullHasher := sha256.New() + fullHasher.Write([]byte(data)) + for _, file := range fileNames { + var fileHash []byte + if got, ok := fileHashes.Load(file); ok { + fileHash = got.([]byte) + } else { + bytes, err := os.ReadFile(file) + if err != nil { + return "", err + } + hash := sha256.New() + fileHash = hash.Sum(bytes) + fileHashes.Store(file, fileHash) + } + fullHasher.Write(fileHash) + } + + return base64.URLEncoding.EncodeToString(fullHasher.Sum(nil)), nil +} + func uniqueStringSlice(s []string) []string { seen := make(map[string]struct{}, len(s)) j := 0 diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/dirs.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/dirs.go new file mode 100644 index 0000000..af4408a --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/dirs.go @@ -0,0 +1,124 @@ +package jpath + +import ( + "os" + "path/filepath" + "runtime" +) + +// Dirs returns the project-root (root) and environment directory (base) +func Dirs(path string) (root string, base string, err error) { + root, err = FindRoot(path) + if err != nil { + return "", "", err + } + + base, err = FindBase(path, root) + if err != nil { + return root, "", err + } + + return root, base, err +} + +// FindRoot returns the absolute path of the project root, being the directory +// that directly holds `tkrc.yaml` if it exists, otherwise the directory that +// directly holds `jsonnetfile.json` +func FindRoot(path string) (dir string, err error) { + start, err := FsDir(path) + if err != nil { + return "", err + } + + // root path based on os + stop := "/" + if runtime.GOOS == "windows" { + stop = filepath.VolumeName(start) + "\\" + } + + // try tkrc.yaml first + root, err := FindParentFile("tkrc.yaml", start, stop) + if err == nil { + return root, nil + } + + // otherwise use jsonnetfile.json + root, err = FindParentFile("jsonnetfile.json", start, stop) + if _, ok := err.(ErrorFileNotFound); ok { + return "", ErrorNoRoot + } else if err != nil { + return "", err + } + + return root, nil +} + +// FindBase returns the absolute path of the environments base directory, the +// one which directly holds the entrypoint file. +func FindBase(path string, root string) (string, error) { + dir, err := FsDir(path) + if err != nil { + return "", err + } + + filename, err := Filename(path) + if err != nil { + return "", err + } + + base, err := FindParentFile(filename, dir, root) + + if _, ok := err.(ErrorFileNotFound); ok { + return "", ErrorNoBase{filename: filename} + } else if err != nil { + return "", err + } + + return base, nil +} + +// FindParentFile traverses the parent directory tree for the given `file`, +// starting from `start` and ending in `stop`. If the file is not found an error is returned. 
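The new dirs.go locates the project root by walking up from the starting directory until it finds a marker file, preferring tkrc.yaml and falling back to jsonnetfile.json (FindParentFile itself continues below). A standalone sketch of the same upward search, iterative instead of recursive and assuming a Unix-style "/" root:

package main

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
)

// findParentFile walks from start upwards to stop, returning the first
// directory that directly contains file.
func findParentFile(file, start, stop string) (string, error) {
	dir := start
	for {
		if _, err := os.Stat(filepath.Join(dir, file)); err == nil {
			return dir, nil
		} else if !os.IsNotExist(err) {
			return "", err
		}
		if dir == stop {
			return "", fmt.Errorf("%s not found", file)
		}
		dir = filepath.Dir(dir)
	}
}

// findRoot prefers tkrc.yaml and falls back to jsonnetfile.json, as above.
// On Windows the stop directory would be the volume root instead of "/".
func findRoot(start string) (string, error) {
	if root, err := findParentFile("tkrc.yaml", start, "/"); err == nil {
		return root, nil
	}
	root, err := findParentFile("jsonnetfile.json", start, "/")
	if err != nil {
		return "", errors.New("unable to identify the project root")
	}
	return root, nil
}

func main() {
	wd, _ := os.Getwd()
	root, err := findRoot(wd)
	fmt.Println(root, err)
}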
+func FindParentFile(file, start, stop string) (string, error) { + files, err := os.ReadDir(start) + if err != nil { + return "", err + } + + if dirContainsFile(files, file) { + return start, nil + } else if start == stop { + return "", ErrorFileNotFound{file} + } + return FindParentFile(file, filepath.Dir(start), stop) +} + +// dirContainsFile returns whether a file is included in a directory. +func dirContainsFile(files []os.DirEntry, filename string) bool { + for _, f := range files { + if f.Name() == filename { + return true + } + } + return false +} + +// FsDir returns the most inner directory of path, as reported by the local +// filesystem +func FsDir(path string) (string, error) { + path, err := filepath.Abs(path) + if err != nil { + return "", err + } + + fi, err := os.Stat(path) + if err != nil { + return "", err + } + + if fi.IsDir() { + return path, nil + } + + return filepath.Dir(path), nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/errors.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/errors.go new file mode 100644 index 0000000..a75ee7b --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/errors.go @@ -0,0 +1,31 @@ +package jpath + +import ( + "errors" + "fmt" +) + +// ErrorNoRoot means no rootDir was found in the parent directories +var ErrorNoRoot = errors.New(`Unable to identify the project root. +Tried to find 'tkrc.yaml' or 'jsonnetfile.json' in the parent directories. +Please refer to https://tanka.dev/directory-structure for more information`) + +// ErrorNoBase means no baseDir was found in the parent directories +type ErrorNoBase struct { + filename string +} + +func (e ErrorNoBase) Error() string { + return fmt.Sprintf(`Unable to identify the environments base directory. +Tried to find '%s' in the parent directories. +Please refer to https://tanka.dev/directory-structure for more information`, e.filename) +} + +// ErrorFileNotFound means that the searched file was not found +type ErrorFileNotFound struct { + filename string +} + +func (e ErrorFileNotFound) Error() string { + return e.filename + " not found" +} diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/jpath.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/jpath.go index 779c260..d0372dc 100644 --- a/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/jpath.go +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/jpath/jpath.go @@ -1,56 +1,30 @@ package jpath import ( - "errors" - "io/ioutil" "os" "path/filepath" - "runtime" ) -var ( - // ErrorNoRoot means no rootDir was found in the parents - ErrorNoRoot = errors.New("could not locate a tkrc.yaml or jsonnetfile.json in the parent directories, which is required to identify the project root.\nRefer to https://tanka.dev/directory-structure for more information") +const DEFAULT_ENTRYPOINT = "main.jsonnet" - // ErrorNoBase means no baseDir was found in the parents - ErrorNoBase = errors.New("could not locate a main.jsonnet in the parent directories, which is required as the entrypoint for the evaluation.\nRefer to https://tanka.dev/directory-structure for more information") -) - -// ErrorFileNotFound means that the searched file was not found -type ErrorFileNotFound struct { - filename string -} - -func (e ErrorFileNotFound) Error() string { - return e.filename + " not found" -} - -// Resolve the given directory and resolves the jPath around it. This means it: +// Resolve the given path and resolves the jPath around it. 
This means it: // - figures out the project root (the one with .jsonnetfile, vendor/ and lib/) -// - figures out the environments base directory (the one with the main.jsonnet) +// - figures out the environments base directory (usually the main.jsonnet) // // It then constructs a jPath with the base directory, vendor/ and lib/. // This results in predictable imports, as it doesn't matter whether the user called // called the command further down tree or not. A little bit like git. -func Resolve(workdir string) (path []string, base, root string, err error) { - workdir, err = filepath.Abs(workdir) +func Resolve(path string) (jpath []string, base, root string, err error) { + root, err = FindRoot(path) if err != nil { return nil, "", "", err } - root, err = findRoot(workdir) + base, err = FindBase(path, root) if err != nil { return nil, "", "", err } - base, err = FindParentFile("main.jsonnet", workdir, root) - if err != nil { - if _, ok := err.(ErrorFileNotFound); ok { - return nil, "", "", ErrorNoBase - } - return nil, "", "", err - } - // The importer iterates through this list in reverse order return []string{ filepath.Join(root, "vendor"), @@ -60,56 +34,40 @@ func Resolve(workdir string) (path []string, base, root string, err error) { }, base, root, nil } -// findRoot searches for a rootDir by the following criteria: -// - tkrc.yaml is considered first, for a jb-independent way of marking the root -// - if it is not present (default), jsonnetfile.json is used. -func findRoot(start string) (dir string, err error) { - // root path based on os - stop := "/" - if runtime.GOOS == "windows" { - stop = filepath.VolumeName(start) + "\\" +// Filename returns the name of the entrypoint file. +// It DOES NOT return an absolute path, only a plain name like "main.jsonnet" +// To obtain an absolute path, use Entrypoint() instead. +func Filename(path string) (string, error) { + fi, err := os.Stat(path) + if err != nil { + return "", err } - // try tkrc.yaml first - root, err := FindParentFile("tkrc.yaml", start, stop) - if err == nil { - return root, nil + if fi.IsDir() { + return DEFAULT_ENTRYPOINT, nil } - // otherwise use jsonnetfile.json - root, err = FindParentFile("jsonnetfile.json", start, stop) - if err != nil { - if _, ok := err.(ErrorFileNotFound); ok { - return "", ErrorNoRoot - } - return "", err - } + return filepath.Base(fi.Name()), nil - return root, nil } -// FindParentFile traverses the parent directory tree for the given `file`, -// starting from `start` and ending in `stop`. If the file is not found an error is returned. -func FindParentFile(file, start, stop string) (string, error) { - files, err := ioutil.ReadDir(start) +// Entrypoint returns the absolute path of the environments entrypoint file (the +// one passed to jsonnet.EvaluateFile) +func Entrypoint(path string) (string, error) { + root, err := FindRoot(path) if err != nil { return "", err } - if dirContainsFile(files, file) { - return start, nil - } else if start == stop { - return "", ErrorFileNotFound{file} + base, err := FindBase(path, root) + if err != nil { + return "", err } - return FindParentFile(file, filepath.Dir(start), stop) -} -// dirContainsFile returns whether a file is included in a directory. 
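The reworked jpath (above and below) lets a path refer either to an environment directory, which falls back to the default entrypoint main.jsonnet, or directly to a specific .jsonnet file. A small sketch of that resolution step, simplified from Filename/Entrypoint:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

const defaultEntrypoint = "main.jsonnet"

// entrypointName returns the file name to evaluate for path: the default
// entrypoint for directories, the file's own base name otherwise.
func entrypointName(path string) (string, error) {
	fi, err := os.Stat(path)
	if err != nil {
		return "", err
	}
	if fi.IsDir() {
		return defaultEntrypoint, nil
	}
	return filepath.Base(path), nil
}

func main() {
	name, err := entrypointName(".")
	fmt.Println(name, err) // "." is a directory, so: main.jsonnet <nil>
}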
-func dirContainsFile(files []os.FileInfo, filename string) bool { - for _, f := range files { - if f.Name() == filename { - return true - } + filename, err := Filename(path) + if err != nil { + return "", err } - return false + + return filepath.Join(base, filename), nil } diff --git a/vendor/github.com/grafana/tanka/pkg/jsonnet/native/funcs.go b/vendor/github.com/grafana/tanka/pkg/jsonnet/native/funcs.go index 336cef5..e6ef0f0 100644 --- a/vendor/github.com/grafana/tanka/pkg/jsonnet/native/funcs.go +++ b/vendor/github.com/grafana/tanka/pkg/jsonnet/native/funcs.go @@ -10,6 +10,7 @@ import ( jsonnet "github.com/google/go-jsonnet" "github.com/google/go-jsonnet/ast" "github.com/grafana/tanka/pkg/helm" + "github.com/grafana/tanka/pkg/kustomize" "github.com/pkg/errors" yaml "gopkg.in/yaml.v3" ) @@ -32,6 +33,7 @@ func Funcs() []*jsonnet.NativeFunction { regexSubst(), helm.NativeFunc(helm.ExecHelm{}), + kustomize.NativeFunc(kustomize.ExecKustomize{}), } } diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/apply.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/apply.go index 76af3fd..3ff7cf9 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/apply.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/apply.go @@ -91,8 +91,13 @@ See https://tanka.dev/garbage-collection for more details.`) func (k *Kubernetes) uids(state manifest.List) (map[string]bool, error) { uids := make(map[string]bool) - live, err := k.ctl.GetByState(state) - if err != nil { + live, err := k.ctl.GetByState(state, client.GetByStateOpts{ + IgnoreNotFound: true, + }) + if _, ok := err.(client.ErrorNothingReturned); ok { + // return empty map of uids when kubectl returns nothing + return uids, nil + } else if err != nil { return nil, err } diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/client.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/client.go index 4bc048d..28bec6e 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/client.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/client.go @@ -9,7 +9,7 @@ type Client interface { // Get the specified object(s) from the cluster Get(namespace, kind, name string) (manifest.Manifest, error) GetByLabels(namespace, kind string, labels map[string]string) (manifest.List, error) - GetByState(data manifest.List) (manifest.List, error) + GetByState(data manifest.List, opts GetByStateOpts) (manifest.List, error) // Apply the configuration to the cluster. 
`data` must contain a plaintext // format that is `kubectl-apply(1)` compatible @@ -24,6 +24,10 @@ type Client interface { // Namespaces the cluster currently has Namespaces() (map[string]bool, error) + + // Namespace retrieves a namespace from the cluster + Namespace(namespace string) (manifest.Manifest, error) + // Resources returns all known api-resources of the cluster Resources() (Resources, error) @@ -51,3 +55,12 @@ type ApplyOpts struct { // DeleteOpts allow to specify additional parameters for delete operations // Currently not different from ApplyOpts, but may be required in the future type DeleteOpts ApplyOpts + +// GetByStateOpts allow to specify additional parameters for GetByState function +// Currently there is just ignoreNotFound parameter which is only useful for +// GetByState() so we only have GetByStateOpts instead of more generic GetOpts +// for all get operations +type GetByStateOpts struct { + // ignoreNotFound allows to ignore errors caused by missing objects + IgnoreNotFound bool +} diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/delete.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/delete.go index 55a2593..844f49b 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/delete.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/delete.go @@ -1,7 +1,9 @@ package client import ( + "bytes" "os" + "strings" ) func (k Kubectl) Delete(namespace, kind, name string, opts DeleteOpts) error { @@ -14,11 +16,19 @@ func (k Kubectl) Delete(namespace, kind, name string, opts DeleteOpts) error { } cmd := k.ctl("delete", argv...) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr + + var stdout bytes.Buffer + var stderr bytes.Buffer + + cmd.Stdout = &stdout + cmd.Stderr = &stderr cmd.Stdin = os.Stdin if err := cmd.Run(); err != nil { + if strings.Contains(stderr.String(), "Error from server (NotFound):") { + print("Delete failed: " + stderr.String()) + return nil + } return err } diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/diff.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/diff.go index 22aefbc..b4958b1 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/diff.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/diff.go @@ -11,25 +11,49 @@ import ( "github.com/grafana/tanka/pkg/kubernetes/manifest" ) -// DiffServerSide takes the desired state and computes the differences on the -// server, returning them in `diff(1)` format +// DiffServerSide takes the desired state and computes the differences, returning them in `diff(1)` format +// It also validates that manifests are valid server-side func (k Kubectl) DiffServerSide(data manifest.List) (*string, error) { - cmd := k.ctl("diff", "-f", "-") + return k.diff(data, true) +} - raw := bytes.Buffer{} - cmd.Stdout = &raw +// DiffClientSide takes the desired state and computes the differences, returning them in `diff(1)` format +func (k Kubectl) DiffClientSide(data manifest.List) (*string, error) { + return k.diff(data, false) +} +func (k Kubectl) diff(data manifest.List, validate bool) (*string, error) { fw := FilterWriter{filters: []*regexp.Regexp{regexp.MustCompile(`exit status \d`)}} - cmd.Stderr = &fw + diffCmd := func(serverSide bool) (string, error) { + args := []string{"-f", "-"} + if serverSide { + args = append(args, "--server-side") + } + cmd := k.ctl("diff", args...) 
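The diff.go rewrite here runs `kubectl diff` twice when validation is requested: once with --server-side purely so the API server validates the manifests (its output is discarded), then once without it to produce the diff that is actually shown. A rough standalone sketch of that double invocation, assuming kubectl is on PATH:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

// kubectlDiff feeds manifests to `kubectl diff -f -`, optionally server-side.
func kubectlDiff(manifests string, serverSide bool) (string, error) {
	args := []string{"diff", "-f", "-"}
	if serverSide {
		args = append(args, "--server-side")
	}
	cmd := exec.Command("kubectl", args...)
	cmd.Stdin = strings.NewReader(manifests)
	var out bytes.Buffer
	cmd.Stdout = &out
	err := cmd.Run()
	return out.String(), err
}

// diffFailed treats exit status 1 as "differences found", not a failure;
// kubectl diff normally reserves exit codes above 1 for real errors.
func diffFailed(err error) bool {
	if err == nil {
		return false
	}
	ee, ok := err.(*exec.ExitError)
	return !ok || ee.ExitCode() > 1
}

func diffWithValidation(manifests string) (string, error) {
	// First pass: --server-side, only so the API server validates the
	// manifests; its output is discarded.
	if _, err := kubectlDiff(manifests, true); diffFailed(err) {
		return "", fmt.Errorf("server-side validation: %w", err)
	}
	// Second pass: the plain diff whose output is actually reported.
	out, err := kubectlDiff(manifests, false)
	if diffFailed(err) {
		return "", err
	}
	return out, nil
}

func main() {
	diff, err := diffWithValidation("apiVersion: v1\nkind: Namespace\nmetadata:\n  name: demo\n")
	fmt.Println(diff, err)
}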
- cmd.Stdin = strings.NewReader(data.String()) + raw := bytes.Buffer{} + cmd.Stdout = &raw + cmd.Stderr = &fw + cmd.Stdin = strings.NewReader(data.String()) + err := cmd.Run() + return raw.String(), err + } + + if validate { + // Running the diff server-side, this checks that the resource definitions are valid + // However, it also diffs with server-side kubernetes elements, so it adds in a lot of elements that we shouldn't consider + _, err := diffCmd(true) + if diffErr := parseDiffErr(err, fw.buf, k.Info().ClientVersion); diffErr != nil { + return nil, diffErr + } + } - err := cmd.Run() + // Running the actual diff without considering server-side elements + s, err := diffCmd(false) if diffErr := parseDiffErr(err, fw.buf, k.Info().ClientVersion); diffErr != nil { return nil, diffErr } - s := raw.String() if s == "" { return nil, nil } diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/errors.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/errors.go index 356f783..4d12d01 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/errors.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/errors.go @@ -34,3 +34,10 @@ type ErrorNoCluster string func (e ErrorNoCluster) Error() string { return fmt.Sprintf("no cluster that matches the apiServer `%s` was found. Please check your $KUBECONFIG", string(e)) } + +// ErrorNothingReturned means that there was no output returned +type ErrorNothingReturned struct{} + +func (e ErrorNothingReturned) Error() string { + return "kubectl returned no output" +} diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/get.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/get.go index 44adb21..a82c035 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/get.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/get.go @@ -37,9 +37,10 @@ func (k Kubectl) GetByLabels(namespace, kind string, labels map[string]string) ( // GetByState returns the full object, including runtime fields for each // resource in the state -func (k Kubectl) GetByState(data manifest.List) (manifest.List, error) { +func (k Kubectl) GetByState(data manifest.List, opts GetByStateOpts) (manifest.List, error) { list, err := k.get("", "", []string{"-f", "-"}, getOpts{ - stdin: data.String(), + ignoreNotFound: opts.IgnoreNotFound, + stdin: data.String(), }) if err != nil { return nil, err @@ -49,8 +50,9 @@ func (k Kubectl) GetByState(data manifest.List) (manifest.List, error) { } type getOpts struct { - allNamespaces bool - stdin string + allNamespaces bool + ignoreNotFound bool + stdin string } func (k Kubectl) get(namespace, kind string, selector []string, opts getOpts) (manifest.Manifest, error) { @@ -58,6 +60,9 @@ func (k Kubectl) get(namespace, kind string, selector []string, opts getOpts) (m argv := []string{ "-o", "json", } + if opts.ignoreNotFound { + argv = append(argv, "--ignore-not-found") + } if opts.allNamespaces { argv = append(argv, "--all-namespaces") @@ -85,6 +90,12 @@ func (k Kubectl) get(namespace, kind string, selector []string, opts getOpts) (m return nil, parseGetErr(err, serr.String()) } + // return error if nothing was returned + // because parsing empty output as json would cause errors + if sout.Len() == 0 { + return nil, ErrorNothingReturned{} + } + // parse result var m manifest.Manifest if err := json.Unmarshal(sout.Bytes(), &m); err != nil { diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/kubectl.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/kubectl.go 
index 5fde895..e13eb1e 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/kubectl.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/kubectl.go @@ -3,6 +3,7 @@ package client import ( "bytes" "encoding/json" + "fmt" "os" "regexp" @@ -51,12 +52,13 @@ func (k Kubectl) Namespaces() (map[string]bool, error) { cmd := k.ctl("get", "namespaces", "-o", "json") var sout bytes.Buffer + var serr bytes.Buffer cmd.Stdout = &sout - cmd.Stderr = os.Stderr + cmd.Stderr = &serr err := cmd.Run() if err != nil { - return nil, err + return nil, errors.Wrap(err, string(serr.Bytes())) } var list manifest.Manifest @@ -76,6 +78,38 @@ func (k Kubectl) Namespaces() (map[string]bool, error) { return namespaces, nil } +type ErrNamespaceNotFound struct { + Namespace string +} + +func (e ErrNamespaceNotFound) Error() string { + return fmt.Sprintf("Namespace not found: %s", e.Namespace) +} + +// Namespace finds a single namespace in the cluster +func (k Kubectl) Namespace(namespace string) (manifest.Manifest, error) { + cmd := k.ctl("get", "namespaces", namespace, "-o", "json", "--ignore-not-found") + + var sout bytes.Buffer + cmd.Stdout = &sout + cmd.Stderr = os.Stderr + + err := cmd.Run() + if err != nil { + return nil, err + } + if len(sout.Bytes()) == 0 { + return nil, ErrNamespaceNotFound{ + Namespace: namespace, + } + } + var ns manifest.Manifest + if err := json.Unmarshal(sout.Bytes(), &ns); err != nil { + return nil, err + } + return ns, nil +} + // FilterWriter is an io.Writer that discards every message that matches at // least one of the regular expressions. type FilterWriter struct { diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/resources.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/resources.go index 96a6809..9b4d5bc 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/client/resources.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/client/resources.go @@ -30,6 +30,7 @@ func (r Resources) Namespaced(m manifest.Manifest) bool { // Resource is a Kubernetes API Resource type Resource struct { APIGroup string `json:"APIGROUP"` + APIVersion string `json:"APIVERSION"` Kind string `json:"KIND"` Name string `json:"NAME"` Namespaced bool `json:"NAMESPACED,string"` @@ -38,7 +39,14 @@ type Resource struct { } func (r Resource) FQN() string { - return strings.TrimSuffix(r.Kind+"."+r.APIGroup, ".") + apiGroup := "" + if r.APIGroup != "" { + // this is only set in kubectl v1.18 and earlier + apiGroup = r.APIGroup + } else if pos := strings.Index(r.APIVersion, "/"); pos > 0 { + apiGroup = r.APIVersion[0:pos] + } + return strings.TrimSuffix(r.Name+"."+apiGroup, ".") } // Resources returns all API resources known to the server diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/diff.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/diff.go index ab0b84e..acbfdb9 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/diff.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/diff.go @@ -23,7 +23,18 @@ Please upgrade kubectl to at least version 1.18.1.`) // required for separating namespaces, err := k.ctl.Namespaces() if err != nil { - return nil, errors.Wrap(err, "listing namespaces") + resourceNamespaces := state.Namespaces() + namespaces = map[string]bool{} + for _, namespace := range resourceNamespaces { + _, err = k.ctl.Namespace(namespace) + if err != nil { + if errors.As(err, client.ErrNamespaceNotFound{}) { + continue + } + return nil, errors.Wrap(err, "retrieving namespaces") + } + namespaces[namespace] = true + } } 
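// Editor's note: a minimal, hypothetical caller-side sketch of the updated client
// contract introduced above (the helper name, file placement, and `ctl` value are
// assumptions, not part of the patch). It mirrors the apply.go hunk: request
// `--ignore-not-found` via GetByStateOpts and treat client.ErrorNothingReturned as
// "no live objects yet" instead of a hard failure.
package example

import (
	"github.com/grafana/tanka/pkg/kubernetes/client"
	"github.com/grafana/tanka/pkg/kubernetes/manifest"
)

// liveOrEmpty fetches the live versions of the desired state, returning an empty
// list when kubectl prints nothing (none of the objects exist in the cluster yet).
func liveOrEmpty(ctl client.Client, state manifest.List) (manifest.List, error) {
	live, err := ctl.GetByState(state, client.GetByStateOpts{IgnoreNotFound: true})
	if _, ok := err.(client.ErrorNothingReturned); ok {
		return manifest.List{}, nil // kubectl returned no output
	} else if err != nil {
		return nil, err
	}
	return live, nil
}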
resources, err := k.ctl.Resources() if err != nil { @@ -48,13 +59,27 @@ Please upgrade kubectl to at least version 1.18.1.`) return nil, err } - // reports all resources as new - staticDiff := StaticDiffer(true) + // reports all resources as created + staticDiffAllCreated := StaticDiffer(true) + + // reports all resources as deleted + staticDiffAllDeleted := StaticDiffer(false) + + // include orphaned resources in the diff if it was requested by the user + orphaned := manifest.List{} + if opts.WithPrune { + // find orphaned resources + orphaned, err = k.Orphaned(state) + if err != nil { + return nil, err + } + } // run the diff d, err := multiDiff{ {differ: liveDiff, state: live}, - {differ: staticDiff, state: soon}, + {differ: staticDiffAllCreated, state: soon}, + {differ: staticDiffAllDeleted, state: orphaned}, }.diff() switch { diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/kubernetes.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/kubernetes.go index 2e4f07c..22f42ed 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/kubernetes.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/kubernetes.go @@ -12,7 +12,7 @@ import ( // Kubernetes exposes methods to work with the Kubernetes orchestrator type Kubernetes struct { - Env v1alpha1.Config + Env v1alpha1.Environment // Client (kubectl) ctl client.Client @@ -26,7 +26,7 @@ type Kubernetes struct { type Differ func(manifest.List) (*string, error) // New creates a new Kubernetes with an initialized client -func New(env v1alpha1.Config) (*Kubernetes, error) { +func New(env v1alpha1.Environment) (*Kubernetes, error) { // setup client ctl, err := client.New(env.Spec.APIServer) if err != nil { @@ -46,8 +46,9 @@ func New(env v1alpha1.Config) (*Kubernetes, error) { Env: env, ctl: ctl, differs: map[string]Differ{ - "native": ctl.DiffServerSide, - "subset": SubsetDiffer(ctl), + "native": ctl.DiffClientSide, + "validate": ctl.DiffServerSide, + "subset": SubsetDiffer(ctl), }, } @@ -63,6 +64,8 @@ func (k *Kubernetes) Close() error { type DiffOpts struct { // Use `diffstat(1)` to create a histogram of the changes instead Summarize bool + // Find orphaned resources and include them in the diff + WithPrune bool // Set the diff-strategy. If unset, the value set in the spec is used Strategy string diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/errors.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/errors.go index 3afcf33..e1b6dba 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/errors.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/errors.go @@ -66,3 +66,14 @@ func (s SampleString) Indent(n int) string { lines := strings.Split(s.String(), "\n") return indent + strings.Join(lines, "\n"+indent) } + +// ErrorDuplicateName means two resources share the same name using the given +// nameFormat. +type ErrorDuplicateName struct { + name string + format string +} + +func (e ErrorDuplicateName) Error() string { + return fmt.Sprintf("Two resources share the same name '%s'. 
Please adapt the name template '%s'.", e.name, e.format) +} diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/manifest.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/manifest.go index a62634c..51f577d 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/manifest.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/manifest/manifest.go @@ -4,7 +4,9 @@ import ( "bytes" "encoding/json" "fmt" + "text/template" + "github.com/Masterminds/sprig/v3" "github.com/pkg/errors" "github.com/stretchr/objx" yaml "gopkg.in/yaml.v2" @@ -60,7 +62,7 @@ func (m Manifest) Verify() error { if !o.Get("metadata").IsMSI() { fields["metadata"] = ErrInvalidMap } - if !o.Get("metadata.name").IsStr() { + if !o.Get("metadata.name").IsStr() && !o.Get("metadata.generateName").IsStr() { fields["metadata.name"] = ErrInvalidStr } @@ -261,6 +263,20 @@ func (m List) String() string { return buf.String() } +func (m List) Namespaces() []string { + namespaces := map[string]struct{}{} + for _, manifest := range m { + if namespace := manifest.Metadata().Namespace(); namespace != "" { + namespaces[namespace] = struct{}{} + } + } + keys := []string{} + for k := range namespaces { + keys = append(keys, k) + } + return keys +} + func m2o(m interface{}) objx.Map { switch mm := m.(type) { case Metadata: @@ -270,3 +286,35 @@ func m2o(m interface{}) objx.Map { } return nil } + +// DefaultNameFormat to use when no nameFormat is supplied +const DefaultNameFormat = `{{ print .kind "_" .metadata.name | snakecase }}` + +func ListAsMap(list List, nameFormat string) (map[string]interface{}, error) { + if nameFormat == "" { + nameFormat = DefaultNameFormat + } + + tmpl, err := template.New(""). + Funcs(sprig.TxtFuncMap()). + Parse(nameFormat) + if err != nil { + return nil, fmt.Errorf("Parsing name format: %w", err) + } + + out := make(map[string]interface{}) + for _, m := range list { + var buf bytes.Buffer + if err := tmpl.Execute(&buf, m); err != nil { + return nil, err + } + name := buf.String() + + if _, ok := out[name]; ok { + return nil, ErrorDuplicateName{name: name, format: nameFormat} + } + out[name] = map[string]interface{}(m) + } + + return out, nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/kubernetes/util/diff.go b/vendor/github.com/grafana/tanka/pkg/kubernetes/util/diff.go index 31b5679..fe04324 100644 --- a/vendor/github.com/grafana/tanka/pkg/kubernetes/util/diff.go +++ b/vendor/github.com/grafana/tanka/pkg/kubernetes/util/diff.go @@ -3,7 +3,6 @@ package util import ( "bytes" "fmt" - "io/ioutil" "os" "os/exec" "path/filepath" @@ -26,16 +25,16 @@ func DiffName(m manifest.Manifest) string { // DiffStr computes the differences between the strings `is` and `should` using the // UNIX `diff(1)` utility. 
func DiffStr(name, is, should string) (string, error) { - dir, err := ioutil.TempDir("", "diff") + dir, err := os.MkdirTemp("", "diff") if err != nil { return "", err } defer os.RemoveAll(dir) - if err := ioutil.WriteFile(filepath.Join(dir, "LIVE-"+name), []byte(is), os.ModePerm); err != nil { + if err := os.WriteFile(filepath.Join(dir, "LIVE-"+name), []byte(is), os.ModePerm); err != nil { return "", err } - if err := ioutil.WriteFile(filepath.Join(dir, "MERGED-"+name), []byte(should), os.ModePerm); err != nil { + if err := os.WriteFile(filepath.Join(dir, "MERGED-"+name), []byte(should), os.ModePerm); err != nil { return "", err } diff --git a/vendor/github.com/grafana/tanka/pkg/kustomize/build.go b/vendor/github.com/grafana/tanka/pkg/kustomize/build.go new file mode 100644 index 0000000..ca53aea --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/kustomize/build.go @@ -0,0 +1,40 @@ +package kustomize + +import ( + "bytes" + "io" + "os" + + "github.com/grafana/tanka/pkg/kubernetes/manifest" + "github.com/pkg/errors" + yaml "gopkg.in/yaml.v3" +) + +// Build expands a Kustomize into a regular manifest.List using the `kustomize +// build` command +func (k ExecKustomize) Build(path string) (manifest.List, error) { + cmd := k.cmd("build", path) + var buf bytes.Buffer + cmd.Stdout = &buf + cmd.Stderr = os.Stderr + + if err := cmd.Run(); err != nil { + return nil, errors.Wrap(err, "Expanding Kustomize") + } + + var list manifest.List + d := yaml.NewDecoder(&buf) + for { + var m manifest.Manifest + if err := d.Decode(&m); err != nil { + if err == io.EOF { + break + } + return nil, errors.Wrap(err, "Parsing Kustomize output") + } + + list = append(list, m) + } + + return list, nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/kustomize/jsonnet.go b/vendor/github.com/grafana/tanka/pkg/kustomize/jsonnet.go new file mode 100644 index 0000000..c857ec0 --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/kustomize/jsonnet.go @@ -0,0 +1,86 @@ +package kustomize + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + + "github.com/google/go-jsonnet" + "github.com/google/go-jsonnet/ast" + "github.com/grafana/tanka/pkg/kubernetes/manifest" +) + +// JsonnetOpts are additional properties the consumer of the native func might +// pass. +type JsonnetOpts struct { + // CalledFrom is the file that calls kustomizeBuild. This is used to find the + // vendored Kustomize relative to this file + CalledFrom string `json:"calledFrom"` + // NameBuild is used to create the keys in the resulting map + NameFormat string `json:"nameFormat"` +} + +// NativeFunc returns a jsonnet native function that provides the same +// functionality as `Kustomize.Build` of this package. Kustomize yamls are required to be +// present on the local filesystem, at a relative location to the file that +// calls `kustomize.build()` / `std.native('kustomizeBuild')`. 
This guarantees +// hermeticity +func NativeFunc(k Kustomize) *jsonnet.NativeFunction { + return &jsonnet.NativeFunction{ + Name: "kustomizeBuild", + // Similar to `kustomize build {path}` where {path} is a local path + Params: ast.Identifiers{"path", "opts"}, + Func: func(data []interface{}) (interface{}, error) { + path, ok := data[0].(string) + if !ok { + return nil, fmt.Errorf("Argument 'path' must be of 'string' type, got '%T' instead", data[0]) + } + + // TODO: validate data[1] actually follows the struct scheme + opts, err := parseOpts(data[1]) + if err != nil { + return "", err + } + + // resolve the Kustomize path relative to the caller + callerDir := filepath.Dir(opts.CalledFrom) + actual_path := filepath.Join(callerDir, path) + if _, err := os.Stat(actual_path); err != nil { + return nil, fmt.Errorf("kustomizeBuild: Failed to find kustomization at '%s': %s. See https://tanka.dev/kustomize#failed-to-find-kustomization", actual_path, err) + } + + // render resources + list, err := k.Build(actual_path) + if err != nil { + return nil, err + } + + // convert list to map + out, err := manifest.ListAsMap(list, opts.NameFormat) + if err != nil { + return nil, err + } + + return out, nil + }, + } +} + +func parseOpts(data interface{}) (*JsonnetOpts, error) { + c, err := json.Marshal(data) + if err != nil { + return nil, err + } + var opts JsonnetOpts + if err := json.Unmarshal(c, &opts); err != nil { + return nil, err + } + + // Kustomize paths are only allowed at relative paths. Use conf.CalledFrom to find the callers directory + if opts.CalledFrom == "" { + return nil, fmt.Errorf("kustomizeBuild: 'opts.calledFrom' is unset or empty.\nTanka needs this to find your Kustomize. See https://tanka.dev/kustomize#optscalledfrom-unset\n") + } + + return &opts, nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/kustomize/kustomize.go b/vendor/github.com/grafana/tanka/pkg/kustomize/kustomize.go new file mode 100644 index 0000000..1d4c5bd --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/kustomize/kustomize.go @@ -0,0 +1,39 @@ +package kustomize + +import ( + "os" + "os/exec" + + "github.com/grafana/tanka/pkg/kubernetes/manifest" +) + +// Kustomize provides high level access to some Kustomize operations +type Kustomize interface { + // Build returns the individual resources of a Kustomize + Build(path string) (manifest.List, error) +} + +// ExecKustomize is a Kustomize implementation powered by the `kustomize` +// command line utility +type ExecKustomize struct{} + +// cmd returns a prepared exec.Cmd to use the `kustomize` binary +func (e ExecKustomize) cmd(action string, args ...string) *exec.Cmd { + argv := []string{action} + argv = append(argv, args...) + + cmd := kustomizeCmd(argv...) + cmd.Stderr = os.Stderr + + return cmd +} + +// kustomizeCmd returns a bare exec.Cmd pointed at the local kustomize binary +func kustomizeCmd(args ...string) *exec.Cmd { + bin := "kustomize" + if env := os.Getenv("TANKA_KUSTOMIZE_PATH"); env != "" { + bin = env + } + + return exec.Command(bin, args...) 
+} diff --git a/vendor/github.com/grafana/tanka/pkg/process/process.go b/vendor/github.com/grafana/tanka/pkg/process/process.go index c14ae8b..5d725e5 100644 --- a/vendor/github.com/grafana/tanka/pkg/process/process.go +++ b/vendor/github.com/grafana/tanka/pkg/process/process.go @@ -18,11 +18,17 @@ const ( // - tanka.dev/** labels // - filtering // - best-effort sorting -func Process(raw interface{}, cfg v1alpha1.Config, exprs Matchers) (manifest.List, error) { +func Process(cfg v1alpha1.Environment, exprs Matchers) (manifest.List, error) { + raw := cfg.Data + + if raw == nil { + return manifest.List{}, nil + } + // Scan for everything that looks like a Kubernetes object extracted, err := Extract(raw) if err != nil { - return nil, err + return nil, fmt.Errorf("got an error while extracting env `%s`: %w", cfg.Metadata.Name, err) } // Unwrap *List types @@ -56,7 +62,7 @@ func Process(raw interface{}, cfg v1alpha1.Config, exprs Matchers) (manifest.Lis } // Label conditionally adds tanka.dev/** labels to each manifest in the List -func Label(list manifest.List, cfg v1alpha1.Config) manifest.List { +func Label(list manifest.List, cfg v1alpha1.Environment) manifest.List { for i, m := range list { // inject tanka.dev/environment label if cfg.Spec.InjectLabels { @@ -68,7 +74,7 @@ func Label(list manifest.List, cfg v1alpha1.Config) manifest.List { return list } -func ResourceDefaults(list manifest.List, cfg v1alpha1.Config) manifest.List { +func ResourceDefaults(list manifest.List, cfg v1alpha1.Environment) manifest.List { for i, m := range list { for k, v := range cfg.Spec.ResourceDefaults.Annotations { annotations := m.Metadata().Annotations() diff --git a/vendor/github.com/grafana/tanka/pkg/spec/spec.go b/vendor/github.com/grafana/tanka/pkg/spec/spec.go index 989c489..86f5515 100644 --- a/vendor/github.com/grafana/tanka/pkg/spec/spec.go +++ b/vendor/github.com/grafana/tanka/pkg/spec/spec.go @@ -2,13 +2,13 @@ package spec import ( "encoding/json" - "io/ioutil" "os" "path/filepath" "regexp" "github.com/pkg/errors" + "github.com/grafana/tanka/pkg/jsonnet/jpath" "github.com/grafana/tanka/pkg/spec/v1alpha1" ) @@ -18,41 +18,57 @@ const APIGroup = "tanka.dev" // Specfile is the filename for the environment config const Specfile = "spec.json" -// ParseDir parses the given environments `spec.json` into a `v1alpha1.Config` +// ParseDir parses the given environments `spec.json` into a `v1alpha1.Environment` // object with the name set to the directories name -func ParseDir(baseDir, name string) (*v1alpha1.Config, error) { - fi, err := os.Stat(baseDir) +func ParseDir(path string) (*v1alpha1.Environment, error) { + root, base, err := jpath.Dirs(path) if err != nil { return nil, err } - if !fi.IsDir() { - return nil, errors.New("baseDir is not an directory") + + // name of the environment: relative path from rootDir + name, err := filepath.Rel(root, base) + if err != nil { + return nil, err + } + + file, err := jpath.Entrypoint(path) + if err != nil { + return nil, err } - data, err := ioutil.ReadFile(filepath.Join(baseDir, Specfile)) + namespace, err := filepath.Rel(root, file) + if err != nil { + return nil, err + } + + data, err := os.ReadFile(filepath.Join(base, Specfile)) if err != nil { if os.IsNotExist(err) { c := v1alpha1.New() - c.Metadata.Name = name - return c, ErrNoSpec{name} + c.Metadata.Name = name // legacy behavior + c.Metadata.Namespace = namespace + return c, ErrNoSpec{path} } return nil, err } - return Parse(data, name) + c, err := Parse(data, namespace) + if c != nil { + // set the name 
field + c.Metadata.Name = name // legacy behavior + } + + return c, err } -// Parse parses the json `data` into a `v1alpha1.Config` object. -// `name` is the name of the environment -func Parse(data []byte, name string) (*v1alpha1.Config, error) { +// Parse parses the json `data` into a `v1alpha1.Environment` object. +func Parse(data []byte, namespace string) (*v1alpha1.Environment, error) { config := v1alpha1.New() if err := json.Unmarshal(data, config); err != nil { return nil, errors.Wrap(err, "parsing spec.json") } - // set the name field - config.Metadata.Name = name - if err := handleDeprecated(config, data); err != nil { return config, err } @@ -62,10 +78,12 @@ func Parse(data []byte, name string) (*v1alpha1.Config, error) { config.Spec.APIServer = "https://" + config.Spec.APIServer } + config.Metadata.Namespace = namespace + return config, nil } -func handleDeprecated(c *v1alpha1.Config, data []byte) error { +func handleDeprecated(c *v1alpha1.Environment, data []byte) error { var errDepr ErrDeprecated var msi map[string]interface{} diff --git a/vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/config.go b/vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/environment.go similarity index 64% rename from vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/config.go rename to vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/environment.go index 77ce029..59bc6d1 100644 --- a/vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/config.go +++ b/vendor/github.com/grafana/tanka/pkg/spec/v1alpha1/environment.go @@ -1,10 +1,14 @@ package v1alpha1 -import "strings" +import ( + "crypto/sha256" + "encoding/hex" + "fmt" +) -// New creates a new Config object with internal values already set -func New() *Config { - c := Config{} +// New creates a new Environment object with internal values already set +func New() *Environment { + c := Environment{} // constants c.APIVersion = "tanka.dev/v1alpha1" @@ -18,19 +22,20 @@ func New() *Config { return &c } -// Config holds the configuration variables for config version v1alpha1 -// ApiVersion and Kind are currently unused, this may change in the future. 
-type Config struct { - APIVersion string `json:"apiVersion"` - Kind string `json:"kind"` - Metadata Metadata `json:"metadata"` - Spec Spec `json:"spec"` +// Environment represents a set of resources in relation to its Kubernetes cluster +type Environment struct { + APIVersion string `json:"apiVersion"` + Kind string `json:"kind"` + Metadata Metadata `json:"metadata"` + Spec Spec `json:"spec"` + Data interface{} `json:"data,omitempty"` } // Metadata is meant for humans and not parsed type Metadata struct { - Name string `json:"name,omitempty"` - Labels map[string]string `json:"labels,omitempty"` + Name string `json:"name,omitempty"` + Namespace string `json:"namespace,omitempty"` + Labels map[string]string `json:"labels,omitempty"` } // Has and Get make Metadata a simple wrapper for labels.Labels to use our map in their querier @@ -45,7 +50,9 @@ func (m Metadata) Get(label string) (value string) { } func (m Metadata) NameLabel() string { - return strings.Replace(m.Name, "/", ".", -1) + partsHash := sha256.Sum256([]byte(fmt.Sprintf("%s:%s", m.Name, m.Namespace))) + chars := []rune(hex.EncodeToString(partsHash[:])) + return string(chars[:48]) } // Spec defines Kubernetes properties diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/errors.go b/vendor/github.com/grafana/tanka/pkg/tanka/errors.go new file mode 100644 index 0000000..7444383 --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/errors.go @@ -0,0 +1,39 @@ +package tanka + +import ( + "fmt" + "strings" +) + +// ErrNoEnv means that the given jsonnet has no Environment object +// This must not be fatal, some operations work without +type ErrNoEnv struct { + path string +} + +func (e ErrNoEnv) Error() string { + return fmt.Sprintf("unable to find an Environment in '%s'", e.path) +} + +// ErrMultipleEnvs means that the given jsonnet has multiple Environment objects +type ErrMultipleEnvs struct { + path string + names []string +} + +func (e ErrMultipleEnvs) Error() string { + return fmt.Sprintf("found multiple Environments in '%s'. 
Use `--name` to select a single one: \n - %s", e.path, strings.Join(e.names, "\n - ")) +} + +// ErrParallel is an array of errors collected while processing in parallel +type ErrParallel struct { + errors []error +} + +func (e ErrParallel) Error() string { + returnErr := "Errors occurred during parallel processing:\n\n" + for _, err := range e.errors { + returnErr = fmt.Sprintf("%s- %s\n\n", returnErr, err.Error()) + } + return returnErr +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/evaluators.go b/vendor/github.com/grafana/tanka/pkg/tanka/evaluators.go new file mode 100644 index 0000000..8ceaa9f --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/evaluators.go @@ -0,0 +1,147 @@ +package tanka + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/grafana/tanka/pkg/jsonnet" + "github.com/grafana/tanka/pkg/jsonnet/jpath" +) + +// EvalJsonnet evaluates the jsonnet environment at the given file system path +func evalJsonnet(path string, opts jsonnet.Opts) (raw string, err error) { + entrypoint, err := jpath.Entrypoint(path) + if err != nil { + return "", err + } + + // evaluate Jsonnet + if opts.EvalScript != "" { + var tla []string + for k := range opts.TLACode { + tla = append(tla, k+"="+k) + } + evalScript := fmt.Sprintf(` + local main = (import '%s'); + %s +`, entrypoint, opts.EvalScript) + + if len(tla) != 0 { + tlaJoin := strings.Join(tla, ", ") + evalScript = fmt.Sprintf(` +function(%s) + local main = (import '%s')(%s); + %s +`, tlaJoin, entrypoint, tlaJoin, opts.EvalScript) + } + + raw, err = jsonnet.Evaluate(path, evalScript, opts) + if err != nil { + return "", errors.Wrap(err, "evaluating jsonnet") + } + return raw, nil + } + + raw, err = jsonnet.EvaluateFile(entrypoint, opts) + if err != nil { + return "", errors.Wrap(err, "evaluating jsonnet") + } + return raw, nil +} + +const PatternEvalScript = "main.%s" + +// MetadataEvalScript finds the Environment object (without its .data object) +const MetadataEvalScript = ` +local noDataEnv(object) = + std.prune( + if std.isObject(object) + then + if std.objectHas(object, 'apiVersion') + && std.objectHas(object, 'kind') + then + if object.kind == 'Environment' + then object { data+:: {} } + else {} + else + std.mapWithKey( + function(key, obj) + noDataEnv(obj), + object + ) + else if std.isArray(object) + then + std.map( + function(obj) + noDataEnv(obj), + object + ) + else {} + ); + +noDataEnv(main) +` + +// MetadataSingleEnvEvalScript returns a Single Environment object +const MetadataSingleEnvEvalScript = ` +local singleEnv(object) = + std.prune( + if std.isObject(object) + then + if std.objectHas(object, 'apiVersion') + && std.objectHas(object, 'kind') + then + if object.kind == 'Environment' + && object.metadata.name == '%s' + then object { data:: super.data } + else {} + else + std.mapWithKey( + function(key, obj) + singleEnv(obj), + object + ) + else if std.isArray(object) + then + std.map( + function(obj) + singleEnv(obj), + object + ) + else {} + ); + +singleEnv(main) +` + +// SingleEnvEvalScript returns a Single Environment object +const SingleEnvEvalScript = ` +local singleEnv(object) = + if std.isObject(object) + then + if std.objectHas(object, 'apiVersion') + && std.objectHas(object, 'kind') + then + if object.kind == 'Environment' + && std.member(object.metadata.name, '%s') + then object + else {} + else + std.mapWithKey( + function(key, obj) + singleEnv(obj), + object + ) + else if std.isArray(object) + then + std.map( + function(obj) + singleEnv(obj), + object + ) + else {}; + 
+singleEnv(main) +` diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/export.go b/vendor/github.com/grafana/tanka/pkg/tanka/export.go new file mode 100644 index 0000000..40febb4 --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/export.go @@ -0,0 +1,221 @@ +package tanka + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "text/template" + + "github.com/Masterminds/sprig/v3" + "k8s.io/apimachinery/pkg/labels" + + "github.com/grafana/tanka/pkg/kubernetes/manifest" + "github.com/grafana/tanka/pkg/spec/v1alpha1" +) + +// BelRune is a string of the Ascii character BEL which made computers ring in ancient times +// We use it as "magic" char for the subfolder creation as it is a non printable character and thereby will never be +// in a valid filepath by accident. Only when we include it. +const BelRune = string(rune(7)) + +// When exporting manifests to files, it becomes increasingly hard to map manifests back to its environment, this file +// can be used to map the files back to their environment. This is aimed to be used by CI/CD but can also be used for +// debugging purposes. +const manifestFile = "manifest.json" + +// ExportEnvOpts specify options on how to export environments +type ExportEnvOpts struct { + // formatting the filename based on the exported Kubernetes manifest + Format string + // extension of the filename + Extension string + // merge export with existing directory + Merge bool + // optional: options to parse Jsonnet + Opts Opts + // optional: filter environments based on labels + Selector labels.Selector + // optional: number of environments to process in parallel + Parallelism int +} + +func ExportEnvironments(envs []*v1alpha1.Environment, to string, opts *ExportEnvOpts) error { + // Keep track of which file maps to which environment + fileToEnv := map[string]string{} + + // dir must be empty + empty, err := dirEmpty(to) + if err != nil { + return fmt.Errorf("Checking target dir: %s", err) + } + if !empty && !opts.Merge { + return fmt.Errorf("Output dir `%s` not empty. Pass --merge to ignore this", to) + } + + // get all environments for paths + loadedEnvs, err := parallelLoadEnvironments(envs, parallelOpts{ + Opts: opts.Opts, + Selector: opts.Selector, + Parallelism: opts.Parallelism, + }) + if err != nil { + return err + } + + for _, env := range loadedEnvs { + // get the manifests + loaded, err := LoadManifests(env, opts.Opts.Filters) + if err != nil { + return err + } + + env := loaded.Env + res := loaded.Resources + + // create raw manifest version of env for templating + env.Data = nil + raw, err := json.Marshal(env) + if err != nil { + return err + } + var menv manifest.Manifest + if err := json.Unmarshal(raw, &menv); err != nil { + return err + } + + // create template + manifestTemplate, err := createTemplate(opts.Format, menv) + if err != nil { + return fmt.Errorf("Parsing format: %s", err) + } + + // write each to a file + for _, m := range res { + // apply template + name, err := applyTemplate(manifestTemplate, m) + if err != nil { + return fmt.Errorf("executing name template: %w", err) + } + + // Create all subfolders in path + relpath := name + "." + opts.Extension + path := filepath.Join(to, relpath) + + fileToEnv[relpath] = env.Metadata.Namespace + + // Abort if already exists + if exists, err := fileExists(path); err != nil { + return err + } else if exists { + return fmt.Errorf("File '%s' already exists. 
Aborting", path) + } + + // Write manifest + data := m.String() + if err := writeExportFile(path, []byte(data)); err != nil { + return err + } + } + } + + // Write manifest file + if len(fileToEnv) != 0 { + data, err := json.MarshalIndent(fileToEnv, "", " ") + if err != nil { + return err + } + path := filepath.Join(to, manifestFile) + if err := writeExportFile(path, data); err != nil { + return err + } + } + + return nil +} + +func fileExists(name string) (bool, error) { + _, err := os.Stat(name) + if os.IsNotExist(err) { + return false, nil + } + if err != nil { + return false, err + } + return true, nil +} + +func dirEmpty(dir string) (bool, error) { + f, err := os.Open(dir) + if os.IsNotExist(err) { + return true, os.MkdirAll(dir, os.ModePerm) + } else if err != nil { + return false, err + } + defer f.Close() + + _, err = f.Readdirnames(1) + if err == io.EOF { + return true, nil + } + return false, err +} + +func writeExportFile(path string, data []byte) error { + if err := os.MkdirAll(filepath.Dir(path), 0700); err != nil { + return fmt.Errorf("creating filepath '%s': %s", filepath.Dir(path), err) + } + + return os.WriteFile(path, data, 0644) +} + +func createTemplate(format string, env manifest.Manifest) (*template.Template, error) { + // Replace all os.path separators in string with BelRune for creating subfolders + replaceFormat := replaceTmplText(format, string(os.PathSeparator), BelRune) + + envMap := template.FuncMap{"env": func() manifest.Manifest { return env }} + + template, err := template.New(""). + Funcs(sprig.TxtFuncMap()). // register Masterminds/sprig + Funcs(envMap). // register environment mapping + Parse(replaceFormat) // parse template + if err != nil { + return nil, err + } + return template, nil +} + +func replaceTmplText(s, old, new string) string { + parts := []string{} + l := strings.Index(s, "{{") + r := strings.Index(s, "}}") + 2 + + for l != -1 && l < r { + // replace only in text between template action blocks + text := strings.ReplaceAll(s[:l], old, new) + action := s[l:r] + parts = append(parts, text, action) + s = s[r:] + l = strings.Index(s, "{{") + r = strings.Index(s, "}}") + 2 + } + parts = append(parts, strings.ReplaceAll(s, old, new)) + return strings.Join(parts, "") +} + +func applyTemplate(template *template.Template, m manifest.Manifest) (path string, err error) { + buf := bytes.Buffer{} + if err := template.Execute(&buf, m); err != nil { + return "", err + } + + // Replace all os.path separators in string in order to not accidentally create subfolders + path = strings.Replace(buf.String(), string(os.PathSeparator), "-", -1) + // Replace the BEL character inserted with a path separator again in order to create a subfolder + path = strings.Replace(path, BelRune, string(os.PathSeparator), -1) + + return path, nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/find.go b/vendor/github.com/grafana/tanka/pkg/tanka/find.go new file mode 100644 index 0000000..43db957 --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/find.go @@ -0,0 +1,124 @@ +package tanka + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/grafana/tanka/pkg/jsonnet/jpath" + "github.com/grafana/tanka/pkg/spec/v1alpha1" + "github.com/pkg/errors" + "k8s.io/apimachinery/pkg/labels" +) + +// FindOpts are optional arguments for FindEnvs +type FindOpts struct { + JsonnetOpts + Selector labels.Selector +} + +// FindEnvs returns metadata of all environments recursively found in 'path'. 
+// Each directory is tested and included if it is a valid environment, either +// static or inline. If a directory is a valid environment, its subdirectories +// are not checked. +func FindEnvs(path string, opts FindOpts) ([]*v1alpha1.Environment, error) { + // find all environments at dir + envs, errs := find(path, Opts{JsonnetOpts: opts.JsonnetOpts}) + if errs != nil { + return envs, ErrParallel{errors: errs} + } + + // optionally filter + if opts.Selector == nil || opts.Selector.Empty() { + return envs, nil + } + + filtered := make([]*v1alpha1.Environment, 0, len(envs)) + for _, e := range envs { + if !opts.Selector.Matches(e.Metadata) { + continue + } + filtered = append(filtered, e) + } + + return filtered, nil +} + +func findErr(path string, err error) []error { + return []error{fmt.Errorf("%s:\n %w", path, err)} +} + +// find implements the actual functionality described at 'FindEnvs' +func find(path string, opts Opts) ([]*v1alpha1.Environment, []error) { + // try if this has envs + list, err := List(path, opts) + if err != nil && + // expected when looking for environments + !errors.As(err, &jpath.ErrorNoBase{}) && + !errors.As(err, &jpath.ErrorFileNotFound{}) { + return nil, findErr(path, err) + } + if len(list) != 0 { + // it has. don't search deeper + return list, nil + } + + stat, err := os.Stat(path) + if err != nil { + return nil, findErr(path, err) + } + + // if path is a file, don't search deeper + if !stat.IsDir() { + return nil, nil + } + + // list directory + files, err := os.ReadDir(path) + if err != nil { + return nil, findErr(path, err) + } + + // it's not one. Maybe subdirectories are? + ch := make(chan findOut) + routines := 0 + + // recursively find in parallel + for _, fi := range files { + if !fi.IsDir() { + continue + } + + routines++ + go findShim(filepath.Join(path, fi.Name()), opts, ch) + } + + // collect parallel results + var errs []error + var envs []*v1alpha1.Environment + + for i := 0; i < routines; i++ { + out := <-ch + if out.errs != nil { + errs = append(errs, out.errs...) + } + + envs = append(envs, out.envs...) 
+ } + + if len(errs) != 0 { + return envs, errs + } + + return envs, nil +} + +type findOut struct { + envs []*v1alpha1.Environment + errs []error +} + +func findShim(dir string, opts Opts, ch chan findOut) { + envs, errs := find(dir, opts) + ch <- findOut{envs: envs, errs: errs} +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/format.go b/vendor/github.com/grafana/tanka/pkg/tanka/format.go index 62fbc8f..941c25a 100644 --- a/vendor/github.com/grafana/tanka/pkg/tanka/format.go +++ b/vendor/github.com/grafana/tanka/pkg/tanka/format.go @@ -2,7 +2,6 @@ package tanka import ( "fmt" - "io/ioutil" "os" "path/filepath" @@ -46,7 +45,7 @@ func FormatFiles(fds []string, opts *FormatOpts) ([]string, error) { outFn := opts.OutFn if outFn == nil { outFn = func(name, content string) error { - return ioutil.WriteFile(name, []byte(content), 0644) + return os.WriteFile(name, []byte(content), 0644) } } @@ -58,7 +57,7 @@ func FormatFiles(fds []string, opts *FormatOpts) ([]string, error) { var changed []string for _, p := range paths { - content, err := ioutil.ReadFile(p) + content, err := os.ReadFile(p) if err != nil { return nil, err } @@ -104,7 +103,10 @@ func findFiles(target string, excludes []glob.Glob) ([]string, error) { // godirwalk is faster than filepath.Walk, 'cause no os.Stat required err = godirwalk.Walk(target, &godirwalk.Options{ - Callback: func(path string, de *godirwalk.Dirent) error { + Callback: func(rawPath string, de *godirwalk.Dirent) error { + // Normalize slashes for Windows + path := filepath.ToSlash(rawPath) + if de.IsDir() { return nil } diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/inline.go b/vendor/github.com/grafana/tanka/pkg/tanka/inline.go new file mode 100644 index 0000000..3e11feb --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/inline.go @@ -0,0 +1,170 @@ +package tanka + +import ( + "encoding/json" + "fmt" + "path/filepath" + "sort" + + "github.com/grafana/tanka/pkg/jsonnet/jpath" + "github.com/grafana/tanka/pkg/kubernetes/manifest" + "github.com/grafana/tanka/pkg/process" + "github.com/grafana/tanka/pkg/spec" + "github.com/grafana/tanka/pkg/spec/v1alpha1" +) + +// InlineLoader loads an environment that is specified inline from within +// Jsonnet. The Jsonnet output is expected to hold a tanka.dev/Environment type, +// Kubernetes resources are expected at the `data` key of this very type +type InlineLoader struct{} + +func (i *InlineLoader) Load(path string, opts LoaderOpts) (*v1alpha1.Environment, error) { + if opts.Name != "" { + opts.JsonnetOpts.EvalScript = fmt.Sprintf(SingleEnvEvalScript, opts.Name) + } + + data, err := i.Eval(path, opts) + if err != nil { + return nil, err + } + + envs, err := extractEnvs(data) + if err != nil { + return nil, err + } + + if len(envs) > 1 { + names := make([]string, 0, len(envs)) + for _, e := range envs { + // If there's a full match on the given name, use this environment + if name := e.Metadata().Name(); name == opts.Name { + envs = manifest.List{e} + break + } else { + names = append(names, name) + } + } + if len(envs) > 1 { + sort.Strings(names) + return nil, ErrMultipleEnvs{path, names} + } + } + + if len(envs) == 0 { + return nil, fmt.Errorf("found no matching environments; run 'tk env list %s' to view available options", path) + } + + // TODO: Re-serializing the entire env here. 
This is horribly inefficient + envData, err := json.Marshal(envs[0]) + if err != nil { + return nil, err + } + + env, err := inlineParse(path, envData) + if err != nil { + return nil, err + } + + return env, nil +} + +func (i *InlineLoader) Peek(path string, opts LoaderOpts) (*v1alpha1.Environment, error) { + opts.JsonnetOpts.EvalScript = MetadataEvalScript + if opts.Name != "" { + opts.JsonnetOpts.EvalScript = fmt.Sprintf(MetadataSingleEnvEvalScript, opts.Name) + } + return i.Load(path, opts) +} + +func (i *InlineLoader) List(path string, opts LoaderOpts) ([]*v1alpha1.Environment, error) { + opts.JsonnetOpts.EvalScript = MetadataEvalScript + data, err := i.Eval(path, opts) + if err != nil { + return nil, err + } + + list, err := extractEnvs(data) + if err != nil { + return nil, err + } + + envs := make([]*v1alpha1.Environment, 0, len(list)) + for _, raw := range list { + data, err := json.Marshal(raw) + if err != nil { + return nil, err + } + + env, err := inlineParse(path, data) + if err != nil { + return nil, err + } + + envs = append(envs, env) + } + + return envs, nil +} + +func (i *InlineLoader) Eval(path string, opts LoaderOpts) (interface{}, error) { + // Can't provide env as extVar, as we need to evaluate Jsonnet first to know it + opts.ExtCode.Set(environmentExtCode, `error "Using tk.env and std.extVar('tanka.dev/environment') is only supported for static environments. Directly access this data using standard Jsonnet instead."`) + + raw, err := evalJsonnet(path, opts.JsonnetOpts) + if err != nil { + return nil, err + } + + var data interface{} + if err := json.Unmarshal([]byte(raw), &data); err != nil { + return nil, err + } + + return data, nil +} + +func inlineParse(path string, data []byte) (*v1alpha1.Environment, error) { + root, err := jpath.FindRoot(path) + if err != nil { + return nil, err + } + + file, err := jpath.Entrypoint(path) + if err != nil { + return nil, err + } + + namespace, err := filepath.Rel(root, file) + if err != nil { + return nil, err + } + + env, err := spec.Parse(data, namespace) + if err != nil { + return nil, err + } + + return env, nil +} + +// extractEnvs filters out any Environment manifests +func extractEnvs(data interface{}) (manifest.List, error) { + // Scan for everything that looks like a Kubernetes object + extracted, err := process.Extract(data) + if err != nil { + return nil, err + } + + // Unwrap *List types + if err := process.Unwrap(extracted); err != nil { + return nil, err + } + + out := make(manifest.List, 0, len(extracted)) + for _, m := range extracted { + out = append(out, m) + } + + // Extract only object of Kind: Environment + return process.Filter(out, process.MustStrExps("Environment/.*")), nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/load.go b/vendor/github.com/grafana/tanka/pkg/tanka/load.go new file mode 100644 index 0000000..7cf928a --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/load.go @@ -0,0 +1,170 @@ +package tanka + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/grafana/tanka/pkg/jsonnet/jpath" + "github.com/grafana/tanka/pkg/kubernetes" + "github.com/grafana/tanka/pkg/kubernetes/manifest" + "github.com/grafana/tanka/pkg/process" + "github.com/grafana/tanka/pkg/spec" + "github.com/grafana/tanka/pkg/spec/v1alpha1" + "github.com/pkg/errors" +) + +// environmentExtCode is the extCode ID `tk.env` uses underneath +// TODO: remove "import tk" and replace it with tanka-util +const environmentExtCode = spec.APIGroup + "/environment" + +// Load loads the Environment at `path`. 
It automatically detects whether to +// load inline or statically +func Load(path string, opts Opts) (*LoadResult, error) { + env, err := LoadEnvironment(path, opts) + if err != nil { + return nil, err + } + + result, err := LoadManifests(env, opts.Filters) + if err != nil { + return nil, err + } + + // Check if there are still any inline environments in the manifests + // They are not real k8s resources, and cannot be applied + if envs := process.Filter(result.Resources, process.MustStrExps("Environment/.*")); len(envs) > 0 { + return nil, errors.New("found a tanka Environment resource. Check that you aren't using a spec.json and inline environments simultaneously") + } + + return result, nil +} + +func LoadEnvironment(path string, opts Opts) (*v1alpha1.Environment, error) { + loader, err := DetectLoader(path) + if err != nil { + return nil, err + } + + env, err := loader.Load(path, LoaderOpts{opts.JsonnetOpts, opts.Name}) + if err != nil { + return nil, err + } + + return env, nil +} + +func LoadManifests(env *v1alpha1.Environment, filters process.Matchers) (*LoadResult, error) { + if err := checkVersion(env.Spec.ExpectVersions.Tanka); err != nil { + return nil, err + } + + processed, err := process.Process(*env, filters) + if err != nil { + return nil, err + } + + return &LoadResult{Env: env, Resources: processed}, nil +} + +// Peek loads the metadata of the environment at path. To get resources as well, +// use Load +func Peek(path string, opts Opts) (*v1alpha1.Environment, error) { + loader, err := DetectLoader(path) + if err != nil { + return nil, err + } + + return loader.Peek(path, LoaderOpts{opts.JsonnetOpts, opts.Name}) +} + +// List finds metadata of all environments at path that could possibly be +// loaded. List can be used to deal with multiple inline environments, by first +// listing them, choosing the right one and then only loading that one +func List(path string, opts Opts) ([]*v1alpha1.Environment, error) { + loader, err := DetectLoader(path) + if err != nil { + return nil, err + } + + return loader.List(path, LoaderOpts{opts.JsonnetOpts, opts.Name}) +} + +// Eval returns the raw evaluated Jsonnet +func Eval(path string, opts Opts) (interface{}, error) { + loader, err := DetectLoader(path) + if err != nil { + return nil, err + } + + return loader.Eval(path, LoaderOpts{opts.JsonnetOpts, opts.Name}) +} + +// DetectLoader detects whether the environment is inline or static and picks +// the approriate loader +func DetectLoader(path string) (Loader, error) { + _, base, err := jpath.Dirs(path) + if err != nil { + return nil, err + } + + // check if spec.json exists + _, err = os.Stat(filepath.Join(base, spec.Specfile)) + if os.IsNotExist(err) { + return &InlineLoader{}, nil + } else if err != nil { + return nil, err + } + + return &StaticLoader{}, nil +} + +// Loader is an abstraction over the process of loading Environments +type Loader interface { + // Load a single environment at path + Load(path string, opts LoaderOpts) (*v1alpha1.Environment, error) + + // Peek only loads metadata and omits the actual resources + Peek(path string, opts LoaderOpts) (*v1alpha1.Environment, error) + + // List returns metadata of all possible environments at path that can be + // loaded + List(path string, opts LoaderOpts) ([]*v1alpha1.Environment, error) + + // Eval returns the raw evaluated Jsonnet + Eval(path string, opts LoaderOpts) (interface{}, error) +} + +type LoaderOpts struct { + JsonnetOpts + Name string +} + +type LoadResult struct { + Env *v1alpha1.Environment + Resources 
manifest.List +} + +func (l LoadResult) Connect() (*kubernetes.Kubernetes, error) { + env := *l.Env + + // check env is complete + s := "" + if env.Spec.APIServer == "" { + s += " * spec.apiServer: No Kubernetes cluster endpoint specified" + } + if env.Spec.Namespace == "" { + s += " * spec.namespace: Default namespace missing" + } + if s != "" { + return nil, fmt.Errorf("Your Environment's spec.json seems incomplete:\n%s\n\nPlease see https://tanka.dev/config for reference", s) + } + + // connect client + kube, err := kubernetes.New(env) + if err != nil { + return nil, errors.Wrap(err, "connecting to Kubernetes") + } + + return kube, nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/parallel.go b/vendor/github.com/grafana/tanka/pkg/tanka/parallel.go new file mode 100644 index 0000000..4fb0a1c --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/parallel.go @@ -0,0 +1,98 @@ +package tanka + +import ( + "fmt" + "log" + "path/filepath" + + "k8s.io/apimachinery/pkg/labels" + + "github.com/grafana/tanka/pkg/jsonnet/jpath" + "github.com/grafana/tanka/pkg/spec/v1alpha1" + "github.com/pkg/errors" +) + +const defaultParallelism = 8 + +type parallelOpts struct { + Opts + Selector labels.Selector + Parallelism int +} + +// parallelLoadEnvironments evaluates multiple environments in parallel +func parallelLoadEnvironments(envs []*v1alpha1.Environment, opts parallelOpts) ([]*v1alpha1.Environment, error) { + jobsCh := make(chan parallelJob) + outCh := make(chan parallelOut, len(envs)) + + if opts.Parallelism <= 0 { + opts.Parallelism = defaultParallelism + } + + for i := 0; i < opts.Parallelism; i++ { + go parallelWorker(jobsCh, outCh) + } + + for _, env := range envs { + o := opts.Opts + + // TODO: This is required because the map[string]string in here is not + // concurrency-safe. Instead of putting this burden on the caller, find + // a way to handle this inside the jsonnet package. 
A possible way would + // be to make the jsonnet package less general, more tightly coupling it + // to Tanka workflow thus being able to handle such cases + o.JsonnetOpts = o.JsonnetOpts.Clone() + + o.Name = env.Metadata.Name + path := env.Metadata.Namespace + rootDir, err := jpath.FindRoot(path) + if err != nil { + return nil, errors.Wrap(err, "finding root") + } + jobsCh <- parallelJob{ + path: filepath.Join(rootDir, path), + opts: o, + } + } + close(jobsCh) + + var outenvs []*v1alpha1.Environment + var errors []error + for i := 0; i < len(envs); i++ { + out := <-outCh + if out.err != nil { + errors = append(errors, out.err) + continue + } + if opts.Selector == nil || opts.Selector.Empty() || opts.Selector.Matches(out.env.Metadata) { + outenvs = append(outenvs, out.env) + } + } + + if len(errors) != 0 { + return outenvs, ErrParallel{errors: errors} + } + + return outenvs, nil +} + +type parallelJob struct { + path string + opts Opts +} + +type parallelOut struct { + env *v1alpha1.Environment + err error +} + +func parallelWorker(jobsCh <-chan parallelJob, outCh chan parallelOut) { + for job := range jobsCh { + log.Printf("Loading %s from %s", job.opts.Name, job.path) + env, err := LoadEnvironment(job.path, job.opts) + if err != nil { + err = fmt.Errorf("%s:\n %w", job.path, err) + } + outCh <- parallelOut{env: env, err: err} + } +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/parse.go b/vendor/github.com/grafana/tanka/pkg/tanka/parse.go deleted file mode 100644 index 0c1068f..0000000 --- a/vendor/github.com/grafana/tanka/pkg/tanka/parse.go +++ /dev/null @@ -1,188 +0,0 @@ -package tanka - -import ( - "encoding/json" - "fmt" - "log" - "path/filepath" - - "github.com/Masterminds/semver" - "github.com/pkg/errors" - - "github.com/grafana/tanka/pkg/jsonnet" - "github.com/grafana/tanka/pkg/jsonnet/jpath" - "github.com/grafana/tanka/pkg/kubernetes" - "github.com/grafana/tanka/pkg/kubernetes/manifest" - "github.com/grafana/tanka/pkg/process" - "github.com/grafana/tanka/pkg/spec" - "github.com/grafana/tanka/pkg/spec/v1alpha1" -) - -// DEFAULT_DEV_VERSION is the placeholder version used when no actual semver is -// provided using ldflags -const DEFAULT_DEV_VERSION = "dev" - -// CURRENT_VERSION is the current version of the running Tanka code -var CURRENT_VERSION = DEFAULT_DEV_VERSION - -// loaded is the final result of all processing stages: -// 1. jpath.Resolve: Consruct import paths -// 2. parseSpec: load spec.json -// 3. evalJsonnet: evaluate Jsonnet to JSON -// 4. process.Process: post-processing -// -// Also connect() is provided to connect to the cluster for live operations -type loaded struct { - Env *v1alpha1.Config - Resources manifest.List -} - -// connect opens a connection to the backing Kubernetes cluster. 
-func (p *loaded) connect() (*kubernetes.Kubernetes, error) { - env := *p.Env - - // check env is complete - s := "" - if env.Spec.APIServer == "" { - s += " * spec.apiServer: No Kubernetes cluster endpoint specified" - } - if env.Spec.Namespace == "" { - s += " * spec.namespace: Default namespace missing" - } - if s != "" { - return nil, fmt.Errorf("Your Environment's spec.json seems incomplete:\n%s\n\nPlease see https://tanka.dev/config for reference", s) - } - - // connect client - kube, err := kubernetes.New(env) - if err != nil { - return nil, errors.Wrap(err, "connecting to Kubernetes") - } - - return kube, nil -} - -// load runs all processing stages described at the Processed type -func load(dir string, opts Opts) (*loaded, error) { - raw, env, err := eval(dir, opts.JsonnetOpts) - if err != nil { - return nil, err - } - - if err := checkVersion(env.Spec.ExpectVersions.Tanka); err != nil { - return nil, err - } - - rec, err := process.Process(raw, *env, opts.Filters) - if err != nil { - return nil, err - } - - return &loaded{ - Resources: rec, - Env: env, - }, nil -} - -// eval runs all processing stages describe at the Processed type apart from -// post-processing, thus returning the raw Jsonnet result. -func eval(dir string, opts jsonnet.Opts) (raw interface{}, env *v1alpha1.Config, err error) { - _, baseDir, rootDir, err := jpath.Resolve(dir) - if err != nil { - return nil, nil, errors.Wrap(err, "resolving jpath") - } - - env, err = parseSpec(baseDir, rootDir) - if err != nil { - return nil, nil, err - } - - raw, err = evalJsonnet(baseDir, env, opts) - if err != nil { - return nil, nil, errors.Wrap(err, "evaluating jsonnet") - } - - return raw, env, nil -} - -// parseEnv parses the `spec.json` of the environment and returns a -// *kubernetes.Kubernetes from it -func parseSpec(baseDir, rootDir string) (*v1alpha1.Config, error) { - // name of the environment: relative path from rootDir - name, _ := filepath.Rel(rootDir, baseDir) - - config, err := spec.ParseDir(baseDir, name) - if err != nil { - switch err.(type) { - // the config includes deprecated fields - case spec.ErrDeprecated: - log.Println(err) - // spec.json missing. 
we can still work with the default value - case spec.ErrNoSpec: - return config, nil - // some other error - default: - return nil, errors.Wrap(err, "reading spec.json") - } - } - - return config, nil -} - -// evalJsonnet evaluates the jsonnet environment at the given directory starting with -// `main.jsonnet` -func evalJsonnet(baseDir string, env *v1alpha1.Config, opts jsonnet.Opts) (interface{}, error) { - // make env spec accessible from Jsonnet - jsonEnv, err := json.Marshal(env) - if err != nil { - return nil, errors.Wrap(err, "marshalling environment config") - } - opts.ExtCode.Set(spec.APIGroup+"/environment", string(jsonEnv)) - - // evaluate Jsonnet - var raw string - mainFile := filepath.Join(baseDir, "main.jsonnet") - if opts.EvalPattern != "" { - evalScript := fmt.Sprintf("(import '%s').%s", mainFile, opts.EvalPattern) - raw, err = jsonnet.Evaluate(mainFile, evalScript, opts) - if err != nil { - return nil, err - } - } else { - raw, err = jsonnet.EvaluateFile(mainFile, opts) - if err != nil { - return nil, err - } - } - // parse result - var data interface{} - if err := json.Unmarshal([]byte(raw), &data); err != nil { - return nil, err - } - return data, nil -} - -func checkVersion(constraint string) error { - if constraint == "" { - return nil - } - if CURRENT_VERSION == DEFAULT_DEV_VERSION { - return nil - } - - c, err := semver.NewConstraint(constraint) - if err != nil { - return fmt.Errorf("Parsing version constraint: '%w'. Please check 'spec.expectVersions.tanka'", err) - } - - v, err := semver.NewVersion(CURRENT_VERSION) - if err != nil { - return fmt.Errorf("'%s' is not a valid semantic version: '%w'.\nThis likely means your build of Tanka is broken, as this is a compile-time value. When in doubt, please raise an issue", CURRENT_VERSION, err) - } - - if !c.Check(v) { - return fmt.Errorf("Current version '%s' does not satisfy the version required by the environment: '%s'. You likely need to use another version of Tanka", CURRENT_VERSION, constraint) - } - - return nil -} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/prune.go b/vendor/github.com/grafana/tanka/pkg/tanka/prune.go index c6178e7..78b8a34 100644 --- a/vendor/github.com/grafana/tanka/pkg/tanka/prune.go +++ b/vendor/github.com/grafana/tanka/pkg/tanka/prune.go @@ -2,6 +2,9 @@ package tanka import ( "fmt" + "log" + + "github.com/fatih/color" "github.com/grafana/tanka/pkg/kubernetes" "github.com/grafana/tanka/pkg/term" @@ -21,11 +24,11 @@ type PruneOpts struct { // Jsonnet. It uses the `tanka.dev/environment` label to identify those. 
func Prune(baseDir string, opts PruneOpts) error { // parse jsonnet, init k8s client - p, err := load(baseDir, opts.Opts) + p, err := Load(baseDir, opts.Opts) if err != nil { return err } - kube, err := p.connect() + kube, err := p.Connect() if err != nil { return err } @@ -38,7 +41,7 @@ func Prune(baseDir string, opts PruneOpts) error { } if len(orphaned) == 0 { - fmt.Println("Nothing found to prune.") + log.Println("Nothing found to prune.") return nil } @@ -51,6 +54,22 @@ func Prune(baseDir string, opts PruneOpts) error { } fmt.Print(term.Colordiff(*diff).String()) + // print namespace removal warning + namespaces := []string{} + for _, obj := range orphaned { + if obj.Kind() == "Namespace" { + namespaces = append(namespaces, obj.Metadata().Name()) + } + } + if len(namespaces) > 0 { + warning := color.New(color.FgHiYellow, color.Bold).FprintfFunc() + warning(color.Error, "WARNING: This will delete following namespaces and all resources in them:\n") + for _, ns := range namespaces { + log.Printf(" - %s\n", ns) + } + log.Println("") + } + // prompt for confirm if opts.AutoApprove { } else if err := confirmPrompt("Pruning from", p.Env.Spec.Namespace, kube.Info()); err != nil { diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/static.go b/vendor/github.com/grafana/tanka/pkg/tanka/static.go new file mode 100644 index 0000000..4da2e6d --- /dev/null +++ b/vendor/github.com/grafana/tanka/pkg/tanka/static.go @@ -0,0 +1,102 @@ +package tanka + +import ( + "encoding/json" + "log" + + "github.com/grafana/tanka/pkg/spec" + "github.com/grafana/tanka/pkg/spec/v1alpha1" +) + +// StaticLoader loads an environment from a static file called `spec.json`. +// Jsonnet is evaluated as normal +type StaticLoader struct{} + +func (s StaticLoader) Load(path string, opts LoaderOpts) (*v1alpha1.Environment, error) { + config, err := s.Peek(path, opts) + if err != nil { + return nil, err + } + + data, err := s.Eval(path, opts) + if err != nil { + return nil, err + } + config.Data = data + + return config, nil +} + +func (s StaticLoader) Peek(path string, opts LoaderOpts) (*v1alpha1.Environment, error) { + config, err := parseStaticSpec(path) + if err != nil { + return nil, err + } + + return config, nil +} + +func (s StaticLoader) List(path string, opts LoaderOpts) ([]*v1alpha1.Environment, error) { + env, err := s.Peek(path, opts) + if err != nil { + return nil, err + } + + return []*v1alpha1.Environment{env}, nil +} + +func (s *StaticLoader) Eval(path string, opts LoaderOpts) (interface{}, error) { + config, err := s.Peek(path, opts) + if err != nil { + return nil, err + } + + envCode, err := specToExtCode(config) + if err != nil { + return nil, err + } + opts.ExtCode.Set(environmentExtCode, envCode) + + raw, err := evalJsonnet(path, opts.JsonnetOpts) + if err != nil { + return nil, err + } + + var data interface{} + if err := json.Unmarshal([]byte(raw), &data); err != nil { + return nil, err + } + + return data, nil +} + +func specToExtCode(spec *v1alpha1.Environment) (string, error) { + spec.Data = nil + data, err := json.Marshal(spec) + if err != nil { + return "", err + } + + return string(data), nil +} + +// parseStaticSpec parses the `spec.json` of the environment and returns a +// *kubernetes.Kubernetes from it +func parseStaticSpec(path string) (*v1alpha1.Environment, error) { + env, err := spec.ParseDir(path) + if err != nil { + switch err.(type) { + // the config includes deprecated fields + case spec.ErrDeprecated: + log.Println(err) + // spec.json missing. 
we can still work with the default value + case spec.ErrNoSpec: + return env, nil + // some other error + default: + return nil, err + } + } + + return env, nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/status.go b/vendor/github.com/grafana/tanka/pkg/tanka/status.go index c9e22d8..eb8cdb6 100644 --- a/vendor/github.com/grafana/tanka/pkg/tanka/status.go +++ b/vendor/github.com/grafana/tanka/pkg/tanka/status.go @@ -10,18 +10,18 @@ import ( // the individual resources of the desired state and also the status of the // client. type Info struct { - Env *v1alpha1.Config + Env *v1alpha1.Environment Resources manifest.List Client client.Info } // Status returns information about the particular environment func Status(baseDir string, opts Opts) (*Info, error) { - r, err := load(baseDir, opts) + r, err := Load(baseDir, opts) if err != nil { return nil, err } - kube, err := r.connect() + kube, err := r.Connect() if err != nil { return nil, err } diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/tanka.go b/vendor/github.com/grafana/tanka/pkg/tanka/tanka.go index a5bd6cd..d7f91a0 100644 --- a/vendor/github.com/grafana/tanka/pkg/tanka/tanka.go +++ b/vendor/github.com/grafana/tanka/pkg/tanka/tanka.go @@ -5,16 +5,55 @@ package tanka import ( + "fmt" + + "github.com/Masterminds/semver" + "github.com/grafana/tanka/pkg/jsonnet" "github.com/grafana/tanka/pkg/process" ) +type JsonnetOpts = jsonnet.Opts + // Opts specify general, optional properties that apply to all actions type Opts struct { JsonnetOpts // Filters are used to optionally select a subset of the resources Filters process.Matchers + + // Name is used to extract a single environment from multiple environments + Name string } -type JsonnetOpts = jsonnet.Opts +// DEFAULT_DEV_VERSION is the placeholder version used when no actual semver is +// provided using ldflags +const DEFAULT_DEV_VERSION = "dev" + +// CURRENT_VERSION is the current version of the running Tanka code +var CURRENT_VERSION = DEFAULT_DEV_VERSION + +func checkVersion(constraint string) error { + if constraint == "" { + return nil + } + if CURRENT_VERSION == DEFAULT_DEV_VERSION { + return nil + } + + c, err := semver.NewConstraint(constraint) + if err != nil { + return fmt.Errorf("Parsing version constraint: '%w'. Please check 'spec.expectVersions.tanka'", err) + } + + v, err := semver.NewVersion(CURRENT_VERSION) + if err != nil { + return fmt.Errorf("'%s' is not a valid semantic version: '%w'.\nThis likely means your build of Tanka is broken, as this is a compile-time value. When in doubt, please raise an issue", CURRENT_VERSION, err) + } + + if !c.Check(v) { + return fmt.Errorf("Current version '%s' does not satisfy the version required by the environment: '%s'. You likely need to use another version of Tanka", CURRENT_VERSION, constraint) + } + + return nil +} diff --git a/vendor/github.com/grafana/tanka/pkg/tanka/workflow.go b/vendor/github.com/grafana/tanka/pkg/tanka/workflow.go index bbe8f51..e0e305d 100644 --- a/vendor/github.com/grafana/tanka/pkg/tanka/workflow.go +++ b/vendor/github.com/grafana/tanka/pkg/tanka/workflow.go @@ -2,6 +2,7 @@ package tanka import ( "fmt" + "log" "github.com/fatih/color" @@ -30,11 +31,11 @@ type ApplyOpts struct { // the evaluated jsonnet to the Kubernetes cluster defined in the environments // `spec.json`. 
func Apply(baseDir string, opts ApplyOpts) error { - l, err := load(baseDir, opts.Opts) + l, err := Load(baseDir, opts.Opts) if err != nil { return err } - kube, err := l.connect() + kube, err := l.Connect() if err != nil { return err } @@ -45,7 +46,7 @@ func Apply(baseDir string, opts ApplyOpts) error { switch { case err != nil: // This is not fatal, the diff is not strictly required - fmt.Println("Error diffing:", err) + log.Println("Error diffing:", err) case diff == nil: tmp := "Warning: There are no differences. Your apply may not do anything at all." diff = &tmp @@ -88,10 +89,14 @@ func confirmPrompt(action, namespace string, info client.Info) error { type DiffOpts struct { Opts - // Strategy must be one of "native" or "subset" + // Strategy must be one of "native", "validate", or "subset" Strategy string // Summarize prints a summary, instead of the actual diff Summarize bool + // WithPrune includes objects to be deleted by prune command in the diff + WithPrune bool + // Exit with 0 even when differences are found + ExitZero bool } // Diff parses the environment at the given directory (a `baseDir`) and returns @@ -101,11 +106,11 @@ type DiffOpts struct { // The cluster information is retrieved from the environments `spec.json`. // NOTE: This function requires on `diff(1)`, `kubectl(1)` and perhaps `diffstat(1)` func Diff(baseDir string, opts DiffOpts) (*string, error) { - l, err := load(baseDir, opts.Opts) + l, err := Load(baseDir, opts.Opts) if err != nil { return nil, err } - kube, err := l.connect() + kube, err := l.Connect() if err != nil { return nil, err } @@ -114,6 +119,7 @@ func Diff(baseDir string, opts DiffOpts) (*string, error) { return kube.Diff(l.Resources, kubernetes.DiffOpts{ Summarize: opts.Summarize, Strategy: opts.Strategy, + WithPrune: opts.WithPrune, }) } @@ -134,11 +140,11 @@ type DeleteOpts struct { // the generated objects from the Kubernetes cluster defined in the environment's // `spec.json`. func Delete(baseDir string, opts DeleteOpts) error { - l, err := load(baseDir, opts.Opts) + l, err := Load(baseDir, opts.Opts) if err != nil { return err } - kube, err := l.connect() + kube, err := l.Connect() if err != nil { return err } @@ -174,19 +180,10 @@ func Delete(baseDir string, opts DeleteOpts) error { // the list of Kubernetes objects. 
// Tip: use the `String()` function on the returned list to get the familiar yaml stream func Show(baseDir string, opts Opts) (manifest.List, error) { - l, err := load(baseDir, opts) + l, err := Load(baseDir, opts) if err != nil { return nil, err } return l.Resources, nil } - -// Eval returns the raw evaluated Jsonnet output (without any transformations) -func Eval(dir string, opts Opts) (raw interface{}, err error) { - r, _, err := eval(dir, opts.JsonnetOpts) - if err != nil { - return nil, err - } - return r, nil -} diff --git a/vendor/github.com/grafana/tanka/pkg/term/alert.go b/vendor/github.com/grafana/tanka/pkg/term/alert.go index f17e098..d566262 100644 --- a/vendor/github.com/grafana/tanka/pkg/term/alert.go +++ b/vendor/github.com/grafana/tanka/pkg/term/alert.go @@ -3,22 +3,42 @@ package term import ( "bufio" "fmt" + "io" "os" "github.com/pkg/errors" ) +var ErrConfirmationFailed = errors.New("aborted by user") + // Confirm asks the user for confirmation func Confirm(msg, approval string) error { - reader := bufio.NewReader(os.Stdin) - fmt.Println(msg) - fmt.Printf("Please type '%s' to confirm: ", approval) - read, err := reader.ReadString('\n') + return confirmFrom(os.Stdin, os.Stdout, msg, approval) +} + +func confirmFrom(r io.Reader, w io.Writer, msg, approval string) error { + reader := bufio.NewScanner(r) + _, err := fmt.Fprintln(w, msg) if err != nil { - return errors.Wrap(err, "reading from stdin") + return errors.Wrap(err, "writing to stdout") } - if read != approval+"\n" { - return errors.New("aborted by user") + + _, err = fmt.Fprintf(w, "Please type '%s' to confirm: ", approval) + if err != nil { + return errors.Wrap(err, "writing to stdout") } + + if !reader.Scan() { + if err := reader.Err(); err != nil { + return errors.Wrap(err, "reading from stdin") + } + + return ErrConfirmationFailed + } + + if reader.Text() != approval { + return ErrConfirmationFailed + } + return nil } diff --git a/vendor/github.com/huandu/xstrings/common.go b/vendor/github.com/huandu/xstrings/common.go index f427cc8..2aff57a 100644 --- a/vendor/github.com/huandu/xstrings/common.go +++ b/vendor/github.com/huandu/xstrings/common.go @@ -3,11 +3,15 @@ package xstrings +import ( + "bytes" +) + const bufferMaxInitGrowSize = 2048 // Lazy initialize a buffer. -func allocBuffer(orig, cur string) *stringBuilder { - output := &stringBuilder{} +func allocBuffer(orig, cur string) *bytes.Buffer { + output := &bytes.Buffer{} maxSize := len(orig) * 4 // Avoid to reserve too much memory at once. 
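The pkg/term/alert.go hunk above threads an io.Reader and io.Writer through the confirmation prompt (confirmFrom) instead of reading os.Stdin and printing to os.Stdout directly, and replaces the ad-hoc string comparison with a bufio.Scanner plus a sentinel ErrConfirmationFailed. A minimal sketch of the kind of in-package test this seam allows is shown below; the test itself is not part of this patch, and the prompt text and approval phrase are invented for illustration.

```go
package term

import (
	"bytes"
	"strings"
	"testing"
)

// Sketch only: exercises the unexported confirmFrom seam added in this patch.
func TestConfirmFrom(t *testing.T) {
	out := &bytes.Buffer{}

	// The exact approval phrase, newline-terminated as a terminal would send it,
	// is accepted and confirmFrom returns nil.
	in := strings.NewReader("yes please\n")
	if err := confirmFrom(in, out, "Applying to namespace 'default'", "yes please"); err != nil {
		t.Fatalf("expected confirmation to succeed, got: %v", err)
	}

	// Any other input aborts with the sentinel error.
	in = strings.NewReader("no\n")
	if err := confirmFrom(in, out, "Applying to namespace 'default'", "yes please"); err != ErrConfirmationFailed {
		t.Fatalf("expected ErrConfirmationFailed, got: %v", err)
	}
}
```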
diff --git a/vendor/github.com/huandu/xstrings/convert.go b/vendor/github.com/huandu/xstrings/convert.go index 3d5a349..3d58fa8 100644 --- a/vendor/github.com/huandu/xstrings/convert.go +++ b/vendor/github.com/huandu/xstrings/convert.go @@ -4,6 +4,7 @@ package xstrings import ( + "bytes" "math/rand" "unicode" "unicode/utf8" @@ -22,7 +23,7 @@ func ToCamelCase(str string) string { return "" } - buf := &stringBuilder{} + buf := &bytes.Buffer{} var r0, r1 rune var size int @@ -111,7 +112,7 @@ func camelCaseToLowerCase(str string, connector rune) string { return "" } - buf := &stringBuilder{} + buf := &bytes.Buffer{} wt, word, remaining := nextWord(str) for len(remaining) > 0 { @@ -373,7 +374,7 @@ func nextValidRune(str string, prev rune) (r rune, size int) { return } -func toLower(buf *stringBuilder, wt wordType, str string, connector rune) { +func toLower(buf *bytes.Buffer, wt wordType, str string, connector rune) { buf.Grow(buf.Len() + len(str)) if wt != upperCaseWord && wt != connectorWord { @@ -400,7 +401,7 @@ func SwapCase(str string) string { var r rune var size int - buf := &stringBuilder{} + buf := &bytes.Buffer{} for len(str) > 0 { r, size = utf8.DecodeRuneInString(str) @@ -434,7 +435,7 @@ func FirstRuneToUpper(str string) string { return str } - buf := &stringBuilder{} + buf := &bytes.Buffer{} buf.WriteRune(unicode.ToUpper(r)) buf.WriteString(str[size:]) return buf.String() @@ -452,7 +453,7 @@ func FirstRuneToLower(str string) string { return str } - buf := &stringBuilder{} + buf := &bytes.Buffer{} buf.WriteRune(unicode.ToLower(r)) buf.WriteString(str[size:]) return buf.String() @@ -565,7 +566,7 @@ func Successor(str string) string { // Needs to add one character for carry. if i < 0 && carry != ' ' { - buf := &stringBuilder{} + buf := &bytes.Buffer{} buf.Grow(l + 4) // Reserve enough space for write. 
if lastAlphanumeric != 0 { diff --git a/vendor/github.com/huandu/xstrings/format.go b/vendor/github.com/huandu/xstrings/format.go index 8cd76c5..2d02df1 100644 --- a/vendor/github.com/huandu/xstrings/format.go +++ b/vendor/github.com/huandu/xstrings/format.go @@ -4,6 +4,7 @@ package xstrings import ( + "bytes" "unicode/utf8" ) @@ -27,7 +28,7 @@ func ExpandTabs(str string, tabSize int) string { var r rune var i, size, column, expand int - var output *stringBuilder + var output *bytes.Buffer orig := str @@ -42,7 +43,7 @@ func ExpandTabs(str string, tabSize int) string { } for i = 0; i < expand; i++ { - output.WriteRune(' ') + output.WriteByte(byte(' ')) } column += expand @@ -87,7 +88,7 @@ func LeftJustify(str string, length int, pad string) string { remains := length - l padLen := Len(pad) - output := &stringBuilder{} + output := &bytes.Buffer{} output.Grow(len(str) + (remains/padLen+1)*len(pad)) output.WriteString(str) writePadString(output, pad, padLen, remains) @@ -113,7 +114,7 @@ func RightJustify(str string, length int, pad string) string { remains := length - l padLen := Len(pad) - output := &stringBuilder{} + output := &bytes.Buffer{} output.Grow(len(str) + (remains/padLen+1)*len(pad)) writePadString(output, pad, padLen, remains) output.WriteString(str) @@ -139,7 +140,7 @@ func Center(str string, length int, pad string) string { remains := length - l padLen := Len(pad) - output := &stringBuilder{} + output := &bytes.Buffer{} output.Grow(len(str) + (remains/padLen+1)*len(pad)) writePadString(output, pad, padLen, remains/2) output.WriteString(str) @@ -147,7 +148,7 @@ func Center(str string, length int, pad string) string { return output.String() } -func writePadString(output *stringBuilder, pad string, padLen, remains int) { +func writePadString(output *bytes.Buffer, pad string, padLen, remains int) { var r rune var size int diff --git a/vendor/github.com/huandu/xstrings/manipulate.go b/vendor/github.com/huandu/xstrings/manipulate.go index 64075f9..0eefb43 100644 --- a/vendor/github.com/huandu/xstrings/manipulate.go +++ b/vendor/github.com/huandu/xstrings/manipulate.go @@ -4,6 +4,7 @@ package xstrings import ( + "bytes" "strings" "unicode/utf8" ) @@ -130,7 +131,7 @@ func Insert(dst, src string, index int) string { // Scrub scrubs invalid utf8 bytes with repl string. // Adjacent invalid bytes are replaced only once. 
func Scrub(str, repl string) string { - var buf *stringBuilder + var buf *bytes.Buffer var r rune var size, pos int var hasError bool @@ -143,7 +144,7 @@ func Scrub(str, repl string) string { if r == utf8.RuneError { if !hasError { if buf == nil { - buf = &stringBuilder{} + buf = &bytes.Buffer{} } buf.WriteString(origin[:pos]) diff --git a/vendor/github.com/huandu/xstrings/stringbuilder.go b/vendor/github.com/huandu/xstrings/stringbuilder.go deleted file mode 100644 index bb0919d..0000000 --- a/vendor/github.com/huandu/xstrings/stringbuilder.go +++ /dev/null @@ -1,7 +0,0 @@ -//+build go1.10 - -package xstrings - -import "strings" - -type stringBuilder = strings.Builder diff --git a/vendor/github.com/huandu/xstrings/stringbuilder_go110.go b/vendor/github.com/huandu/xstrings/stringbuilder_go110.go deleted file mode 100644 index dac389d..0000000 --- a/vendor/github.com/huandu/xstrings/stringbuilder_go110.go +++ /dev/null @@ -1,9 +0,0 @@ -//+build !go1.10 - -package xstrings - -import "bytes" - -type stringBuilder struct { - bytes.Buffer -} diff --git a/vendor/github.com/huandu/xstrings/translate.go b/vendor/github.com/huandu/xstrings/translate.go index 42e694f..66e23f8 100644 --- a/vendor/github.com/huandu/xstrings/translate.go +++ b/vendor/github.com/huandu/xstrings/translate.go @@ -4,6 +4,7 @@ package xstrings import ( + "bytes" "unicode" "unicode/utf8" ) @@ -151,12 +152,12 @@ func NewTranslator(from, to string) *Translator { continue } - _, toStart = tr.addRuneRange(fromStart, fromEnd, toStart, toStart, singleRunes) + fromStart, toStart = tr.addRuneRange(fromStart, fromEnd, toStart, toStart, singleRunes) fromEnd = utf8.RuneError } if fromEnd != utf8.RuneError { - tr.addRune(fromEnd, toStart, singleRunes) + singleRunes = tr.addRune(fromEnd, toStart, singleRunes) } tr.reverted = reverted @@ -302,7 +303,7 @@ func (tr *Translator) Translate(str string) string { orig := str - var output *stringBuilder + var output *bytes.Buffer for len(str) > 0 { r, size = utf8.DecodeRuneInString(str) @@ -499,7 +500,7 @@ func Squeeze(str, pattern string) string { var size int var skipSqueeze, matched bool var tr *Translator - var output *stringBuilder + var output *bytes.Buffer orig := str last = -1 diff --git a/vendor/github.com/imdario/mergo/README.md b/vendor/github.com/imdario/mergo/README.md index 02fc81e..876abb5 100644 --- a/vendor/github.com/imdario/mergo/README.md +++ b/vendor/github.com/imdario/mergo/README.md @@ -1,44 +1,54 @@ # Mergo -A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements. - -Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region of Marche. - -## Status - -It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc](https://github.com/imdario/mergo#mergo-in-the-wild). 
[![GoDoc][3]][4] -[![GoCard][5]][6] +[![GitHub release][5]][6] +[![GoCard][7]][8] [![Build Status][1]][2] -[![Coverage Status][7]][8] -[![Sourcegraph][9]][10] -[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield) +[![Coverage Status][9]][10] +[![Sourcegraph][11]][12] +[![FOSSA Status][13]][14] + +[![GoCenter Kudos][15]][16] [1]: https://travis-ci.org/imdario/mergo.png [2]: https://travis-ci.org/imdario/mergo [3]: https://godoc.org/github.com/imdario/mergo?status.svg [4]: https://godoc.org/github.com/imdario/mergo -[5]: https://goreportcard.com/badge/imdario/mergo -[6]: https://goreportcard.com/report/github.com/imdario/mergo -[7]: https://coveralls.io/repos/github/imdario/mergo/badge.svg?branch=master -[8]: https://coveralls.io/github/imdario/mergo?branch=master -[9]: https://sourcegraph.com/github.com/imdario/mergo/-/badge.svg -[10]: https://sourcegraph.com/github.com/imdario/mergo?badge +[5]: https://img.shields.io/github/release/imdario/mergo.svg +[6]: https://github.com/imdario/mergo/releases +[7]: https://goreportcard.com/badge/imdario/mergo +[8]: https://goreportcard.com/report/github.com/imdario/mergo +[9]: https://coveralls.io/repos/github/imdario/mergo/badge.svg?branch=master +[10]: https://coveralls.io/github/imdario/mergo?branch=master +[11]: https://sourcegraph.com/github.com/imdario/mergo/-/badge.svg +[12]: https://sourcegraph.com/github.com/imdario/mergo?badge +[13]: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=shield +[14]: https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield +[15]: https://search.gocenter.io/api/ui/badge/github.com%2Fimdario%2Fmergo +[16]: https://search.gocenter.io/github.com/imdario/mergo -### Latest release +A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements. -[Release v0.3.7](https://github.com/imdario/mergo/releases/tag/v0.3.7). +Mergo merges same-type structs and maps by setting default values in zero-value fields. Mergo won't merge unexported (private) fields. It will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection). + +Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region of Marche. + +## Status + +It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc](https://github.com/imdario/mergo#mergo-in-the-wild). ### Important note -Please keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2) Mergo changed `Merge()`and `Map()` signatures to support [transformers](#transformers). An optional/variadic argument has been added, so it won't break existing code. +Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds suppot for go modules. -If you were using Mergo **before** April 6th 2015, please check your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause (I hope it won't!) in existing projects after the change (release 0.2.0). 
+Keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2), Mergo changed `Merge()`and `Map()` signatures to support [transformers](#transformers). I added an optional/variadic argument so that it won't break the existing code. + +If you were using Mergo before April 6th, 2015, please check your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause in existing projects after the change (release 0.2.0). ### Donations -If Mergo is useful to you, consider buying me a coffee, a beer or making a monthly donation so I can keep building great free software. :heart_eyes: +If Mergo is useful to you, consider buying me a coffee, a beer, or making a monthly donation to allow me to keep building great free software. :heart_eyes: Buy Me a Coffee at ko-fi.com [![Beerpay](https://beerpay.io/imdario/mergo/badge.svg)](https://beerpay.io/imdario/mergo) @@ -87,8 +97,9 @@ If Mergo is useful to you, consider buying me a coffee, a beer or making a month - [mantasmatelis/whooplist-server](https://github.com/mantasmatelis/whooplist-server) - [jnuthong/item_search](https://github.com/jnuthong/item_search) - [bukalapak/snowboard](https://github.com/bukalapak/snowboard) +- [janoszen/containerssh](https://github.com/janoszen/containerssh) -## Installation +## Install go get github.com/imdario/mergo @@ -99,7 +110,7 @@ If Mergo is useful to you, consider buying me a coffee, a beer or making a month ## Usage -You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. It won't merge empty structs value as [they are not considered zero values](https://golang.org/ref/spec#The_zero_value) either. Also maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection). +You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. It won't merge empty structs value as [they are zero values](https://golang.org/ref/spec#The_zero_value) too. Also, maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection). ```go if err := mergo.Merge(&dst, src); err != nil { @@ -125,9 +136,7 @@ if err := mergo.Map(&dst, srcMap); err != nil { Warning: if you map a struct to map, it won't do it recursively. Don't expect Mergo to map struct members of your struct as `map[string]interface{}`. They will be just assigned as values. -More information and examples in [godoc documentation](http://godoc.org/github.com/imdario/mergo). 
- -### Nice example +Here is a nice example: ```go package main @@ -175,10 +184,10 @@ import ( "time" ) -type timeTransfomer struct { +type timeTransformer struct { } -func (t timeTransfomer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error { +func (t timeTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error { if typ == reflect.TypeOf(time.Time{}) { return func(dst, src reflect.Value) error { if dst.CanSet() { @@ -202,7 +211,7 @@ type Snapshot struct { func main() { src := Snapshot{time.Now()} dest := Snapshot{} - mergo.Merge(&dest, src, mergo.WithTransformers(timeTransfomer{})) + mergo.Merge(&dest, src, mergo.WithTransformers(timeTransformer{})) fmt.Println(dest) // Will print // { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 } diff --git a/vendor/github.com/imdario/mergo/doc.go b/vendor/github.com/imdario/mergo/doc.go index 6e9aa7b..fcd985f 100644 --- a/vendor/github.com/imdario/mergo/doc.go +++ b/vendor/github.com/imdario/mergo/doc.go @@ -4,41 +4,140 @@ // license that can be found in the LICENSE file. /* -Package mergo merges same-type structs and maps by setting default values in zero-value fields. +A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements. -Mergo won't merge unexported (private) fields but will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection). +Mergo merges same-type structs and maps by setting default values in zero-value fields. Mergo won't merge unexported (private) fields. It will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection). + +Status + +It is ready for production use. It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc. + +Important note + +Please keep in mind that a problematic PR broke 0.3.9. We reverted it in 0.3.10. We consider 0.3.10 as stable but not bug-free. . Also, this version adds suppot for go modules. + +Keep in mind that in 0.3.2, Mergo changed Merge() and Map() signatures to support transformers. We added an optional/variadic argument so that it won't break the existing code. + +If you were using Mergo before April 6th, 2015, please check your project works as intended after updating your local copy with go get -u github.com/imdario/mergo. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause in existing projects after the change (release 0.2.0). + +Install + +Do your usual installation procedure: + + go get github.com/imdario/mergo + + // use in your .go code + import ( + "github.com/imdario/mergo" + ) Usage -From my own work-in-progress project: +You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. It won't merge empty structs value as they are zero values too. Also, maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection). + + if err := mergo.Merge(&dst, src); err != nil { + // ... + } + +Also, you can merge overwriting values using the transformer WithOverride. + + if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil { + // ... + } + +Additionally, you can map a map[string]interface{} to a struct (and otherwise, from struct to map), following the same restrictions as in Merge(). 
Keys are capitalized to find each corresponding exported field. + + if err := mergo.Map(&dst, srcMap); err != nil { + // ... + } + +Warning: if you map a struct to map, it won't do it recursively. Don't expect Mergo to map struct members of your struct as map[string]interface{}. They will be just assigned as values. + +Here is a nice example: + + package main + + import ( + "fmt" + "github.com/imdario/mergo" + ) - type networkConfig struct { - Protocol string - Address string - ServerType string `json: "server_type"` - Port uint16 + type Foo struct { + A string + B int64 } - type FssnConfig struct { - Network networkConfig + func main() { + src := Foo{ + A: "one", + B: 2, + } + dest := Foo{ + A: "two", + } + mergo.Merge(&dest, src) + fmt.Println(dest) + // Will print + // {two 2} } - var fssnDefault = FssnConfig { - networkConfig { - "tcp", - "127.0.0.1", - "http", - 31560, - }, +Transformers + +Transformers allow to merge specific types differently than in the default behavior. In other words, now you can customize how some types are merged. For example, time.Time is a struct; it doesn't have zero value but IsZero can return true because it has fields with zero value. How can we merge a non-zero time.Time? + + package main + + import ( + "fmt" + "github.com/imdario/mergo" + "reflect" + "time" + ) + + type timeTransformer struct { } - // Inside a function [...] + func (t timeTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error { + if typ == reflect.TypeOf(time.Time{}) { + return func(dst, src reflect.Value) error { + if dst.CanSet() { + isZero := dst.MethodByName("IsZero") + result := isZero.Call([]reflect.Value{}) + if result[0].Bool() { + dst.Set(src) + } + } + return nil + } + } + return nil + } + + type Snapshot struct { + Time time.Time + // ... + } - if err := mergo.Merge(&config, fssnDefault); err != nil { - log.Fatal(err) + func main() { + src := Snapshot{time.Now()} + dest := Snapshot{} + mergo.Merge(&dest, src, mergo.WithTransformers(timeTransformer{})) + fmt.Println(dest) + // Will print + // { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 } } - // More code [...] +Contact me + +If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): https://twitter.com/im_dario + +About + +Written by Dario Castañé: https://da.rio.hn + +License + +BSD 3-Clause license, as Go language. 
*/ package mergo diff --git a/vendor/github.com/imdario/mergo/go.mod b/vendor/github.com/imdario/mergo/go.mod new file mode 100644 index 0000000..3d689d9 --- /dev/null +++ b/vendor/github.com/imdario/mergo/go.mod @@ -0,0 +1,5 @@ +module github.com/imdario/mergo + +go 1.13 + +require gopkg.in/yaml.v2 v2.3.0 diff --git a/vendor/github.com/imdario/mergo/go.sum b/vendor/github.com/imdario/mergo/go.sum new file mode 100644 index 0000000..168980d --- /dev/null +++ b/vendor/github.com/imdario/mergo/go.sum @@ -0,0 +1,4 @@ +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/imdario/mergo/map.go b/vendor/github.com/imdario/mergo/map.go index d83258b..a13a7ee 100644 --- a/vendor/github.com/imdario/mergo/map.go +++ b/vendor/github.com/imdario/mergo/map.go @@ -99,11 +99,11 @@ func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, conf continue } if srcKind == dstKind { - if _, err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil { + if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil { return } } else if dstKind == reflect.Interface && dstElement.Kind() == reflect.Interface { - if _, err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil { + if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil { return } } else if srcKind == reflect.Map { @@ -141,6 +141,9 @@ func MapWithOverwrite(dst, src interface{}, opts ...func(*Config)) error { } func _map(dst, src interface{}, opts ...func(*Config)) error { + if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr { + return ErrNonPointerAgument + } var ( vDst, vSrc reflect.Value err error @@ -157,8 +160,7 @@ func _map(dst, src interface{}, opts ...func(*Config)) error { // To be friction-less, we redirect equal-type arguments // to deepMerge. Only because arguments can be anything. 
if vSrc.Kind() == vDst.Kind() { - _, err := deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config) - return err + return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config) } switch vSrc.Kind() { case reflect.Struct: diff --git a/vendor/github.com/imdario/mergo/merge.go b/vendor/github.com/imdario/mergo/merge.go index 3332c9c..afa84a1 100644 --- a/vendor/github.com/imdario/mergo/merge.go +++ b/vendor/github.com/imdario/mergo/merge.go @@ -11,26 +11,26 @@ package mergo import ( "fmt" "reflect" - "unsafe" ) -func hasExportedField(dst reflect.Value) (exported bool) { +func hasMergeableFields(dst reflect.Value) (exported bool) { for i, n := 0, dst.NumField(); i < n; i++ { field := dst.Type().Field(i) - if isExportedComponent(&field) { - return true + if field.Anonymous && dst.Field(i).Kind() == reflect.Struct { + exported = exported || hasMergeableFields(dst.Field(i)) + } else if isExportedComponent(&field) { + exported = exported || len(field.PkgPath) == 0 } } return } func isExportedComponent(field *reflect.StructField) bool { - name := field.Name pkgPath := field.PkgPath if len(pkgPath) > 0 { return false } - c := name[0] + c := field.Name[0] if 'a' <= c && c <= 'z' || c == '_' { return false } @@ -44,6 +44,8 @@ type Config struct { Transformers Transformers overwriteWithEmptyValue bool overwriteSliceWithEmptyValue bool + sliceDeepCopy bool + debug bool } type Transformers interface { @@ -53,17 +55,16 @@ type Transformers interface { // Traverses recursively both values, assigning src's fields values to dst. // The map argument tracks comparisons that have already been seen, which allows // short circuiting on recursive types. -func deepMerge(dstIn, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (dst reflect.Value, err error) { - dst = dstIn +func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) { overwrite := config.Overwrite typeCheck := config.TypeCheck overwriteWithEmptySrc := config.overwriteWithEmptyValue overwriteSliceWithEmptySrc := config.overwriteSliceWithEmptyValue + sliceDeepCopy := config.sliceDeepCopy if !src.IsValid() { return } - if dst.CanAddr() { addr := dst.UnsafeAddr() h := 17 * addr @@ -71,7 +72,7 @@ func deepMerge(dstIn, src reflect.Value, visited map[uintptr]*visit, depth int, typ := dst.Type() for p := seen; p != nil; p = p.next { if p.ptr == addr && p.typ == typ { - return dst, nil + return nil } } // Remember, remember... 
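For callers, the mergo changes in the hunks above and below amount to two things: the _map and merge entry points now reject a non-pointer destination up front with the new ErrNonPointerAgument sentinel, and deepMerge mutates dst in place instead of returning a rebuilt value. A small caller-side sketch follows; the config struct and its values are invented for illustration, while the behavior noted in the comments comes from the hunks in this patch.

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

type config struct {
	Host string
	Port int
}

func main() {
	dst := config{Host: "localhost"}
	src := config{Host: "example.com", Port: 8080}

	// Default Merge only fills zero-value fields of dst, so Host is kept
	// and Port is taken from src.
	if err := mergo.Merge(&dst, src); err != nil {
		fmt.Println("merge failed:", err)
		return
	}
	fmt.Printf("%+v\n", dst) // {Host:localhost Port:8080}

	// Passing the destination by value is now rejected before any reflection work.
	if err := mergo.Merge(dst, src); err != nil {
		fmt.Println("as expected:", err) // "dst must be a pointer"
	}
}
```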
@@ -85,126 +86,154 @@ func deepMerge(dstIn, src reflect.Value, visited map[uintptr]*visit, depth int, } } - if dst.IsValid() && src.IsValid() && src.Type() != dst.Type() { - err = fmt.Errorf("cannot append two different types (%s, %s)", src.Kind(), dst.Kind()) - return - } - switch dst.Kind() { case reflect.Struct: - if hasExportedField(dst) { - dstCp := reflect.New(dst.Type()).Elem() + if hasMergeableFields(dst) { for i, n := 0, dst.NumField(); i < n; i++ { - dstField := dst.Field(i) - structField := dst.Type().Field(i) - // copy un-exported struct fields - if !isExportedComponent(&structField) { - rf := dstCp.Field(i) - rf = reflect.NewAt(rf.Type(), unsafe.Pointer(rf.UnsafeAddr())).Elem() //nolint:gosec - dstRF := dst.Field(i) - if !dst.Field(i).CanAddr() { - continue - } - - dstRF = reflect.NewAt(dstRF.Type(), unsafe.Pointer(dstRF.UnsafeAddr())).Elem() //nolint:gosec - rf.Set(dstRF) - continue - } - dstField, err = deepMerge(dstField, src.Field(i), visited, depth+1, config) - if err != nil { + if err = deepMerge(dst.Field(i), src.Field(i), visited, depth+1, config); err != nil { return } - dstCp.Field(i).Set(dstField) } - - if dst.CanSet() { - dst.Set(dstCp) - } else { - dst = dstCp - } - return } else { if (isReflectNil(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc) { - dst = src + dst.Set(src) } } - case reflect.Map: if dst.IsNil() && !src.IsNil() { - if dst.CanSet() { - dst.Set(reflect.MakeMap(dst.Type())) - } else { - dst = src - return + dst.Set(reflect.MakeMap(dst.Type())) + } + + if src.Kind() != reflect.Map { + if overwrite { + dst.Set(src) } + return } + for _, key := range src.MapKeys() { srcElement := src.MapIndex(key) - dstElement := dst.MapIndex(key) if !srcElement.IsValid() { continue } - if dst.MapIndex(key).IsValid() { - k := dstElement.Interface() - dstElement = reflect.ValueOf(k) - } - if isReflectNil(srcElement) { - if overwrite || isReflectNil(dstElement) { - dst.SetMapIndex(key, srcElement) + dstElement := dst.MapIndex(key) + switch srcElement.Kind() { + case reflect.Chan, reflect.Func, reflect.Map, reflect.Interface, reflect.Slice: + if srcElement.IsNil() { + if overwrite { + dst.SetMapIndex(key, srcElement) + } + continue + } + fallthrough + default: + if !srcElement.CanInterface() { + continue + } + switch reflect.TypeOf(srcElement.Interface()).Kind() { + case reflect.Struct: + fallthrough + case reflect.Ptr: + fallthrough + case reflect.Map: + srcMapElm := srcElement + dstMapElm := dstElement + if srcMapElm.CanInterface() { + srcMapElm = reflect.ValueOf(srcMapElm.Interface()) + if dstMapElm.IsValid() { + dstMapElm = reflect.ValueOf(dstMapElm.Interface()) + } + } + if err = deepMerge(dstMapElm, srcMapElm, visited, depth+1, config); err != nil { + return + } + case reflect.Slice: + srcSlice := reflect.ValueOf(srcElement.Interface()) + + var dstSlice reflect.Value + if !dstElement.IsValid() || dstElement.IsNil() { + dstSlice = reflect.MakeSlice(srcSlice.Type(), 0, srcSlice.Len()) + } else { + dstSlice = reflect.ValueOf(dstElement.Interface()) + } + + if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy { + if typeCheck && srcSlice.Type() != dstSlice.Type() { + return fmt.Errorf("cannot override two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type()) + } + dstSlice = srcSlice + } else if config.AppendSlice { + if srcSlice.Type() != dstSlice.Type() { + return fmt.Errorf("cannot append two slices with different type (%s, %s)", 
srcSlice.Type(), dstSlice.Type()) + } + dstSlice = reflect.AppendSlice(dstSlice, srcSlice) + } else if sliceDeepCopy { + i := 0 + for ; i < srcSlice.Len() && i < dstSlice.Len(); i++ { + srcElement := srcSlice.Index(i) + dstElement := dstSlice.Index(i) + + if srcElement.CanInterface() { + srcElement = reflect.ValueOf(srcElement.Interface()) + } + if dstElement.CanInterface() { + dstElement = reflect.ValueOf(dstElement.Interface()) + } + + if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil { + return + } + } + + } + dst.SetMapIndex(key, dstSlice) } - continue } - if !srcElement.CanInterface() { + if dstElement.IsValid() && !isEmptyValue(dstElement) && (reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map || reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice) { continue } - if srcElement.CanInterface() { - srcElement = reflect.ValueOf(srcElement.Interface()) - if dstElement.IsValid() { - dstElement = reflect.ValueOf(dstElement.Interface()) + if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement)) { + if dst.IsNil() { + dst.Set(reflect.MakeMap(dst.Type())) } + dst.SetMapIndex(key, srcElement) } - dstElement, err = deepMerge(dstElement, srcElement, visited, depth+1, config) - if err != nil { - return - } - dst.SetMapIndex(key, dstElement) - } case reflect.Slice: - newSlice := dst - if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice { - if typeCheck && src.Type() != dst.Type() { - return dst, fmt.Errorf("cannot override two slices with different type (%s, %s)", src.Type(), dst.Type()) - } - newSlice = src - } else if config.AppendSlice { - if typeCheck && src.Type() != dst.Type() { - err = fmt.Errorf("cannot append two slice with different type (%s, %s)", src.Type(), dst.Type()) - return - } - newSlice = reflect.AppendSlice(dst, src) - } - if dst.CanSet() { - dst.Set(newSlice) - } else { - dst = newSlice - } - case reflect.Ptr, reflect.Interface: - if isReflectNil(src) { + if !dst.CanSet() { break } + if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy { + dst.Set(src) + } else if config.AppendSlice { + if src.Type() != dst.Type() { + return fmt.Errorf("cannot append two slice with different type (%s, %s)", src.Type(), dst.Type()) + } + dst.Set(reflect.AppendSlice(dst, src)) + } else if sliceDeepCopy { + for i := 0; i < src.Len() && i < dst.Len(); i++ { + srcElement := src.Index(i) + dstElement := dst.Index(i) + if srcElement.CanInterface() { + srcElement = reflect.ValueOf(srcElement.Interface()) + } + if dstElement.CanInterface() { + dstElement = reflect.ValueOf(dstElement.Interface()) + } - if dst.Kind() != reflect.Ptr && src.Type().AssignableTo(dst.Type()) { - if dst.IsNil() || overwrite { - if overwrite || isEmptyValue(dst) { - if dst.CanSet() { - dst.Set(src) - } else { - dst = src - } + if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil { + return } } + } + case reflect.Ptr: + fallthrough + case reflect.Interface: + if isReflectNil(src) { + if overwriteWithEmptySrc && dst.CanSet() && src.Type().AssignableTo(dst.Type()) { + dst.Set(src) + } break } @@ -214,33 +243,35 @@ func deepMerge(dstIn, src reflect.Value, visited map[uintptr]*visit, depth int, dst.Set(src) } } else if src.Kind() == reflect.Ptr { - if dst, err = deepMerge(dst.Elem(), src.Elem(), 
visited, depth+1, config); err != nil { + if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil { return } - dst = dst.Addr() } else if dst.Elem().Type() == src.Type() { - if dst, err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil { + if err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil { return } } else { - return dst, ErrDifferentArgumentsTypes + return ErrDifferentArgumentsTypes } break } + if dst.IsNil() || overwrite { - if (overwrite || isEmptyValue(dst)) && (overwriteWithEmptySrc || !isEmptyValue(src)) { - if dst.CanSet() { - dst.Set(src) - } else { - dst = src - } + if dst.CanSet() && (overwrite || isEmptyValue(dst)) { + dst.Set(src) } - } else if _, err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil { - return + break + } + + if dst.Elem().Kind() == src.Elem().Kind() { + if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil { + return + } + break } default: - overwriteFull := (!isEmptyValue(src) || overwriteWithEmptySrc) && (overwrite || isEmptyValue(dst)) - if overwriteFull { + mustSet := (isEmptyValue(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc) + if mustSet { if dst.CanSet() { dst.Set(src) } else { @@ -281,6 +312,7 @@ func WithOverride(config *Config) { // WithOverwriteWithEmptyValue will make merge override non empty dst attributes with empty src attributes values. func WithOverwriteWithEmptyValue(config *Config) { + config.Overwrite = true config.overwriteWithEmptyValue = true } @@ -299,7 +331,16 @@ func WithTypeCheck(config *Config) { config.TypeCheck = true } +// WithSliceDeepCopy will merge slice element one by one with Overwrite flag. +func WithSliceDeepCopy(config *Config) { + config.sliceDeepCopy = true + config.Overwrite = true +} + func merge(dst, src interface{}, opts ...func(*Config)) error { + if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr { + return ErrNonPointerAgument + } var ( vDst, vSrc reflect.Value err error @@ -314,14 +355,10 @@ func merge(dst, src interface{}, opts ...func(*Config)) error { if vDst, vSrc, err = resolveValues(dst, src); err != nil { return err } - if !vDst.CanSet() { - return fmt.Errorf("cannot set dst, needs reference") - } if vDst.Type() != vSrc.Type() { return ErrDifferentArgumentsTypes } - _, err = deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config) - return err + return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config) } // IsReflectNil is the reflect value provided nil diff --git a/vendor/github.com/imdario/mergo/mergo.go b/vendor/github.com/imdario/mergo/mergo.go index a82fea2..3cc926c 100644 --- a/vendor/github.com/imdario/mergo/mergo.go +++ b/vendor/github.com/imdario/mergo/mergo.go @@ -20,6 +20,7 @@ var ( ErrNotSupported = errors.New("only structs and maps are supported") ErrExpectedMapAsDestination = errors.New("dst was expected to be a map") ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct") + ErrNonPointerAgument = errors.New("dst must be a pointer") ) // During deepMerge, must keep track of checks that are @@ -75,23 +76,3 @@ func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) { } return } - -// Traverses recursively both values, assigning src's fields values to dst. -// The map argument tracks comparisons that have already been seen, which allows -// short circuiting on recursive types. 
-func deeper(dst, src reflect.Value, visited map[uintptr]*visit, depth int) (err error) { - if dst.CanAddr() { - addr := dst.UnsafeAddr() - h := 17 * addr - seen := visited[h] - typ := dst.Type() - for p := seen; p != nil; p = p.next { - if p.ptr == addr && p.typ == typ { - return nil - } - } - // Remember, remember... - visited[h] = &visit{addr, typ, seen} - } - return // TODO refactor -} diff --git a/vendor/github.com/karrick/godirwalk/README.md b/vendor/github.com/karrick/godirwalk/README.md index 72c51a5..0902cce 100644 --- a/vendor/github.com/karrick/godirwalk/README.md +++ b/vendor/github.com/karrick/godirwalk/README.md @@ -26,6 +26,12 @@ provided callback function. dirname := "some/directory/root" err := godirwalk.Walk(dirname, &godirwalk.Options{ Callback: func(osPathname string, de *godirwalk.Dirent) error { + // Following string operation is not most performant way + // of doing this, but common enough to warrant a simple + // example here: + if strings.Contains(osPathname, ".git") { + return godirwalk.SkipThis + } fmt.Printf("%s %s\n", de.ModeType(), osPathname) return nil }, @@ -47,8 +53,8 @@ Here's why I use `godirwalk` in preference to `filepath.Walk`, When compared against `filepath.Walk` in benchmarks, it has been observed to run between five and ten times the speed on darwin, at -speeds comparable to the that of the unix `find` utility; about twice -the speed on linux; and about four times the speed on Windows. +speeds comparable to the that of the unix `find` utility; and about +twice the speed on linux; and about four times the speed on Windows. How does it obtain this performance boost? It does less work to give you nearly the same output. This library calls the same `syscall` @@ -60,11 +66,11 @@ file system entry data from the operating system. While traversing a file system directory tree, `filepath.Walk` obtains the list of immediate descendants of a directory, and throws away the -file system node type information provided by the operating system -that comes with the node's name. Then, immediately prior to invoking -the callback function, `filepath.Walk` invokes `os.Stat` for each -node, and passes the returned `os.FileInfo` information to the -callback. +node type information for the file system entry that is provided by +the operating system that comes with the node's name. Then, +immediately prior to invoking the callback function, `filepath.Walk` +invokes `os.Stat` for each node, and passes the returned `os.FileInfo` +information to the callback. While the `os.FileInfo` information provided by `os.Stat` is extremely helpful--and even includes the `os.FileMode` data--providing it @@ -141,13 +147,20 @@ The takeaway is that behavior is different based on which platform until it is fixed in the standard library, it presents a compatibility problem. -This library correctly identifies symbolic links that point to -directories and will only follow them when `FollowSymbolicLinks` is -set to true. Behavior on Windows and other operating systems is -identical. +This library fixes the above problem such that it will never follow +logical file sytem loops on either unix or Windows. Furthermore, it +will only follow symbolic links when `FollowSymbolicLinks` is set to +true. Behavior on Windows and other operating systems is identical. 
### It's more easy to use than `filepath.Walk` +While this library strives to mimic the behavior of the incredibly +well-written `filepath.Walk` standard library, there are places where +it deviates a bit in order to provide a more easy or intuitive caller +interface. + +#### Callback interface does not send you an error to check + Since this library does not invoke `os.Stat` on every file system node it encounters, there is no possible error event for the callback function to filter on. The third argument in the `filepath.WalkFunc` @@ -155,23 +168,105 @@ function signature to pass the error from `os.Stat` to the callback function is no longer necessary, and thus eliminated from signature of the callback function from this library. -Also, `filepath.Walk` invokes the callback function with a solidus -delimited pathname regardless of the os-specific path separator. This -library invokes the callback function with the os-specific pathname -separator, obviating a call to `filepath.Clean` in the callback -function for each node prior to actually using the provided pathname. +Furthermore, this slight interface difference between +`filepath.WalkFunc` and this library's `WalkFunc` eliminates the +boilerplate code that callback handlers must write when they use +`filepath.Walk`. Rather than every callback function needing to check +the error value passed into it and branch accordingly, users of this +library do not even have an error value to check immediately upon +entry into the callback function. This is an improvement both in +runtime performance and code clarity. + +#### Callback function is invoked with OS specific file system path separator + +On every OS platform `filepath.Walk` invokes the callback function +with a solidus (`/`) delimited pathname. By contrast this library +invokes the callback with the os-specific pathname separator, +obviating a call to `filepath.Clean` in the callback function for each +node prior to actually using the provided pathname. In other words, even on Windows, `filepath.Walk` will invoke the callback with `some/path/to/foo.txt`, requiring well written clients to perform pathname normalization for every file prior to working with -the specified file. In truth, many clients developed on unix and not -tested on Windows neglect this subtlety, and will result in software -bugs when running on Windows. This library would invoke the callback -function with `some\path\to\foo.txt` for the same file when running on -Windows, eliminating the need to normalize the pathname by the client, -and lessen the likelyhood that a client will work on unix but not on +the specified file. This is a hidden boilerplate requirement to create +truly os agnostic callback functions. In truth, many clients developed +on unix and not tested on Windows neglect this subtlety, and will +result in software bugs when someone tries to run that software on Windows. +This library invokes the callback function with `some\path\to\foo.txt` +for the same file when running on Windows, eliminating the need to +normalize the pathname by the client, and lessen the likelyhood that a +client will work on unix but not on Windows. + +This enhancement eliminates necessity for some more boilerplate code +in callback functions while improving the runtime performance of this +library. 
+ +#### `godirwalk.SkipThis` is more intuitive to use than `filepath.SkipDir` + +One arguably confusing aspect of the `filepath.WalkFunc` interface +that this library must emulate is how a caller tells the `Walk` +function to skip file system entries. With both `filepath.Walk` and +this library's `Walk`, when a callback function wants to skip a +directory and not descend into its children, it returns +`filepath.SkipDir`. If the callback function returns +`filepath.SkipDir` for a non-directory, `filepath.Walk` and this +library will stop processing any more entries in the current +directory. This is not necessarily what most developers want or +expect. If you want to simply skip a particular non-directory entry +but continue processing entries in the directory, the callback +function must return nil. + +The implications of this interface design is when you want to walk a +file system hierarchy and skip an entry, you have to return a +different value based on what type of file system entry that node +is. To skip an entry, if the entry is a directory, you must return +`filepath.SkipDir`, and if entry is not a directory, you must return +`nil`. This is an unfortunate hurdle I have observed many developers +struggling with, simply because it is not an intuitive interface. + +Here is an example callback function that adheres to +`filepath.WalkFunc` interface to have it skip any file system entry +whose full pathname includes a particular substring, `optSkip`. Note +that this library still supports identical behavior of `filepath.Walk` +when the callback function returns `filepath.SkipDir`. + +```Go + func callback1(osPathname string, de *godirwalk.Dirent) error { + if optSkip != "" && strings.Contains(osPathname, optSkip) { + if b, err := de.IsDirOrSymlinkToDir(); b == true && err == nil { + return filepath.SkipDir + } + return nil + } + // Process file like normal... + return nil + } +``` + +This library attempts to eliminate some of that logic boilerplate +required in callback functions by providing a new token error value, +`SkipThis`, which a callback function may return to skip the current +file system entry regardless of what type of entry it is. If the +current entry is a directory, its children will not be enumerated, +exactly as if the callback had returned `filepath.SkipDir`. If the +current entry is a non-directory, the next file system entry in the +current directory will be enumerated, exactly as if the callback +returned `nil`. The following example callback function has identical +behavior as the previous, but has less boilerplate, and admittedly +logic that I find more simple to follow. + +```Go + func callback2(osPathname string, de *godirwalk.Dirent) error { + if optSkip != "" && strings.Contains(osPathname, optSkip) { + return godirwalk.SkipThis + } + // Process file like normal... + return nil + } +``` + ### It's more flexible than `filepath.Walk` #### Configurable Handling of Symbolic Links @@ -182,7 +277,7 @@ does. However, it does invoke the callback function with each node it finds, including symbolic links. If a particular use case exists to follow symbolic links when traversing a directory tree, this library can be invoked in manner to do so, by setting the -`FollowSymbolicLinks` parameter to true. +`FollowSymbolicLinks` config parameter to `true`. #### Configurable Sorting of Directory Children @@ -191,11 +286,12 @@ descendants of a directory prior to visiting each node, just like `filepath.Walk` does. This is usually the desired behavior. 
However, this does come at slight performance and memory penalties required to sort the names when a directory node has many entries. Additionally if -caller specifies `Unsorted` enumeration, reading directories is lazily -performed as the caller consumes entries. If a particular use case -exists that does not require sorting the directory's immediate -descendants prior to visiting its nodes, this library will skip the -sorting step when the `Unsorted` parameter is set to true. +caller specifies `Unsorted` enumeration in the configuration +parameter, reading directories is lazily performed as the caller +consumes entries. If a particular use case exists that does not +require sorting the directory's immediate descendants prior to +visiting its nodes, this library will skip the sorting step when the +`Unsorted` parameter is set to `true`. Here's an interesting read of the potential hazzards of traversing a file system hierarchy in a non-deterministic order. If you know the @@ -208,10 +304,11 @@ setting this option. #### Configurable Post Children Callback This library provides upstream code with the ability to specify a -callback to be invoked for each directory after its children are -processed. This has been used to recursively delete empty directories -after traversing the file system in a more efficient manner. See the -`examples/clean-empties` directory for an example of this usage. +callback function to be invoked for each directory after its children +are processed. This has been used to recursively delete empty +directories after traversing the file system in a more efficient +manner. See the `examples/clean-empties` directory for an example of +this usage. #### Configurable Error Callback diff --git a/vendor/github.com/karrick/godirwalk/readdir_unix.go b/vendor/github.com/karrick/godirwalk/readdir_unix.go index cb54b3d..a993038 100644 --- a/vendor/github.com/karrick/godirwalk/readdir_unix.go +++ b/vendor/github.com/karrick/godirwalk/readdir_unix.go @@ -31,6 +31,7 @@ func readDirents(osDirname string, scratchBuffer []byte) ([]*Dirent, error) { scratchBuffer = newScratchBuffer() } + var sde syscall.Dirent for { if len(workBuffer) == 0 { n, err := syscall.ReadDirent(fd, scratchBuffer) @@ -51,14 +52,14 @@ func readDirents(osDirname string, scratchBuffer []byte) ([]*Dirent, error) { workBuffer = scratchBuffer[:n] // trim work buffer to number of bytes read } - sde := (*syscall.Dirent)(unsafe.Pointer(&workBuffer[0])) // point entry to first syscall.Dirent in buffer - workBuffer = workBuffer[reclen(sde):] // advance buffer for next iteration through loop + copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(&sde))[:], workBuffer) + workBuffer = workBuffer[reclen(&sde):] // advance buffer for next iteration through loop - if inoFromDirent(sde) == 0 { + if inoFromDirent(&sde) == 0 { continue // inode set to 0 indicates an entry that was marked as deleted } - nameSlice := nameFromDirent(sde) + nameSlice := nameFromDirent(&sde) nameLength := len(nameSlice) if nameLength == 0 || (nameSlice[0] == '.' 
&& (nameLength == 1 || (nameLength == 2 && nameSlice[1] == '.'))) { @@ -66,7 +67,7 @@ func readDirents(osDirname string, scratchBuffer []byte) ([]*Dirent, error) { } childName := string(nameSlice) - mt, err := modeTypeFromDirent(sde, osDirname, childName) + mt, err := modeTypeFromDirent(&sde, osDirname, childName) if err != nil { _ = dh.Close() return nil, err diff --git a/vendor/github.com/karrick/godirwalk/scandir_unix.go b/vendor/github.com/karrick/godirwalk/scandir_unix.go index 1e6688a..33250b6 100644 --- a/vendor/github.com/karrick/godirwalk/scandir_unix.go +++ b/vendor/github.com/karrick/godirwalk/scandir_unix.go @@ -18,7 +18,7 @@ type Scanner struct { statErr error // statErr is any error return while attempting to stat an entry dh *os.File // used to close directory after done reading de *Dirent // most recently decoded directory entry - sde *syscall.Dirent + sde syscall.Dirent fd int // file descriptor used to read entries from directory } @@ -77,7 +77,7 @@ func NewScannerWithScratchBuffer(osDirname string, scratchBuffer []byte) (*Scann func (s *Scanner) Dirent() (*Dirent, error) { if s.de == nil { s.de = &Dirent{name: s.childName, path: s.osDirname} - s.de.modeType, s.statErr = modeTypeFromDirent(s.sde, s.osDirname, s.childName) + s.de.modeType, s.statErr = modeTypeFromDirent(&s.sde, s.osDirname, s.childName) } return s.de, s.statErr } @@ -96,7 +96,8 @@ func (s *Scanner) done(err error) { s.osDirname, s.childName = "", "" s.scratchBuffer, s.workBuffer = nil, nil - s.dh, s.de, s.sde, s.statErr = nil, nil, nil, nil + s.dh, s.de, s.statErr = nil, nil, nil + s.sde = syscall.Dirent{} s.fd = 0 } @@ -144,14 +145,15 @@ func (s *Scanner) Scan() bool { s.workBuffer = s.scratchBuffer[:n] // trim work buffer to number of bytes read } - s.sde = (*syscall.Dirent)(unsafe.Pointer(&s.workBuffer[0])) // point entry to first syscall.Dirent in buffer - s.workBuffer = s.workBuffer[reclen(s.sde):] // advance buffer for next iteration through loop + // point entry to first syscall.Dirent in buffer + copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(&s.sde))[:], s.workBuffer) + s.workBuffer = s.workBuffer[reclen(&s.sde):] // advance buffer for next iteration through loop - if inoFromDirent(s.sde) == 0 { + if inoFromDirent(&s.sde) == 0 { continue // inode set to 0 indicates an entry that was marked as deleted } - nameSlice := nameFromDirent(s.sde) + nameSlice := nameFromDirent(&s.sde) nameLength := len(nameSlice) if nameLength == 0 || (nameSlice[0] == '.' && (nameLength == 1 || (nameLength == 2 && nameSlice[1] == '.'))) { diff --git a/vendor/github.com/karrick/godirwalk/walk.go b/vendor/github.com/karrick/godirwalk/walk.go index b15a190..9d0235d 100644 --- a/vendor/github.com/karrick/godirwalk/walk.go +++ b/vendor/github.com/karrick/godirwalk/walk.go @@ -96,6 +96,11 @@ const ( SkipNode ) +// SkipThis is used as a return value from WalkFuncs to indicate that the file +// system entry named in the call is to be skipped. It is not returned as an +// error by any function. +var SkipThis = errors.New("skip this directory entry") + // WalkFunc is the type of the function called for each file system node visited // by Walk. The pathname argument will contain the argument to Walk as a prefix; // that is, if Walk is called with "dir", which is a directory containing the @@ -119,6 +124,55 @@ const ( // Walk skips the remaining files in the containing directory. 
Note that any // supplied ErrorCallback function is not invoked with filepath.SkipDir when the // Callback or PostChildrenCallback functions return that special value. +// +// One arguably confusing aspect of the filepath.WalkFunc API that this library +// must emulate is how a caller tells Walk to skip file system entries or +// directories. With both filepath.Walk and this Walk, when a callback function +// wants to skip a directory and not descend into its children, it returns +// filepath.SkipDir. If the callback function returns filepath.SkipDir for a +// non-directory, filepath.Walk and this library will stop processing any more +// entries in the current directory, which is what many people do not want. If +// you want to simply skip a particular non-directory entry but continue +// processing entries in the directory, a callback function must return nil. The +// implications of this API is when you want to walk a file system hierarchy and +// skip an entry, when the entry is a directory, you must return one value, +// namely filepath.SkipDir, but when the entry is a non-directory, you must +// return a different value, namely nil. In other words, to get identical +// behavior for two file system entry types you need to send different token +// values. +// +// Here is an example callback function that adheres to filepath.Walk API to +// have it skip any file system entry whose full pathname includes a particular +// substring, optSkip: +// +// func callback1(osPathname string, de *godirwalk.Dirent) error { +// if optSkip != "" && strings.Contains(osPathname, optSkip) { +// if b, err := de.IsDirOrSymlinkToDir(); b == true && err == nil { +// return filepath.SkipDir +// } +// return nil +// } +// // Process file like normal... +// return nil +// } +// +// This library attempts to eliminate some of that logic boilerplate by +// providing a new token error value, SkipThis, which a callback function may +// return to skip the current file system entry regardless of what type of entry +// it is. If the current entry is a directory, its children will not be +// enumerated, exactly as if the callback returned filepath.SkipDir. If the +// current entry is a non-directory, the next file system entry in the current +// directory will be enumerated, exactly as if the callback returned nil. The +// following example callback function has identical behavior as the previous, +// but has less boilerplate, and admittedly more simple logic. +// +// func callback2(osPathname string, de *godirwalk.Dirent) error { +// if optSkip != "" && strings.Contains(osPathname, optSkip) { +// return godirwalk.SkipThis +// } +// // Process file like normal... 
+// return nil +// } type WalkFunc func(osPathname string, directoryEntry *Dirent) error // Walk walks the file tree rooted at the specified directory, calling the @@ -201,10 +255,15 @@ func Walk(pathname string, options *Options) error { options.ErrorCallback = defaultErrorCallback } - if err = walk(pathname, dirent, options); err != filepath.SkipDir { + err = walk(pathname, dirent, options) + switch err { + case nil, SkipThis, filepath.SkipDir: + // silence SkipThis and filepath.SkipDir for top level + debug("no error of significance: %v\n", err) + return nil + default: return err } - return nil // silence SkipDir for top level } // defaultErrorCallback always returns Halt because if the upstream code did not @@ -217,7 +276,7 @@ func defaultErrorCallback(_ string, _ error) ErrorAction { return Halt } func walk(osPathname string, dirent *Dirent, options *Options) error { err := options.Callback(osPathname, dirent) if err != nil { - if err == filepath.SkipDir { + if err == SkipThis || err == filepath.SkipDir { return err } if action := options.ErrorCallback(osPathname, err); action == SkipNode { @@ -278,7 +337,7 @@ func walk(osPathname string, dirent *Dirent, options *Options) error { } err = walk(osChildname, deChild, options) debug("osChildname: %q; error: %v\n", osChildname, err) - if err == nil { + if err == nil || err == SkipThis { continue } if err != filepath.SkipDir { diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go index 04c4229..41215d7 100644 --- a/vendor/github.com/mattn/go-colorable/colorable_windows.go +++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go @@ -719,7 +719,7 @@ loop: n256setup() } attr &= backgroundMask - attr |= n256foreAttr[n256] + attr |= n256foreAttr[n256%len(n256foreAttr)] i += 2 } } else if len(token) == 5 && token[i+1] == "2" { @@ -761,7 +761,7 @@ loop: n256setup() } attr &= foregroundMask - attr |= n256backAttr[n256] + attr |= n256backAttr[n256%len(n256backAttr)] i += 2 } } else if len(token) == 5 && token[i+1] == "2" { diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go index 95f2c6b..2dcb09a 100644 --- a/vendor/github.com/mattn/go-colorable/noncolorable.go +++ b/vendor/github.com/mattn/go-colorable/noncolorable.go @@ -27,7 +27,10 @@ loop: } if c1 != 0x1b { bw[0] = c1 - w.out.Write(bw[:]) + _, err = w.out.Write(bw[:]) + if err != nil { + break loop + } continue } c2, err := er.ReadByte() diff --git a/vendor/github.com/mattn/go-isatty/.travis.yml b/vendor/github.com/mattn/go-isatty/.travis.yml deleted file mode 100644 index 604314d..0000000 --- a/vendor/github.com/mattn/go-isatty/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -language: go -sudo: false -go: - - 1.13.x - - tip - -before_install: - - go get -t -v ./... 
- -script: - - ./go.test.sh - -after_success: - - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/mattn/go-isatty/go.mod b/vendor/github.com/mattn/go-isatty/go.mod index 605c4c2..c9a20b7 100644 --- a/vendor/github.com/mattn/go-isatty/go.mod +++ b/vendor/github.com/mattn/go-isatty/go.mod @@ -2,4 +2,4 @@ module github.com/mattn/go-isatty go 1.12 -require golang.org/x/sys v0.0.0-20200116001909-b77594299b42 +require golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go index 711f288..39bbcf0 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_bsd.go +++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go @@ -1,3 +1,4 @@ +//go:build (darwin || freebsd || openbsd || netbsd || dragonfly) && !appengine // +build darwin freebsd openbsd netbsd dragonfly // +build !appengine diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go index ff714a3..3150322 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_others.go +++ b/vendor/github.com/mattn/go-isatty/isatty_others.go @@ -1,4 +1,5 @@ -// +build appengine js nacl +//go:build appengine || js || nacl || wasm +// +build appengine js nacl wasm package isatty diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go index c5b6e0c..bae7f9b 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_plan9.go +++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go @@ -1,3 +1,4 @@ +//go:build plan9 // +build plan9 package isatty diff --git a/vendor/github.com/mattn/go-isatty/isatty_solaris.go b/vendor/github.com/mattn/go-isatty/isatty_solaris.go index bdd5c79..0c3acf2 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_solaris.go +++ b/vendor/github.com/mattn/go-isatty/isatty_solaris.go @@ -1,5 +1,5 @@ -// +build solaris -// +build !appengine +//go:build solaris && !appengine +// +build solaris,!appengine package isatty @@ -8,10 +8,9 @@ import ( ) // IsTerminal returns true if the given file descriptor is a terminal. 
-// see: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libbc/libc/gen/common/isatty.c +// see: https://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libc/port/gen/isatty.c func IsTerminal(fd uintptr) bool { - var termio unix.Termio - err := unix.IoctlSetTermio(int(fd), unix.TCGETA, &termio) + _, err := unix.IoctlGetTermio(int(fd), unix.TCGETA) return err == nil } diff --git a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go index 31a1ca9..6778765 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go +++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go @@ -1,4 +1,5 @@ -// +build linux aix +//go:build (linux || aix || zos) && !appengine +// +build linux aix zos // +build !appengine package isatty diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go index 1fa8691..8e3c991 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_windows.go +++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go @@ -1,5 +1,5 @@ -// +build windows -// +build !appengine +//go:build windows && !appengine +// +build windows,!appengine package isatty @@ -76,7 +76,7 @@ func isCygwinPipeName(name string) bool { } // getFileNameByHandle use the undocomented ntdll NtQueryObject to get file full name from file handler -// since GetFileInformationByHandleEx is not avilable under windows Vista and still some old fashion +// since GetFileInformationByHandleEx is not available under windows Vista and still some old fashion // guys are using Windows XP, this is a workaround for those guys, it will also work on system from // Windows vista to 10 // see https://stackoverflow.com/a/18792477 for details diff --git a/vendor/github.com/mattn/go-isatty/renovate.json b/vendor/github.com/mattn/go-isatty/renovate.json deleted file mode 100644 index 5ae9d96..0000000 --- a/vendor/github.com/mattn/go-isatty/renovate.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": [ - "config:base" - ], - "postUpdateOptions": [ - "gomodTidy" - ] -} diff --git a/vendor/github.com/shopspring/decimal/.gitignore b/vendor/github.com/shopspring/decimal/.gitignore new file mode 100644 index 0000000..8a43ce9 --- /dev/null +++ b/vendor/github.com/shopspring/decimal/.gitignore @@ -0,0 +1,6 @@ +.git +*.swp + +# IntelliJ +.idea/ +*.iml diff --git a/vendor/github.com/shopspring/decimal/.travis.yml b/vendor/github.com/shopspring/decimal/.travis.yml new file mode 100644 index 0000000..55d42b2 --- /dev/null +++ b/vendor/github.com/shopspring/decimal/.travis.yml @@ -0,0 +1,13 @@ +language: go + +go: + - 1.7.x + - 1.12.x + - 1.13.x + - tip + +install: + - go build . 
+ +script: + - go test -v diff --git a/vendor/github.com/shopspring/decimal/CHANGELOG.md b/vendor/github.com/shopspring/decimal/CHANGELOG.md new file mode 100644 index 0000000..01ba02f --- /dev/null +++ b/vendor/github.com/shopspring/decimal/CHANGELOG.md @@ -0,0 +1,19 @@ +## Decimal v1.2.0 + +#### BREAKING +- Drop support for Go version older than 1.7 [#172](https://github.com/shopspring/decimal/pull/172) + +#### FEATURES +- Add NewFromInt and NewFromInt32 initializers [#72](https://github.com/shopspring/decimal/pull/72) +- Add support for Go modules [#157](https://github.com/shopspring/decimal/pull/157) +- Add BigInt, BigFloat helper methods [#171](https://github.com/shopspring/decimal/pull/171) + +#### ENHANCEMENTS +- Memory usage optimization [#160](https://github.com/shopspring/decimal/pull/160) +- Updated travis CI golang versions [#156](https://github.com/shopspring/decimal/pull/156) +- Update documentation [#173](https://github.com/shopspring/decimal/pull/173) +- Improve code quality [#174](https://github.com/shopspring/decimal/pull/174) + +#### BUGFIXES +- Revert remove insignificant digits [#159](https://github.com/shopspring/decimal/pull/159) +- Remove 15 interval for RoundCash [#166](https://github.com/shopspring/decimal/pull/166) diff --git a/vendor/github.com/shopspring/decimal/LICENSE b/vendor/github.com/shopspring/decimal/LICENSE new file mode 100644 index 0000000..ad2148a --- /dev/null +++ b/vendor/github.com/shopspring/decimal/LICENSE @@ -0,0 +1,45 @@ +The MIT License (MIT) + +Copyright (c) 2015 Spring, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +- Based on https://github.com/oguzbilgic/fpd, which has the following license: +""" +The MIT License (MIT) + +Copyright (c) 2013 Oguz Bilgic + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/vendor/github.com/shopspring/decimal/README.md b/vendor/github.com/shopspring/decimal/README.md new file mode 100644 index 0000000..b70f901 --- /dev/null +++ b/vendor/github.com/shopspring/decimal/README.md @@ -0,0 +1,130 @@ +# decimal + +[![Build Status](https://travis-ci.org/shopspring/decimal.png?branch=master)](https://travis-ci.org/shopspring/decimal) [![GoDoc](https://godoc.org/github.com/shopspring/decimal?status.svg)](https://godoc.org/github.com/shopspring/decimal) [![Go Report Card](https://goreportcard.com/badge/github.com/shopspring/decimal)](https://goreportcard.com/report/github.com/shopspring/decimal) + +Arbitrary-precision fixed-point decimal numbers in go. + +_Note:_ Decimal library can "only" represent numbers with a maximum of 2^31 digits after the decimal point. + +## Features + + * The zero-value is 0, and is safe to use without initialization + * Addition, subtraction, multiplication with no loss of precision + * Division with specified precision + * Database/sql serialization/deserialization + * JSON and XML serialization/deserialization + +## Install + +Run `go get github.com/shopspring/decimal` + +## Requirements + +Decimal library requires Go version `>=1.7` + +## Usage + +```go +package main + +import ( + "fmt" + "github.com/shopspring/decimal" +) + +func main() { + price, err := decimal.NewFromString("136.02") + if err != nil { + panic(err) + } + + quantity := decimal.NewFromInt(3) + + fee, _ := decimal.NewFromString(".035") + taxRate, _ := decimal.NewFromString(".08875") + + subtotal := price.Mul(quantity) + + preTax := subtotal.Mul(fee.Add(decimal.NewFromFloat(1))) + + total := preTax.Mul(taxRate.Add(decimal.NewFromFloat(1))) + + fmt.Println("Subtotal:", subtotal) // Subtotal: 408.06 + fmt.Println("Pre-tax:", preTax) // Pre-tax: 422.3421 + fmt.Println("Taxes:", total.Sub(preTax)) // Taxes: 37.482861375 + fmt.Println("Total:", total) // Total: 459.824961375 + fmt.Println("Tax rate:", total.Sub(preTax).Div(preTax)) // Tax rate: 0.08875 +} +``` + +## Documentation + +http://godoc.org/github.com/shopspring/decimal + +## Production Usage + +* [Spring](https://shopspring.com/), since August 14, 2014. +* If you are using this in production, please let us know! + +## FAQ + +#### Why don't you just use float64? + +Because float64 (or any binary floating point type, actually) can't represent +numbers such as `0.1` exactly. + +Consider this code: http://play.golang.org/p/TQBd4yJe6B You might expect that +it prints out `10`, but it actually prints `9.999999999999831`. Over time, +these small errors can really add up! + +#### Why don't you just use big.Rat? + +big.Rat is fine for representing rational numbers, but Decimal is better for +representing money. Why? Here's a (contrived) example: + +Let's say you use big.Rat, and you have two numbers, x and y, both +representing 1/3, and you have `z = 1 - x - y = 1/3`. If you print each one +out, the string output has to stop somewhere (let's say it stops at 3 decimal +digits, for simplicity), so you'll get 0.333, 0.333, and 0.333. But where did +the other 0.001 go? + +Here's the above example as code: http://play.golang.org/p/lCZZs0w9KE + +With Decimal, the strings being printed out represent the number exactly. 
So, +if you have `x = y = 1/3` (with precision 3), they will actually be equal to +0.333, and when you do `z = 1 - x - y`, `z` will be equal to .334. No money is +unaccounted for! + +You still have to be careful. If you want to split a number `N` 3 ways, you +can't just send `N/3` to three different people. You have to pick one to send +`N - (2/3*N)` to. That person will receive the fraction of a penny remainder. + +But, it is much easier to be careful with Decimal than with big.Rat. + +#### Why isn't the API similar to big.Int's? + +big.Int's API is built to reduce the number of memory allocations for maximal +performance. This makes sense for its use-case, but the trade-off is that the +API is awkward and easy to misuse. + +For example, to add two big.Ints, you do: `z := new(big.Int).Add(x, y)`. A +developer unfamiliar with this API might try to do `z := a.Add(a, b)`. This +modifies `a` and sets `z` as an alias for `a`, which they might not expect. It +also modifies any other aliases to `a`. + +Here's an example of the subtle bugs you can introduce with big.Int's API: +https://play.golang.org/p/x2R_78pa8r + +In contrast, it's difficult to make such mistakes with decimal. Decimals +behave like other go numbers types: even though `a = b` will not deep copy +`b` into `a`, it is impossible to modify a Decimal, since all Decimal methods +return new Decimals and do not modify the originals. The downside is that +this causes extra allocations, so Decimal is less performant. My assumption +is that if you're using Decimals, you probably care more about correctness +than performance. + +## License + +The MIT License (MIT) + +This is a heavily modified fork of [fpd.Decimal](https://github.com/oguzbilgic/fpd), which was also released under the MIT License. diff --git a/vendor/github.com/shopspring/decimal/decimal-go.go b/vendor/github.com/shopspring/decimal/decimal-go.go new file mode 100644 index 0000000..9958d69 --- /dev/null +++ b/vendor/github.com/shopspring/decimal/decimal-go.go @@ -0,0 +1,415 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Multiprecision decimal numbers. +// For floating-point formatting only; not general purpose. +// Only operations are assign and (binary) left/right shift. +// Can do binary floating point in multiprecision decimal precisely +// because 2 divides 10; cannot do decimal floating point +// in multiprecision binary precisely. + +package decimal + +type decimal struct { + d [800]byte // digits, big-endian representation + nd int // number of digits used + dp int // decimal point + neg bool // negative flag + trunc bool // discarded nonzero digits beyond d[:nd] +} + +func (a *decimal) String() string { + n := 10 + a.nd + if a.dp > 0 { + n += a.dp + } + if a.dp < 0 { + n += -a.dp + } + + buf := make([]byte, n) + w := 0 + switch { + case a.nd == 0: + return "0" + + case a.dp <= 0: + // zeros fill space between decimal point and digits + buf[w] = '0' + w++ + buf[w] = '.' + w++ + w += digitZero(buf[w : w+-a.dp]) + w += copy(buf[w:], a.d[0:a.nd]) + + case a.dp < a.nd: + // decimal point in middle of digits + w += copy(buf[w:], a.d[0:a.dp]) + buf[w] = '.' 
+ w++ + w += copy(buf[w:], a.d[a.dp:a.nd]) + + default: + // zeros fill space between digits and decimal point + w += copy(buf[w:], a.d[0:a.nd]) + w += digitZero(buf[w : w+a.dp-a.nd]) + } + return string(buf[0:w]) +} + +func digitZero(dst []byte) int { + for i := range dst { + dst[i] = '0' + } + return len(dst) +} + +// trim trailing zeros from number. +// (They are meaningless; the decimal point is tracked +// independent of the number of digits.) +func trim(a *decimal) { + for a.nd > 0 && a.d[a.nd-1] == '0' { + a.nd-- + } + if a.nd == 0 { + a.dp = 0 + } +} + +// Assign v to a. +func (a *decimal) Assign(v uint64) { + var buf [24]byte + + // Write reversed decimal in buf. + n := 0 + for v > 0 { + v1 := v / 10 + v -= 10 * v1 + buf[n] = byte(v + '0') + n++ + v = v1 + } + + // Reverse again to produce forward decimal in a.d. + a.nd = 0 + for n--; n >= 0; n-- { + a.d[a.nd] = buf[n] + a.nd++ + } + a.dp = a.nd + trim(a) +} + +// Maximum shift that we can do in one pass without overflow. +// A uint has 32 or 64 bits, and we have to be able to accommodate 9<> 63) +const maxShift = uintSize - 4 + +// Binary shift right (/ 2) by k bits. k <= maxShift to avoid overflow. +func rightShift(a *decimal, k uint) { + r := 0 // read pointer + w := 0 // write pointer + + // Pick up enough leading digits to cover first shift. + var n uint + for ; n>>k == 0; r++ { + if r >= a.nd { + if n == 0 { + // a == 0; shouldn't get here, but handle anyway. + a.nd = 0 + return + } + for n>>k == 0 { + n = n * 10 + r++ + } + break + } + c := uint(a.d[r]) + n = n*10 + c - '0' + } + a.dp -= r - 1 + + var mask uint = (1 << k) - 1 + + // Pick up a digit, put down a digit. + for ; r < a.nd; r++ { + c := uint(a.d[r]) + dig := n >> k + n &= mask + a.d[w] = byte(dig + '0') + w++ + n = n*10 + c - '0' + } + + // Put down extra digits. + for n > 0 { + dig := n >> k + n &= mask + if w < len(a.d) { + a.d[w] = byte(dig + '0') + w++ + } else if dig > 0 { + a.trunc = true + } + n = n * 10 + } + + a.nd = w + trim(a) +} + +// Cheat sheet for left shift: table indexed by shift count giving +// number of new digits that will be introduced by that shift. +// +// For example, leftcheats[4] = {2, "625"}. That means that +// if we are shifting by 4 (multiplying by 16), it will add 2 digits +// when the string prefix is "625" through "999", and one fewer digit +// if the string prefix is "000" through "624". +// +// Credit for this trick goes to Ken. + +type leftCheat struct { + delta int // number of new digits + cutoff string // minus one digit if original < a. +} + +var leftcheats = []leftCheat{ + // Leading digits of 1/2^i = 5^i. + // 5^23 is not an exact 64-bit floating point number, + // so have to use bc for the math. + // Go up to 60 to be large enough for 32bit and 64bit platforms. 
+ /* + seq 60 | sed 's/^/5^/' | bc | + awk 'BEGIN{ print "\t{ 0, \"\" }," } + { + log2 = log(2)/log(10) + printf("\t{ %d, \"%s\" },\t// * %d\n", + int(log2*NR+1), $0, 2**NR) + }' + */ + {0, ""}, + {1, "5"}, // * 2 + {1, "25"}, // * 4 + {1, "125"}, // * 8 + {2, "625"}, // * 16 + {2, "3125"}, // * 32 + {2, "15625"}, // * 64 + {3, "78125"}, // * 128 + {3, "390625"}, // * 256 + {3, "1953125"}, // * 512 + {4, "9765625"}, // * 1024 + {4, "48828125"}, // * 2048 + {4, "244140625"}, // * 4096 + {4, "1220703125"}, // * 8192 + {5, "6103515625"}, // * 16384 + {5, "30517578125"}, // * 32768 + {5, "152587890625"}, // * 65536 + {6, "762939453125"}, // * 131072 + {6, "3814697265625"}, // * 262144 + {6, "19073486328125"}, // * 524288 + {7, "95367431640625"}, // * 1048576 + {7, "476837158203125"}, // * 2097152 + {7, "2384185791015625"}, // * 4194304 + {7, "11920928955078125"}, // * 8388608 + {8, "59604644775390625"}, // * 16777216 + {8, "298023223876953125"}, // * 33554432 + {8, "1490116119384765625"}, // * 67108864 + {9, "7450580596923828125"}, // * 134217728 + {9, "37252902984619140625"}, // * 268435456 + {9, "186264514923095703125"}, // * 536870912 + {10, "931322574615478515625"}, // * 1073741824 + {10, "4656612873077392578125"}, // * 2147483648 + {10, "23283064365386962890625"}, // * 4294967296 + {10, "116415321826934814453125"}, // * 8589934592 + {11, "582076609134674072265625"}, // * 17179869184 + {11, "2910383045673370361328125"}, // * 34359738368 + {11, "14551915228366851806640625"}, // * 68719476736 + {12, "72759576141834259033203125"}, // * 137438953472 + {12, "363797880709171295166015625"}, // * 274877906944 + {12, "1818989403545856475830078125"}, // * 549755813888 + {13, "9094947017729282379150390625"}, // * 1099511627776 + {13, "45474735088646411895751953125"}, // * 2199023255552 + {13, "227373675443232059478759765625"}, // * 4398046511104 + {13, "1136868377216160297393798828125"}, // * 8796093022208 + {14, "5684341886080801486968994140625"}, // * 17592186044416 + {14, "28421709430404007434844970703125"}, // * 35184372088832 + {14, "142108547152020037174224853515625"}, // * 70368744177664 + {15, "710542735760100185871124267578125"}, // * 140737488355328 + {15, "3552713678800500929355621337890625"}, // * 281474976710656 + {15, "17763568394002504646778106689453125"}, // * 562949953421312 + {16, "88817841970012523233890533447265625"}, // * 1125899906842624 + {16, "444089209850062616169452667236328125"}, // * 2251799813685248 + {16, "2220446049250313080847263336181640625"}, // * 4503599627370496 + {16, "11102230246251565404236316680908203125"}, // * 9007199254740992 + {17, "55511151231257827021181583404541015625"}, // * 18014398509481984 + {17, "277555756156289135105907917022705078125"}, // * 36028797018963968 + {17, "1387778780781445675529539585113525390625"}, // * 72057594037927936 + {18, "6938893903907228377647697925567626953125"}, // * 144115188075855872 + {18, "34694469519536141888238489627838134765625"}, // * 288230376151711744 + {18, "173472347597680709441192448139190673828125"}, // * 576460752303423488 + {19, "867361737988403547205962240695953369140625"}, // * 1152921504606846976 +} + +// Is the leading prefix of b lexicographically less than s? +func prefixIsLessThan(b []byte, s string) bool { + for i := 0; i < len(s); i++ { + if i >= len(b) { + return true + } + if b[i] != s[i] { + return b[i] < s[i] + } + } + return false +} + +// Binary shift left (* 2) by k bits. k <= maxShift to avoid overflow. 
+func leftShift(a *decimal, k uint) { + delta := leftcheats[k].delta + if prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) { + delta-- + } + + r := a.nd // read index + w := a.nd + delta // write index + + // Pick up a digit, put down a digit. + var n uint + for r--; r >= 0; r-- { + n += (uint(a.d[r]) - '0') << k + quo := n / 10 + rem := n - 10*quo + w-- + if w < len(a.d) { + a.d[w] = byte(rem + '0') + } else if rem != 0 { + a.trunc = true + } + n = quo + } + + // Put down extra digits. + for n > 0 { + quo := n / 10 + rem := n - 10*quo + w-- + if w < len(a.d) { + a.d[w] = byte(rem + '0') + } else if rem != 0 { + a.trunc = true + } + n = quo + } + + a.nd += delta + if a.nd >= len(a.d) { + a.nd = len(a.d) + } + a.dp += delta + trim(a) +} + +// Binary shift left (k > 0) or right (k < 0). +func (a *decimal) Shift(k int) { + switch { + case a.nd == 0: + // nothing to do: a == 0 + case k > 0: + for k > maxShift { + leftShift(a, maxShift) + k -= maxShift + } + leftShift(a, uint(k)) + case k < 0: + for k < -maxShift { + rightShift(a, maxShift) + k += maxShift + } + rightShift(a, uint(-k)) + } +} + +// If we chop a at nd digits, should we round up? +func shouldRoundUp(a *decimal, nd int) bool { + if nd < 0 || nd >= a.nd { + return false + } + if a.d[nd] == '5' && nd+1 == a.nd { // exactly halfway - round to even + // if we truncated, a little higher than what's recorded - always round up + if a.trunc { + return true + } + return nd > 0 && (a.d[nd-1]-'0')%2 != 0 + } + // not halfway - digit tells all + return a.d[nd] >= '5' +} + +// Round a to nd digits (or fewer). +// If nd is zero, it means we're rounding +// just to the left of the digits, as in +// 0.09 -> 0.1. +func (a *decimal) Round(nd int) { + if nd < 0 || nd >= a.nd { + return + } + if shouldRoundUp(a, nd) { + a.RoundUp(nd) + } else { + a.RoundDown(nd) + } +} + +// Round a down to nd digits (or fewer). +func (a *decimal) RoundDown(nd int) { + if nd < 0 || nd >= a.nd { + return + } + a.nd = nd + trim(a) +} + +// Round a up to nd digits (or fewer). +func (a *decimal) RoundUp(nd int) { + if nd < 0 || nd >= a.nd { + return + } + + // round up + for i := nd - 1; i >= 0; i-- { + c := a.d[i] + if c < '9' { // can stop after this digit + a.d[i]++ + a.nd = i + 1 + return + } + } + + // Number is all 9s. + // Change to single 1 with adjusted decimal point. + a.d[0] = '1' + a.nd = 1 + a.dp++ +} + +// Extract integer part, rounded appropriately. +// No guarantees about overflow. +func (a *decimal) RoundedInteger() uint64 { + if a.dp > 20 { + return 0xFFFFFFFFFFFFFFFF + } + var i int + n := uint64(0) + for i = 0; i < a.dp && i < a.nd; i++ { + n = n*10 + uint64(a.d[i]-'0') + } + for ; i < a.dp; i++ { + n *= 10 + } + if shouldRoundUp(a, a.dp) { + n++ + } + return n +} diff --git a/vendor/github.com/shopspring/decimal/decimal.go b/vendor/github.com/shopspring/decimal/decimal.go new file mode 100644 index 0000000..801c1a0 --- /dev/null +++ b/vendor/github.com/shopspring/decimal/decimal.go @@ -0,0 +1,1477 @@ +// Package decimal implements an arbitrary precision fixed-point decimal. +// +// The zero-value of a Decimal is 0, as you would expect. +// +// The best way to create a new Decimal is to use decimal.NewFromString, ex: +// +// n, err := decimal.NewFromString("-123.4567") +// n.String() // output: "-123.4567" +// +// To use Decimal as part of a struct: +// +// type Struct struct { +// Number Decimal +// } +// +// Note: This can "only" represent numbers with a maximum of 2^31 digits after the decimal point. 
+package decimal + +import ( + "database/sql/driver" + "encoding/binary" + "fmt" + "math" + "math/big" + "strconv" + "strings" +) + +// DivisionPrecision is the number of decimal places in the result when it +// doesn't divide exactly. +// +// Example: +// +// d1 := decimal.NewFromFloat(2).Div(decimal.NewFromFloat(3)) +// d1.String() // output: "0.6666666666666667" +// d2 := decimal.NewFromFloat(2).Div(decimal.NewFromFloat(30000)) +// d2.String() // output: "0.0000666666666667" +// d3 := decimal.NewFromFloat(20000).Div(decimal.NewFromFloat(3)) +// d3.String() // output: "6666.6666666666666667" +// decimal.DivisionPrecision = 3 +// d4 := decimal.NewFromFloat(2).Div(decimal.NewFromFloat(3)) +// d4.String() // output: "0.667" +// +var DivisionPrecision = 16 + +// MarshalJSONWithoutQuotes should be set to true if you want the decimal to +// be JSON marshaled as a number, instead of as a string. +// WARNING: this is dangerous for decimals with many digits, since many JSON +// unmarshallers (ex: Javascript's) will unmarshal JSON numbers to IEEE 754 +// double-precision floating point numbers, which means you can potentially +// silently lose precision. +var MarshalJSONWithoutQuotes = false + +// Zero constant, to make computations faster. +// Zero should never be compared with == or != directly, please use decimal.Equal or decimal.Cmp instead. +var Zero = New(0, 1) + +var zeroInt = big.NewInt(0) +var oneInt = big.NewInt(1) +var twoInt = big.NewInt(2) +var fourInt = big.NewInt(4) +var fiveInt = big.NewInt(5) +var tenInt = big.NewInt(10) +var twentyInt = big.NewInt(20) + +// Decimal represents a fixed-point decimal. It is immutable. +// number = value * 10 ^ exp +type Decimal struct { + value *big.Int + + // NOTE(vadim): this must be an int32, because we cast it to float64 during + // calculations. If exp is 64 bit, we might lose precision. + // If we cared about being able to represent every possible decimal, we + // could make exp a *big.Int but it would hurt performance and numbers + // like that are unrealistic. + exp int32 +} + +// New returns a new fixed-point decimal, value * 10 ^ exp. +func New(value int64, exp int32) Decimal { + return Decimal{ + value: big.NewInt(value), + exp: exp, + } +} + +// NewFromInt converts a int64 to Decimal. +// +// Example: +// +// NewFromInt(123).String() // output: "123" +// NewFromInt(-10).String() // output: "-10" +func NewFromInt(value int64) Decimal { + return Decimal{ + value: big.NewInt(value), + exp: 0, + } +} + +// NewFromInt32 converts a int32 to Decimal. +// +// Example: +// +// NewFromInt(123).String() // output: "123" +// NewFromInt(-10).String() // output: "-10" +func NewFromInt32(value int32) Decimal { + return Decimal{ + value: big.NewInt(int64(value)), + exp: 0, + } +} + +// NewFromBigInt returns a new Decimal from a big.Int, value * 10 ^ exp +func NewFromBigInt(value *big.Int, exp int32) Decimal { + return Decimal{ + value: big.NewInt(0).Set(value), + exp: exp, + } +} + +// NewFromString returns a new Decimal from a string representation. +// Trailing zeroes are not trimmed. 
+// +// Example: +// +// d, err := NewFromString("-123.45") +// d2, err := NewFromString(".0001") +// d3, err := NewFromString("1.47000") +// +func NewFromString(value string) (Decimal, error) { + originalInput := value + var intString string + var exp int64 + + // Check if number is using scientific notation + eIndex := strings.IndexAny(value, "Ee") + if eIndex != -1 { + expInt, err := strconv.ParseInt(value[eIndex+1:], 10, 32) + if err != nil { + if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + return Decimal{}, fmt.Errorf("can't convert %s to decimal: fractional part too long", value) + } + return Decimal{}, fmt.Errorf("can't convert %s to decimal: exponent is not numeric", value) + } + value = value[:eIndex] + exp = expInt + } + + parts := strings.Split(value, ".") + if len(parts) == 1 { + // There is no decimal point, we can just parse the original string as + // an int + intString = value + } else if len(parts) == 2 { + intString = parts[0] + parts[1] + expInt := -len(parts[1]) + exp += int64(expInt) + } else { + return Decimal{}, fmt.Errorf("can't convert %s to decimal: too many .s", value) + } + + dValue := new(big.Int) + _, ok := dValue.SetString(intString, 10) + if !ok { + return Decimal{}, fmt.Errorf("can't convert %s to decimal", value) + } + + if exp < math.MinInt32 || exp > math.MaxInt32 { + // NOTE(vadim): I doubt a string could realistically be this long + return Decimal{}, fmt.Errorf("can't convert %s to decimal: fractional part too long", originalInput) + } + + return Decimal{ + value: dValue, + exp: int32(exp), + }, nil +} + +// RequireFromString returns a new Decimal from a string representation +// or panics if NewFromString would have returned an error. +// +// Example: +// +// d := RequireFromString("-123.45") +// d2 := RequireFromString(".0001") +// +func RequireFromString(value string) Decimal { + dec, err := NewFromString(value) + if err != nil { + panic(err) + } + return dec +} + +// NewFromFloat converts a float64 to Decimal. +// +// The converted number will contain the number of significant digits that can be +// represented in a float with reliable roundtrip. +// This is typically 15 digits, but may be more in some cases. +// See https://www.exploringbinary.com/decimal-precision-of-binary-floating-point-numbers/ for more information. +// +// For slightly faster conversion, use NewFromFloatWithExponent where you can specify the precision in absolute terms. +// +// NOTE: this will panic on NaN, +/-inf +func NewFromFloat(value float64) Decimal { + if value == 0 { + return New(0, 0) + } + return newFromFloat(value, math.Float64bits(value), &float64info) +} + +// NewFromFloat32 converts a float32 to Decimal. +// +// The converted number will contain the number of significant digits that can be +// represented in a float with reliable roundtrip. +// This is typically 6-8 digits depending on the input. +// See https://www.exploringbinary.com/decimal-precision-of-binary-floating-point-numbers/ for more information. +// +// For slightly faster conversion, use NewFromFloatWithExponent where you can specify the precision in absolute terms. 
+// +// NOTE: this will panic on NaN, +/-inf +func NewFromFloat32(value float32) Decimal { + if value == 0 { + return New(0, 0) + } + // XOR is workaround for https://github.com/golang/go/issues/26285 + a := math.Float32bits(value) ^ 0x80808080 + return newFromFloat(float64(value), uint64(a)^0x80808080, &float32info) +} + +func newFromFloat(val float64, bits uint64, flt *floatInfo) Decimal { + if math.IsNaN(val) || math.IsInf(val, 0) { + panic(fmt.Sprintf("Cannot create a Decimal from %v", val)) + } + exp := int(bits>>flt.mantbits) & (1<>(flt.expbits+flt.mantbits) != 0 + + roundShortest(&d, mant, exp, flt) + // If less than 19 digits, we can do calculation in an int64. + if d.nd < 19 { + tmp := int64(0) + m := int64(1) + for i := d.nd - 1; i >= 0; i-- { + tmp += m * int64(d.d[i]-'0') + m *= 10 + } + if d.neg { + tmp *= -1 + } + return Decimal{value: big.NewInt(tmp), exp: int32(d.dp) - int32(d.nd)} + } + dValue := new(big.Int) + dValue, ok := dValue.SetString(string(d.d[:d.nd]), 10) + if ok { + return Decimal{value: dValue, exp: int32(d.dp) - int32(d.nd)} + } + + return NewFromFloatWithExponent(val, int32(d.dp)-int32(d.nd)) +} + +// NewFromFloatWithExponent converts a float64 to Decimal, with an arbitrary +// number of fractional digits. +// +// Example: +// +// NewFromFloatWithExponent(123.456, -2).String() // output: "123.46" +// +func NewFromFloatWithExponent(value float64, exp int32) Decimal { + if math.IsNaN(value) || math.IsInf(value, 0) { + panic(fmt.Sprintf("Cannot create a Decimal from %v", value)) + } + + bits := math.Float64bits(value) + mant := bits & (1<<52 - 1) + exp2 := int32((bits >> 52) & (1<<11 - 1)) + sign := bits >> 63 + + if exp2 == 0 { + // specials + if mant == 0 { + return Decimal{} + } + // subnormal + exp2++ + } else { + // normal + mant |= 1 << 52 + } + + exp2 -= 1023 + 52 + + // normalizing base-2 values + for mant&1 == 0 { + mant = mant >> 1 + exp2++ + } + + // maximum number of fractional base-10 digits to represent 2^N exactly cannot be more than -N if N<0 + if exp < 0 && exp < exp2 { + if exp2 < 0 { + exp = exp2 + } else { + exp = 0 + } + } + + // representing 10^M * 2^N as 5^M * 2^(M+N) + exp2 -= exp + + temp := big.NewInt(1) + dMant := big.NewInt(int64(mant)) + + // applying 5^M + if exp > 0 { + temp = temp.SetInt64(int64(exp)) + temp = temp.Exp(fiveInt, temp, nil) + } else if exp < 0 { + temp = temp.SetInt64(-int64(exp)) + temp = temp.Exp(fiveInt, temp, nil) + dMant = dMant.Mul(dMant, temp) + temp = temp.SetUint64(1) + } + + // applying 2^(M+N) + if exp2 > 0 { + dMant = dMant.Lsh(dMant, uint(exp2)) + } else if exp2 < 0 { + temp = temp.Lsh(temp, uint(-exp2)) + } + + // rounding and downscaling + if exp > 0 || exp2 < 0 { + halfDown := new(big.Int).Rsh(temp, 1) + dMant = dMant.Add(dMant, halfDown) + dMant = dMant.Quo(dMant, temp) + } + + if sign == 1 { + dMant = dMant.Neg(dMant) + } + + return Decimal{ + value: dMant, + exp: exp, + } +} + +// rescale returns a rescaled version of the decimal. Returned +// decimal may be less precise if the given exponent is bigger +// than the initial exponent of the Decimal. 
+// NOTE: this will truncate, NOT round +// +// Example: +// +// d := New(12345, -4) +// d2 := d.rescale(-1) +// d3 := d2.rescale(-4) +// println(d1) +// println(d2) +// println(d3) +// +// Output: +// +// 1.2345 +// 1.2 +// 1.2000 +// +func (d Decimal) rescale(exp int32) Decimal { + d.ensureInitialized() + + if d.exp == exp { + return Decimal{ + new(big.Int).Set(d.value), + d.exp, + } + } + + // NOTE(vadim): must convert exps to float64 before - to prevent overflow + diff := math.Abs(float64(exp) - float64(d.exp)) + value := new(big.Int).Set(d.value) + + expScale := new(big.Int).Exp(tenInt, big.NewInt(int64(diff)), nil) + if exp > d.exp { + value = value.Quo(value, expScale) + } else if exp < d.exp { + value = value.Mul(value, expScale) + } + + return Decimal{ + value: value, + exp: exp, + } +} + +// Abs returns the absolute value of the decimal. +func (d Decimal) Abs() Decimal { + d.ensureInitialized() + d2Value := new(big.Int).Abs(d.value) + return Decimal{ + value: d2Value, + exp: d.exp, + } +} + +// Add returns d + d2. +func (d Decimal) Add(d2 Decimal) Decimal { + rd, rd2 := RescalePair(d, d2) + + d3Value := new(big.Int).Add(rd.value, rd2.value) + return Decimal{ + value: d3Value, + exp: rd.exp, + } +} + +// Sub returns d - d2. +func (d Decimal) Sub(d2 Decimal) Decimal { + rd, rd2 := RescalePair(d, d2) + + d3Value := new(big.Int).Sub(rd.value, rd2.value) + return Decimal{ + value: d3Value, + exp: rd.exp, + } +} + +// Neg returns -d. +func (d Decimal) Neg() Decimal { + d.ensureInitialized() + val := new(big.Int).Neg(d.value) + return Decimal{ + value: val, + exp: d.exp, + } +} + +// Mul returns d * d2. +func (d Decimal) Mul(d2 Decimal) Decimal { + d.ensureInitialized() + d2.ensureInitialized() + + expInt64 := int64(d.exp) + int64(d2.exp) + if expInt64 > math.MaxInt32 || expInt64 < math.MinInt32 { + // NOTE(vadim): better to panic than give incorrect results, as + // Decimals are usually used for money + panic(fmt.Sprintf("exponent %v overflows an int32!", expInt64)) + } + + d3Value := new(big.Int).Mul(d.value, d2.value) + return Decimal{ + value: d3Value, + exp: int32(expInt64), + } +} + +// Shift shifts the decimal in base 10. +// It shifts left when shift is positive and right if shift is negative. +// In simpler terms, the given value for shift is added to the exponent +// of the decimal. +func (d Decimal) Shift(shift int32) Decimal { + d.ensureInitialized() + return Decimal{ + value: new(big.Int).Set(d.value), + exp: d.exp + shift, + } +} + +// Div returns d / d2. If it doesn't divide exactly, the result will have +// DivisionPrecision digits after the decimal point. +func (d Decimal) Div(d2 Decimal) Decimal { + return d.DivRound(d2, int32(DivisionPrecision)) +} + +// QuoRem does divsion with remainder +// d.QuoRem(d2,precision) returns quotient q and remainder r such that +// d = d2 * q + r, q an integer multiple of 10^(-precision) +// 0 <= r < abs(d2) * 10 ^(-precision) if d>=0 +// 0 >= r > -abs(d2) * 10 ^(-precision) if d<0 +// Note that precision<0 is allowed as input. 
+func (d Decimal) QuoRem(d2 Decimal, precision int32) (Decimal, Decimal) { + d.ensureInitialized() + d2.ensureInitialized() + if d2.value.Sign() == 0 { + panic("decimal division by 0") + } + scale := -precision + e := int64(d.exp - d2.exp - scale) + if e > math.MaxInt32 || e < math.MinInt32 { + panic("overflow in decimal QuoRem") + } + var aa, bb, expo big.Int + var scalerest int32 + // d = a 10^ea + // d2 = b 10^eb + if e < 0 { + aa = *d.value + expo.SetInt64(-e) + bb.Exp(tenInt, &expo, nil) + bb.Mul(d2.value, &bb) + scalerest = d.exp + // now aa = a + // bb = b 10^(scale + eb - ea) + } else { + expo.SetInt64(e) + aa.Exp(tenInt, &expo, nil) + aa.Mul(d.value, &aa) + bb = *d2.value + scalerest = scale + d2.exp + // now aa = a ^ (ea - eb - scale) + // bb = b + } + var q, r big.Int + q.QuoRem(&aa, &bb, &r) + dq := Decimal{value: &q, exp: scale} + dr := Decimal{value: &r, exp: scalerest} + return dq, dr +} + +// DivRound divides and rounds to a given precision +// i.e. to an integer multiple of 10^(-precision) +// for a positive quotient digit 5 is rounded up, away from 0 +// if the quotient is negative then digit 5 is rounded down, away from 0 +// Note that precision<0 is allowed as input. +func (d Decimal) DivRound(d2 Decimal, precision int32) Decimal { + // QuoRem already checks initialization + q, r := d.QuoRem(d2, precision) + // the actual rounding decision is based on comparing r*10^precision and d2/2 + // instead compare 2 r 10 ^precision and d2 + var rv2 big.Int + rv2.Abs(r.value) + rv2.Lsh(&rv2, 1) + // now rv2 = abs(r.value) * 2 + r2 := Decimal{value: &rv2, exp: r.exp + precision} + // r2 is now 2 * r * 10 ^ precision + var c = r2.Cmp(d2.Abs()) + + if c < 0 { + return q + } + + if d.value.Sign()*d2.value.Sign() < 0 { + return q.Sub(New(1, -precision)) + } + + return q.Add(New(1, -precision)) +} + +// Mod returns d % d2. +func (d Decimal) Mod(d2 Decimal) Decimal { + quo := d.Div(d2).Truncate(0) + return d.Sub(d2.Mul(quo)) +} + +// Pow returns d to the power d2 +func (d Decimal) Pow(d2 Decimal) Decimal { + var temp Decimal + if d2.IntPart() == 0 { + return NewFromFloat(1) + } + temp = d.Pow(d2.Div(NewFromFloat(2))) + if d2.IntPart()%2 == 0 { + return temp.Mul(temp) + } + if d2.IntPart() > 0 { + return temp.Mul(temp).Mul(d) + } + return temp.Mul(temp).Div(d) +} + +// Cmp compares the numbers represented by d and d2 and returns: +// +// -1 if d < d2 +// 0 if d == d2 +// +1 if d > d2 +// +func (d Decimal) Cmp(d2 Decimal) int { + d.ensureInitialized() + d2.ensureInitialized() + + if d.exp == d2.exp { + return d.value.Cmp(d2.value) + } + + rd, rd2 := RescalePair(d, d2) + + return rd.value.Cmp(rd2.value) +} + +// Equal returns whether the numbers represented by d and d2 are equal. +func (d Decimal) Equal(d2 Decimal) bool { + return d.Cmp(d2) == 0 +} + +// Equals is deprecated, please use Equal method instead +func (d Decimal) Equals(d2 Decimal) bool { + return d.Equal(d2) +} + +// GreaterThan (GT) returns true when d is greater than d2. +func (d Decimal) GreaterThan(d2 Decimal) bool { + return d.Cmp(d2) == 1 +} + +// GreaterThanOrEqual (GTE) returns true when d is greater than or equal to d2. +func (d Decimal) GreaterThanOrEqual(d2 Decimal) bool { + cmp := d.Cmp(d2) + return cmp == 1 || cmp == 0 +} + +// LessThan (LT) returns true when d is less than d2. +func (d Decimal) LessThan(d2 Decimal) bool { + return d.Cmp(d2) == -1 +} + +// LessThanOrEqual (LTE) returns true when d is less than or equal to d2. 
+func (d Decimal) LessThanOrEqual(d2 Decimal) bool { + cmp := d.Cmp(d2) + return cmp == -1 || cmp == 0 +} + +// Sign returns: +// +// -1 if d < 0 +// 0 if d == 0 +// +1 if d > 0 +// +func (d Decimal) Sign() int { + if d.value == nil { + return 0 + } + return d.value.Sign() +} + +// IsPositive return +// +// true if d > 0 +// false if d == 0 +// false if d < 0 +func (d Decimal) IsPositive() bool { + return d.Sign() == 1 +} + +// IsNegative return +// +// true if d < 0 +// false if d == 0 +// false if d > 0 +func (d Decimal) IsNegative() bool { + return d.Sign() == -1 +} + +// IsZero return +// +// true if d == 0 +// false if d > 0 +// false if d < 0 +func (d Decimal) IsZero() bool { + return d.Sign() == 0 +} + +// Exponent returns the exponent, or scale component of the decimal. +func (d Decimal) Exponent() int32 { + return d.exp +} + +// Coefficient returns the coefficient of the decimal. It is scaled by 10^Exponent() +func (d Decimal) Coefficient() *big.Int { + d.ensureInitialized() + // we copy the coefficient so that mutating the result does not mutate the + // Decimal. + return big.NewInt(0).Set(d.value) +} + +// IntPart returns the integer component of the decimal. +func (d Decimal) IntPart() int64 { + scaledD := d.rescale(0) + return scaledD.value.Int64() +} + +// BigInt returns integer component of the decimal as a BigInt. +func (d Decimal) BigInt() *big.Int { + scaledD := d.rescale(0) + i := &big.Int{} + i.SetString(scaledD.String(), 10) + return i +} + +// BigFloat returns decimal as BigFloat. +// Be aware that casting decimal to BigFloat might cause a loss of precision. +func (d Decimal) BigFloat() *big.Float { + f := &big.Float{} + f.SetString(d.String()) + return f +} + +// Rat returns a rational number representation of the decimal. +func (d Decimal) Rat() *big.Rat { + d.ensureInitialized() + if d.exp <= 0 { + // NOTE(vadim): must negate after casting to prevent int32 overflow + denom := new(big.Int).Exp(tenInt, big.NewInt(-int64(d.exp)), nil) + return new(big.Rat).SetFrac(d.value, denom) + } + + mul := new(big.Int).Exp(tenInt, big.NewInt(int64(d.exp)), nil) + num := new(big.Int).Mul(d.value, mul) + return new(big.Rat).SetFrac(num, oneInt) +} + +// Float64 returns the nearest float64 value for d and a bool indicating +// whether f represents d exactly. +// For more details, see the documentation for big.Rat.Float64 +func (d Decimal) Float64() (f float64, exact bool) { + return d.Rat().Float64() +} + +// String returns the string representation of the decimal +// with the fixed point. +// +// Example: +// +// d := New(-12345, -3) +// println(d.String()) +// +// Output: +// +// -12.345 +// +func (d Decimal) String() string { + return d.string(true) +} + +// StringFixed returns a rounded fixed-point string with places digits after +// the decimal point. +// +// Example: +// +// NewFromFloat(0).StringFixed(2) // output: "0.00" +// NewFromFloat(0).StringFixed(0) // output: "0" +// NewFromFloat(5.45).StringFixed(0) // output: "5" +// NewFromFloat(5.45).StringFixed(1) // output: "5.5" +// NewFromFloat(5.45).StringFixed(2) // output: "5.45" +// NewFromFloat(5.45).StringFixed(3) // output: "5.450" +// NewFromFloat(545).StringFixed(-1) // output: "550" +// +func (d Decimal) StringFixed(places int32) string { + rounded := d.Round(places) + return rounded.string(false) +} + +// StringFixedBank returns a banker rounded fixed-point string with places digits +// after the decimal point. 
+// +// Example: +// +// NewFromFloat(0).StringFixedBank(2) // output: "0.00" +// NewFromFloat(0).StringFixedBank(0) // output: "0" +// NewFromFloat(5.45).StringFixedBank(0) // output: "5" +// NewFromFloat(5.45).StringFixedBank(1) // output: "5.4" +// NewFromFloat(5.45).StringFixedBank(2) // output: "5.45" +// NewFromFloat(5.45).StringFixedBank(3) // output: "5.450" +// NewFromFloat(545).StringFixedBank(-1) // output: "540" +// +func (d Decimal) StringFixedBank(places int32) string { + rounded := d.RoundBank(places) + return rounded.string(false) +} + +// StringFixedCash returns a Swedish/Cash rounded fixed-point string. For +// more details see the documentation at function RoundCash. +func (d Decimal) StringFixedCash(interval uint8) string { + rounded := d.RoundCash(interval) + return rounded.string(false) +} + +// Round rounds the decimal to places decimal places. +// If places < 0, it will round the integer part to the nearest 10^(-places). +// +// Example: +// +// NewFromFloat(5.45).Round(1).String() // output: "5.5" +// NewFromFloat(545).Round(-1).String() // output: "550" +// +func (d Decimal) Round(places int32) Decimal { + // truncate to places + 1 + ret := d.rescale(-places - 1) + + // add sign(d) * 0.5 + if ret.value.Sign() < 0 { + ret.value.Sub(ret.value, fiveInt) + } else { + ret.value.Add(ret.value, fiveInt) + } + + // floor for positive numbers, ceil for negative numbers + _, m := ret.value.DivMod(ret.value, tenInt, new(big.Int)) + ret.exp++ + if ret.value.Sign() < 0 && m.Cmp(zeroInt) != 0 { + ret.value.Add(ret.value, oneInt) + } + + return ret +} + +// RoundBank rounds the decimal to places decimal places. +// If the final digit to round is equidistant from the nearest two integers the +// rounded value is taken as the even number +// +// If places < 0, it will round the integer part to the nearest 10^(-places). +// +// Examples: +// +// NewFromFloat(5.45).Round(1).String() // output: "5.4" +// NewFromFloat(545).Round(-1).String() // output: "540" +// NewFromFloat(5.46).Round(1).String() // output: "5.5" +// NewFromFloat(546).Round(-1).String() // output: "550" +// NewFromFloat(5.55).Round(1).String() // output: "5.6" +// NewFromFloat(555).Round(-1).String() // output: "560" +// +func (d Decimal) RoundBank(places int32) Decimal { + + round := d.Round(places) + remainder := d.Sub(round).Abs() + + half := New(5, -places-1) + if remainder.Cmp(half) == 0 && round.value.Bit(0) != 0 { + if round.value.Sign() < 0 { + round.value.Add(round.value, oneInt) + } else { + round.value.Sub(round.value, oneInt) + } + } + + return round +} + +// RoundCash aka Cash/Penny/öre rounding rounds decimal to a specific +// interval. The amount payable for a cash transaction is rounded to the nearest +// multiple of the minimum currency unit available. The following intervals are +// available: 5, 10, 25, 50 and 100; any other number throws a panic. +// 5: 5 cent rounding 3.43 => 3.45 +// 10: 10 cent rounding 3.45 => 3.50 (5 gets rounded up) +// 25: 25 cent rounding 3.41 => 3.50 +// 50: 50 cent rounding 3.75 => 4.00 +// 100: 100 cent rounding 3.50 => 4.00 +// For more details: https://en.wikipedia.org/wiki/Cash_rounding +func (d Decimal) RoundCash(interval uint8) Decimal { + var iVal *big.Int + switch interval { + case 5: + iVal = twentyInt + case 10: + iVal = tenInt + case 25: + iVal = fourInt + case 50: + iVal = twoInt + case 100: + iVal = oneInt + default: + panic(fmt.Sprintf("Decimal does not support this Cash rounding interval `%d`. 
Supported: 5, 10, 25, 50, 100", interval)) + } + dVal := Decimal{ + value: iVal, + } + + // TODO: optimize those calculations to reduce the high allocations (~29 allocs). + return d.Mul(dVal).Round(0).Div(dVal).Truncate(2) +} + +// Floor returns the nearest integer value less than or equal to d. +func (d Decimal) Floor() Decimal { + d.ensureInitialized() + + if d.exp >= 0 { + return d + } + + exp := big.NewInt(10) + + // NOTE(vadim): must negate after casting to prevent int32 overflow + exp.Exp(exp, big.NewInt(-int64(d.exp)), nil) + + z := new(big.Int).Div(d.value, exp) + return Decimal{value: z, exp: 0} +} + +// Ceil returns the nearest integer value greater than or equal to d. +func (d Decimal) Ceil() Decimal { + d.ensureInitialized() + + if d.exp >= 0 { + return d + } + + exp := big.NewInt(10) + + // NOTE(vadim): must negate after casting to prevent int32 overflow + exp.Exp(exp, big.NewInt(-int64(d.exp)), nil) + + z, m := new(big.Int).DivMod(d.value, exp, new(big.Int)) + if m.Cmp(zeroInt) != 0 { + z.Add(z, oneInt) + } + return Decimal{value: z, exp: 0} +} + +// Truncate truncates off digits from the number, without rounding. +// +// NOTE: precision is the last digit that will not be truncated (must be >= 0). +// +// Example: +// +// decimal.NewFromString("123.456").Truncate(2).String() // "123.45" +// +func (d Decimal) Truncate(precision int32) Decimal { + d.ensureInitialized() + if precision >= 0 && -precision > d.exp { + return d.rescale(-precision) + } + return d +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (d *Decimal) UnmarshalJSON(decimalBytes []byte) error { + if string(decimalBytes) == "null" { + return nil + } + + str, err := unquoteIfQuoted(decimalBytes) + if err != nil { + return fmt.Errorf("error decoding string '%s': %s", decimalBytes, err) + } + + decimal, err := NewFromString(str) + *d = decimal + if err != nil { + return fmt.Errorf("error decoding string '%s': %s", str, err) + } + return nil +} + +// MarshalJSON implements the json.Marshaler interface. +func (d Decimal) MarshalJSON() ([]byte, error) { + var str string + if MarshalJSONWithoutQuotes { + str = d.String() + } else { + str = "\"" + d.String() + "\"" + } + return []byte(str), nil +} + +// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. As a string representation +// is already used when encoding to text, this method stores that string as []byte +func (d *Decimal) UnmarshalBinary(data []byte) error { + // Extract the exponent + d.exp = int32(binary.BigEndian.Uint32(data[:4])) + + // Extract the value + d.value = new(big.Int) + return d.value.GobDecode(data[4:]) +} + +// MarshalBinary implements the encoding.BinaryMarshaler interface. +func (d Decimal) MarshalBinary() (data []byte, err error) { + // Write the exponent first since it's a fixed size + v1 := make([]byte, 4) + binary.BigEndian.PutUint32(v1, uint32(d.exp)) + + // Add the value + var v2 []byte + if v2, err = d.value.GobEncode(); err != nil { + return + } + + // Return the byte array + data = append(v1, v2...) + return +} + +// Scan implements the sql.Scanner interface for database deserialization. 
+func (d *Decimal) Scan(value interface{}) error { + // first try to see if the data is stored in database as a Numeric datatype + switch v := value.(type) { + + case float32: + *d = NewFromFloat(float64(v)) + return nil + + case float64: + // numeric in sqlite3 sends us float64 + *d = NewFromFloat(v) + return nil + + case int64: + // at least in sqlite3 when the value is 0 in db, the data is sent + // to us as an int64 instead of a float64 ... + *d = New(v, 0) + return nil + + default: + // default is trying to interpret value stored as string + str, err := unquoteIfQuoted(v) + if err != nil { + return err + } + *d, err = NewFromString(str) + return err + } +} + +// Value implements the driver.Valuer interface for database serialization. +func (d Decimal) Value() (driver.Value, error) { + return d.String(), nil +} + +// UnmarshalText implements the encoding.TextUnmarshaler interface for XML +// deserialization. +func (d *Decimal) UnmarshalText(text []byte) error { + str := string(text) + + dec, err := NewFromString(str) + *d = dec + if err != nil { + return fmt.Errorf("error decoding string '%s': %s", str, err) + } + + return nil +} + +// MarshalText implements the encoding.TextMarshaler interface for XML +// serialization. +func (d Decimal) MarshalText() (text []byte, err error) { + return []byte(d.String()), nil +} + +// GobEncode implements the gob.GobEncoder interface for gob serialization. +func (d Decimal) GobEncode() ([]byte, error) { + return d.MarshalBinary() +} + +// GobDecode implements the gob.GobDecoder interface for gob serialization. +func (d *Decimal) GobDecode(data []byte) error { + return d.UnmarshalBinary(data) +} + +// StringScaled first scales the decimal then calls .String() on it. +// NOTE: buggy, unintuitive, and DEPRECATED! Use StringFixed instead. +func (d Decimal) StringScaled(exp int32) string { + return d.rescale(exp).String() +} + +func (d Decimal) string(trimTrailingZeros bool) string { + if d.exp >= 0 { + return d.rescale(0).value.String() + } + + abs := new(big.Int).Abs(d.value) + str := abs.String() + + var intPart, fractionalPart string + + // NOTE(vadim): this cast to int will cause bugs if d.exp == INT_MIN + // and you are on a 32-bit machine. Won't fix this super-edge case. + dExpInt := int(d.exp) + if len(str) > -dExpInt { + intPart = str[:len(str)+dExpInt] + fractionalPart = str[len(str)+dExpInt:] + } else { + intPart = "0" + + num0s := -dExpInt - len(str) + fractionalPart = strings.Repeat("0", num0s) + str + } + + if trimTrailingZeros { + i := len(fractionalPart) - 1 + for ; i >= 0; i-- { + if fractionalPart[i] != '0' { + break + } + } + fractionalPart = fractionalPart[:i+1] + } + + number := intPart + if len(fractionalPart) > 0 { + number += "." + fractionalPart + } + + if d.value.Sign() < 0 { + return "-" + number + } + + return number +} + +func (d *Decimal) ensureInitialized() { + if d.value == nil { + d.value = new(big.Int) + } +} + +// Min returns the smallest Decimal that was passed in the arguments. +// +// To call this function with an array, you must do: +// +// Min(arr[0], arr[1:]...) +// +// This makes it harder to accidentally call Min with 0 arguments. +func Min(first Decimal, rest ...Decimal) Decimal { + ans := first + for _, item := range rest { + if item.Cmp(ans) < 0 { + ans = item + } + } + return ans +} + +// Max returns the largest Decimal that was passed in the arguments. +// +// To call this function with an array, you must do: +// +// Max(arr[0], arr[1:]...) 
+// +// This makes it harder to accidentally call Max with 0 arguments. +func Max(first Decimal, rest ...Decimal) Decimal { + ans := first + for _, item := range rest { + if item.Cmp(ans) > 0 { + ans = item + } + } + return ans +} + +// Sum returns the combined total of the provided first and rest Decimals +func Sum(first Decimal, rest ...Decimal) Decimal { + total := first + for _, item := range rest { + total = total.Add(item) + } + + return total +} + +// Avg returns the average value of the provided first and rest Decimals +func Avg(first Decimal, rest ...Decimal) Decimal { + count := New(int64(len(rest)+1), 0) + sum := Sum(first, rest...) + return sum.Div(count) +} + +// RescalePair rescales two decimals to common exponential value (minimal exp of both decimals) +func RescalePair(d1 Decimal, d2 Decimal) (Decimal, Decimal) { + d1.ensureInitialized() + d2.ensureInitialized() + + if d1.exp == d2.exp { + return d1, d2 + } + + baseScale := min(d1.exp, d2.exp) + if baseScale != d1.exp { + return d1.rescale(baseScale), d2 + } + return d1, d2.rescale(baseScale) +} + +func min(x, y int32) int32 { + if x >= y { + return y + } + return x +} + +func unquoteIfQuoted(value interface{}) (string, error) { + var bytes []byte + + switch v := value.(type) { + case string: + bytes = []byte(v) + case []byte: + bytes = v + default: + return "", fmt.Errorf("could not convert value '%+v' to byte array of type '%T'", + value, value) + } + + // If the amount is quoted, strip the quotes + if len(bytes) > 2 && bytes[0] == '"' && bytes[len(bytes)-1] == '"' { + bytes = bytes[1 : len(bytes)-1] + } + return string(bytes), nil +} + +// NullDecimal represents a nullable decimal with compatibility for +// scanning null values from the database. +type NullDecimal struct { + Decimal Decimal + Valid bool +} + +// Scan implements the sql.Scanner interface for database deserialization. +func (d *NullDecimal) Scan(value interface{}) error { + if value == nil { + d.Valid = false + return nil + } + d.Valid = true + return d.Decimal.Scan(value) +} + +// Value implements the driver.Valuer interface for database serialization. +func (d NullDecimal) Value() (driver.Value, error) { + if !d.Valid { + return nil, nil + } + return d.Decimal.Value() +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (d *NullDecimal) UnmarshalJSON(decimalBytes []byte) error { + if string(decimalBytes) == "null" { + d.Valid = false + return nil + } + d.Valid = true + return d.Decimal.UnmarshalJSON(decimalBytes) +} + +// MarshalJSON implements the json.Marshaler interface. +func (d NullDecimal) MarshalJSON() ([]byte, error) { + if !d.Valid { + return []byte("null"), nil + } + return d.Decimal.MarshalJSON() +} + +// Trig functions + +// Atan returns the arctangent, in radians, of x. 
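// A short, hedged sketch of the aggregate helpers above (Min, Max, Sum, Avg);
// the slice and its values are purely illustrative:
//
//	prices := []decimal.Decimal{
//		decimal.NewFromFloat(1.10),
//		decimal.NewFromFloat(2.50),
//		decimal.NewFromFloat(0.35),
//	}
//	decimal.Min(prices[0], prices[1:]...) // 0.35
//	decimal.Max(prices[0], prices[1:]...) // 2.5
//	decimal.Sum(prices[0], prices[1:]...) // 3.95
//	decimal.Avg(prices[0], prices[1:]...) // ≈ 1.3166666666666667 (subject to the package's division precision)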
+func (d Decimal) Atan() Decimal { + if d.Equal(NewFromFloat(0.0)) { + return d + } + if d.GreaterThan(NewFromFloat(0.0)) { + return d.satan() + } + return d.Neg().satan().Neg() +} + +func (d Decimal) xatan() Decimal { + P0 := NewFromFloat(-8.750608600031904122785e-01) + P1 := NewFromFloat(-1.615753718733365076637e+01) + P2 := NewFromFloat(-7.500855792314704667340e+01) + P3 := NewFromFloat(-1.228866684490136173410e+02) + P4 := NewFromFloat(-6.485021904942025371773e+01) + Q0 := NewFromFloat(2.485846490142306297962e+01) + Q1 := NewFromFloat(1.650270098316988542046e+02) + Q2 := NewFromFloat(4.328810604912902668951e+02) + Q3 := NewFromFloat(4.853903996359136964868e+02) + Q4 := NewFromFloat(1.945506571482613964425e+02) + z := d.Mul(d) + b1 := P0.Mul(z).Add(P1).Mul(z).Add(P2).Mul(z).Add(P3).Mul(z).Add(P4).Mul(z) + b2 := z.Add(Q0).Mul(z).Add(Q1).Mul(z).Add(Q2).Mul(z).Add(Q3).Mul(z).Add(Q4) + z = b1.Div(b2) + z = d.Mul(z).Add(d) + return z +} + +// satan reduces its argument (known to be positive) +// to the range [0, 0.66] and calls xatan. +func (d Decimal) satan() Decimal { + Morebits := NewFromFloat(6.123233995736765886130e-17) // pi/2 = PIO2 + Morebits + Tan3pio8 := NewFromFloat(2.41421356237309504880) // tan(3*pi/8) + pi := NewFromFloat(3.14159265358979323846264338327950288419716939937510582097494459) + + if d.LessThanOrEqual(NewFromFloat(0.66)) { + return d.xatan() + } + if d.GreaterThan(Tan3pio8) { + return pi.Div(NewFromFloat(2.0)).Sub(NewFromFloat(1.0).Div(d).xatan()).Add(Morebits) + } + return pi.Div(NewFromFloat(4.0)).Add((d.Sub(NewFromFloat(1.0)).Div(d.Add(NewFromFloat(1.0)))).xatan()).Add(NewFromFloat(0.5).Mul(Morebits)) +} + +// sin coefficients +var _sin = [...]Decimal{ + NewFromFloat(1.58962301576546568060e-10), // 0x3de5d8fd1fd19ccd + NewFromFloat(-2.50507477628578072866e-8), // 0xbe5ae5e5a9291f5d + NewFromFloat(2.75573136213857245213e-6), // 0x3ec71de3567d48a1 + NewFromFloat(-1.98412698295895385996e-4), // 0xbf2a01a019bfdf03 + NewFromFloat(8.33333333332211858878e-3), // 0x3f8111111110f7d0 + NewFromFloat(-1.66666666666666307295e-1), // 0xbfc5555555555548 +} + +// Sin returns the sine of the radian argument x. 
+func (d Decimal) Sin() Decimal { + PI4A := NewFromFloat(7.85398125648498535156e-1) // 0x3fe921fb40000000, Pi/4 split into three parts + PI4B := NewFromFloat(3.77489470793079817668e-8) // 0x3e64442d00000000, + PI4C := NewFromFloat(2.69515142907905952645e-15) // 0x3ce8469898cc5170, + M4PI := NewFromFloat(1.273239544735162542821171882678754627704620361328125) // 4/pi + + if d.Equal(NewFromFloat(0.0)) { + return d + } + // make argument positive but save the sign + sign := false + if d.LessThan(NewFromFloat(0.0)) { + d = d.Neg() + sign = true + } + + j := d.Mul(M4PI).IntPart() // integer part of x/(Pi/4), as integer for tests on the phase angle + y := NewFromFloat(float64(j)) // integer part of x/(Pi/4), as float + + // map zeros to origin + if j&1 == 1 { + j++ + y = y.Add(NewFromFloat(1.0)) + } + j &= 7 // octant modulo 2Pi radians (360 degrees) + // reflect in x axis + if j > 3 { + sign = !sign + j -= 4 + } + z := d.Sub(y.Mul(PI4A)).Sub(y.Mul(PI4B)).Sub(y.Mul(PI4C)) // Extended precision modular arithmetic + zz := z.Mul(z) + + if j == 1 || j == 2 { + w := zz.Mul(zz).Mul(_cos[0].Mul(zz).Add(_cos[1]).Mul(zz).Add(_cos[2]).Mul(zz).Add(_cos[3]).Mul(zz).Add(_cos[4]).Mul(zz).Add(_cos[5])) + y = NewFromFloat(1.0).Sub(NewFromFloat(0.5).Mul(zz)).Add(w) + } else { + y = z.Add(z.Mul(zz).Mul(_sin[0].Mul(zz).Add(_sin[1]).Mul(zz).Add(_sin[2]).Mul(zz).Add(_sin[3]).Mul(zz).Add(_sin[4]).Mul(zz).Add(_sin[5]))) + } + if sign { + y = y.Neg() + } + return y +} + +// cos coefficients +var _cos = [...]Decimal{ + NewFromFloat(-1.13585365213876817300e-11), // 0xbda8fa49a0861a9b + NewFromFloat(2.08757008419747316778e-9), // 0x3e21ee9d7b4e3f05 + NewFromFloat(-2.75573141792967388112e-7), // 0xbe927e4f7eac4bc6 + NewFromFloat(2.48015872888517045348e-5), // 0x3efa01a019c844f5 + NewFromFloat(-1.38888888888730564116e-3), // 0xbf56c16c16c14f91 + NewFromFloat(4.16666666666665929218e-2), // 0x3fa555555555554b +} + +// Cos returns the cosine of the radian argument x. 
+func (d Decimal) Cos() Decimal { + + PI4A := NewFromFloat(7.85398125648498535156e-1) // 0x3fe921fb40000000, Pi/4 split into three parts + PI4B := NewFromFloat(3.77489470793079817668e-8) // 0x3e64442d00000000, + PI4C := NewFromFloat(2.69515142907905952645e-15) // 0x3ce8469898cc5170, + M4PI := NewFromFloat(1.273239544735162542821171882678754627704620361328125) // 4/pi + + // make argument positive + sign := false + if d.LessThan(NewFromFloat(0.0)) { + d = d.Neg() + } + + j := d.Mul(M4PI).IntPart() // integer part of x/(Pi/4), as integer for tests on the phase angle + y := NewFromFloat(float64(j)) // integer part of x/(Pi/4), as float + + // map zeros to origin + if j&1 == 1 { + j++ + y = y.Add(NewFromFloat(1.0)) + } + j &= 7 // octant modulo 2Pi radians (360 degrees) + // reflect in x axis + if j > 3 { + sign = !sign + j -= 4 + } + if j > 1 { + sign = !sign + } + + z := d.Sub(y.Mul(PI4A)).Sub(y.Mul(PI4B)).Sub(y.Mul(PI4C)) // Extended precision modular arithmetic + zz := z.Mul(z) + + if j == 1 || j == 2 { + y = z.Add(z.Mul(zz).Mul(_sin[0].Mul(zz).Add(_sin[1]).Mul(zz).Add(_sin[2]).Mul(zz).Add(_sin[3]).Mul(zz).Add(_sin[4]).Mul(zz).Add(_sin[5]))) + } else { + w := zz.Mul(zz).Mul(_cos[0].Mul(zz).Add(_cos[1]).Mul(zz).Add(_cos[2]).Mul(zz).Add(_cos[3]).Mul(zz).Add(_cos[4]).Mul(zz).Add(_cos[5])) + y = NewFromFloat(1.0).Sub(NewFromFloat(0.5).Mul(zz)).Add(w) + } + if sign { + y = y.Neg() + } + return y +} + +var _tanP = [...]Decimal{ + NewFromFloat(-1.30936939181383777646e+4), // 0xc0c992d8d24f3f38 + NewFromFloat(1.15351664838587416140e+6), // 0x413199eca5fc9ddd + NewFromFloat(-1.79565251976484877988e+7), // 0xc1711fead3299176 +} +var _tanQ = [...]Decimal{ + NewFromFloat(1.00000000000000000000e+0), + NewFromFloat(1.36812963470692954678e+4), //0x40cab8a5eeb36572 + NewFromFloat(-1.32089234440210967447e+6), //0xc13427bc582abc96 + NewFromFloat(2.50083801823357915839e+7), //0x4177d98fc2ead8ef + NewFromFloat(-5.38695755929454629881e+7), //0xc189afe03cbe5a31 +} + +// Tan returns the tangent of the radian argument x. 
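// A hedged usage sketch for the trigonometric methods above (math assumed
// imported); the results are approximations, since the coefficients and the
// argument-reduction constants are float64-derived:
//
//	x := decimal.NewFromFloat(math.Pi / 4)
//	x.Sin()  // ≈ 0.7071067811865476
//	x.Cos()  // ≈ 0.7071067811865476
//	x.Atan() // ≈ 0.6657737500283538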
+func (d Decimal) Tan() Decimal { + + PI4A := NewFromFloat(7.85398125648498535156e-1) // 0x3fe921fb40000000, Pi/4 split into three parts + PI4B := NewFromFloat(3.77489470793079817668e-8) // 0x3e64442d00000000, + PI4C := NewFromFloat(2.69515142907905952645e-15) // 0x3ce8469898cc5170, + M4PI := NewFromFloat(1.273239544735162542821171882678754627704620361328125) // 4/pi + + if d.Equal(NewFromFloat(0.0)) { + return d + } + + // make argument positive but save the sign + sign := false + if d.LessThan(NewFromFloat(0.0)) { + d = d.Neg() + sign = true + } + + j := d.Mul(M4PI).IntPart() // integer part of x/(Pi/4), as integer for tests on the phase angle + y := NewFromFloat(float64(j)) // integer part of x/(Pi/4), as float + + // map zeros to origin + if j&1 == 1 { + j++ + y = y.Add(NewFromFloat(1.0)) + } + + z := d.Sub(y.Mul(PI4A)).Sub(y.Mul(PI4B)).Sub(y.Mul(PI4C)) // Extended precision modular arithmetic + zz := z.Mul(z) + + if zz.GreaterThan(NewFromFloat(1e-14)) { + w := zz.Mul(_tanP[0].Mul(zz).Add(_tanP[1]).Mul(zz).Add(_tanP[2])) + x := zz.Add(_tanQ[1]).Mul(zz).Add(_tanQ[2]).Mul(zz).Add(_tanQ[3]).Mul(zz).Add(_tanQ[4]) + y = z.Add(z.Mul(w.Div(x))) + } else { + y = z + } + if j&2 == 2 { + y = NewFromFloat(-1.0).Div(y) + } + if sign { + y = y.Neg() + } + return y +} diff --git a/vendor/github.com/shopspring/decimal/go.mod b/vendor/github.com/shopspring/decimal/go.mod new file mode 100644 index 0000000..ae1b7aa --- /dev/null +++ b/vendor/github.com/shopspring/decimal/go.mod @@ -0,0 +1,3 @@ +module github.com/shopspring/decimal + +go 1.13 diff --git a/vendor/github.com/shopspring/decimal/rounding.go b/vendor/github.com/shopspring/decimal/rounding.go new file mode 100644 index 0000000..8008f55 --- /dev/null +++ b/vendor/github.com/shopspring/decimal/rounding.go @@ -0,0 +1,119 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Multiprecision decimal numbers. +// For floating-point formatting only; not general purpose. +// Only operations are assign and (binary) left/right shift. +// Can do binary floating point in multiprecision decimal precisely +// because 2 divides 10; cannot do decimal floating point +// in multiprecision binary precisely. + +package decimal + +type floatInfo struct { + mantbits uint + expbits uint + bias int +} + +var float32info = floatInfo{23, 8, -127} +var float64info = floatInfo{52, 11, -1023} + +// roundShortest rounds d (= mant * 2^exp) to the shortest number of digits +// that will let the original floating point value be precisely reconstructed. +func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) { + // If mantissa is zero, the number is zero; stop now. + if mant == 0 { + d.nd = 0 + return + } + + // Compute upper and lower such that any decimal number + // between upper and lower (possibly inclusive) + // will round to the original floating point number. + + // We may see at once that the number is already shortest. + // + // Suppose d is not denormal, so that 2^exp <= d < 10^dp. + // The closest shorter number is at least 10^(dp-nd) away. + // The lower/upper bounds computed below are at distance + // at most 2^(exp-mantbits). + // + // So the number is already shortest if 10^(dp-nd) > 2^(exp-mantbits), + // or equivalently log2(10)*(dp-nd) > exp-mantbits. + // It is true if 332/100*(dp-nd) >= exp-mantbits (log2(10) > 3.32). 
+ minexp := flt.bias + 1 // minimum possible exponent + if exp > minexp && 332*(d.dp-d.nd) >= 100*(exp-int(flt.mantbits)) { + // The number is already shortest. + return + } + + // d = mant << (exp - mantbits) + // Next highest floating point number is mant+1 << exp-mantbits. + // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1. + upper := new(decimal) + upper.Assign(mant*2 + 1) + upper.Shift(exp - int(flt.mantbits) - 1) + + // d = mant << (exp - mantbits) + // Next lowest floating point number is mant-1 << exp-mantbits, + // unless mant-1 drops the significant bit and exp is not the minimum exp, + // in which case the next lowest is mant*2-1 << exp-mantbits-1. + // Either way, call it mantlo << explo-mantbits. + // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1. + var mantlo uint64 + var explo int + if mant > 1< 1 { + nextSel = selSegs[1] + } + + mapMatches := mapAccessRegex.FindStringSubmatch(s) + if len(mapMatches) > 0 { + if _, err := strconv.Atoi(mapMatches[2]); err != nil { + thisSel = mapMatches[1] + nextSel = "[" + mapMatches[2] + "]" + mapMatches[3] + + if thisSel == "" { + thisSel = mapMatches[2] + nextSel = mapMatches[3] + } + + if nextSel == "" { + selSegs = []string{"", ""} + } else if nextSel[0] == '.' { + nextSel = nextSel[1:] + } + } + } + + return thisSel, nextSel +} + // access accesses the object using the selector and performs the // appropriate action. func access(current interface{}, selector string, value interface{}, isSet bool) interface{} { - selSegs := strings.SplitN(selector, PathSeparator, 2) - thisSel := selSegs[0] - index := -1 + thisSel, nextSel := getKey(selector) + index := -1 if strings.Contains(thisSel, "[") { index, thisSel = getIndex(thisSel) } @@ -88,7 +128,7 @@ func access(current interface{}, selector string, value interface{}, isSet bool) switch current.(type) { case map[string]interface{}: curMSI := current.(map[string]interface{}) - if len(selSegs) <= 1 && isSet { + if nextSel == "" && isSet { curMSI[thisSel] = value return nil } @@ -102,9 +142,10 @@ func access(current interface{}, selector string, value interface{}, isSet bool) default: current = nil } + // do we need to access the item of an array? 
if index > -1 { - if array, ok := current.([]interface{}); ok { + if array, ok := interSlice(current); ok { if index < len(array) { current = array[index] } else { @@ -112,8 +153,27 @@ func access(current interface{}, selector string, value interface{}, isSet bool) } } } - if len(selSegs) > 1 { - current = access(current, selSegs[1], value, isSet) + if nextSel != "" { + current = access(current, nextSel, value, isSet) } return current } + +func interSlice(slice interface{}) ([]interface{}, bool) { + if array, ok := slice.([]interface{}); ok { + return array, ok + } + + s := reflect.ValueOf(slice) + if s.Kind() != reflect.Slice { + return nil, false + } + + ret := make([]interface{}, s.Len()) + + for i := 0; i < s.Len(); i++ { + ret[i] = s.Index(i).Interface() + } + + return ret, true +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_compare.go b/vendor/github.com/stretchr/testify/assert/assertion_compare.go index dc20039..41649d2 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_compare.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_compare.go @@ -13,12 +13,42 @@ const ( compareGreater ) +var ( + intType = reflect.TypeOf(int(1)) + int8Type = reflect.TypeOf(int8(1)) + int16Type = reflect.TypeOf(int16(1)) + int32Type = reflect.TypeOf(int32(1)) + int64Type = reflect.TypeOf(int64(1)) + + uintType = reflect.TypeOf(uint(1)) + uint8Type = reflect.TypeOf(uint8(1)) + uint16Type = reflect.TypeOf(uint16(1)) + uint32Type = reflect.TypeOf(uint32(1)) + uint64Type = reflect.TypeOf(uint64(1)) + + float32Type = reflect.TypeOf(float32(1)) + float64Type = reflect.TypeOf(float64(1)) + + stringType = reflect.TypeOf("") +) + func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { + obj1Value := reflect.ValueOf(obj1) + obj2Value := reflect.ValueOf(obj2) + + // throughout this switch we try and avoid calling .Convert() if possible, + // as this has a pretty big performance impact switch kind { case reflect.Int: { - intobj1 := obj1.(int) - intobj2 := obj2.(int) + intobj1, ok := obj1.(int) + if !ok { + intobj1 = obj1Value.Convert(intType).Interface().(int) + } + intobj2, ok := obj2.(int) + if !ok { + intobj2 = obj2Value.Convert(intType).Interface().(int) + } if intobj1 > intobj2 { return compareGreater, true } @@ -31,8 +61,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int8: { - int8obj1 := obj1.(int8) - int8obj2 := obj2.(int8) + int8obj1, ok := obj1.(int8) + if !ok { + int8obj1 = obj1Value.Convert(int8Type).Interface().(int8) + } + int8obj2, ok := obj2.(int8) + if !ok { + int8obj2 = obj2Value.Convert(int8Type).Interface().(int8) + } if int8obj1 > int8obj2 { return compareGreater, true } @@ -45,8 +81,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int16: { - int16obj1 := obj1.(int16) - int16obj2 := obj2.(int16) + int16obj1, ok := obj1.(int16) + if !ok { + int16obj1 = obj1Value.Convert(int16Type).Interface().(int16) + } + int16obj2, ok := obj2.(int16) + if !ok { + int16obj2 = obj2Value.Convert(int16Type).Interface().(int16) + } if int16obj1 > int16obj2 { return compareGreater, true } @@ -59,8 +101,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int32: { - int32obj1 := obj1.(int32) - int32obj2 := obj2.(int32) + int32obj1, ok := obj1.(int32) + if !ok { + int32obj1 = obj1Value.Convert(int32Type).Interface().(int32) + } + int32obj2, ok := obj2.(int32) + if !ok { + int32obj2 = 
obj2Value.Convert(int32Type).Interface().(int32) + } if int32obj1 > int32obj2 { return compareGreater, true } @@ -73,8 +121,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int64: { - int64obj1 := obj1.(int64) - int64obj2 := obj2.(int64) + int64obj1, ok := obj1.(int64) + if !ok { + int64obj1 = obj1Value.Convert(int64Type).Interface().(int64) + } + int64obj2, ok := obj2.(int64) + if !ok { + int64obj2 = obj2Value.Convert(int64Type).Interface().(int64) + } if int64obj1 > int64obj2 { return compareGreater, true } @@ -87,8 +141,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint: { - uintobj1 := obj1.(uint) - uintobj2 := obj2.(uint) + uintobj1, ok := obj1.(uint) + if !ok { + uintobj1 = obj1Value.Convert(uintType).Interface().(uint) + } + uintobj2, ok := obj2.(uint) + if !ok { + uintobj2 = obj2Value.Convert(uintType).Interface().(uint) + } if uintobj1 > uintobj2 { return compareGreater, true } @@ -101,8 +161,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint8: { - uint8obj1 := obj1.(uint8) - uint8obj2 := obj2.(uint8) + uint8obj1, ok := obj1.(uint8) + if !ok { + uint8obj1 = obj1Value.Convert(uint8Type).Interface().(uint8) + } + uint8obj2, ok := obj2.(uint8) + if !ok { + uint8obj2 = obj2Value.Convert(uint8Type).Interface().(uint8) + } if uint8obj1 > uint8obj2 { return compareGreater, true } @@ -115,8 +181,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint16: { - uint16obj1 := obj1.(uint16) - uint16obj2 := obj2.(uint16) + uint16obj1, ok := obj1.(uint16) + if !ok { + uint16obj1 = obj1Value.Convert(uint16Type).Interface().(uint16) + } + uint16obj2, ok := obj2.(uint16) + if !ok { + uint16obj2 = obj2Value.Convert(uint16Type).Interface().(uint16) + } if uint16obj1 > uint16obj2 { return compareGreater, true } @@ -129,8 +201,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint32: { - uint32obj1 := obj1.(uint32) - uint32obj2 := obj2.(uint32) + uint32obj1, ok := obj1.(uint32) + if !ok { + uint32obj1 = obj1Value.Convert(uint32Type).Interface().(uint32) + } + uint32obj2, ok := obj2.(uint32) + if !ok { + uint32obj2 = obj2Value.Convert(uint32Type).Interface().(uint32) + } if uint32obj1 > uint32obj2 { return compareGreater, true } @@ -143,8 +221,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint64: { - uint64obj1 := obj1.(uint64) - uint64obj2 := obj2.(uint64) + uint64obj1, ok := obj1.(uint64) + if !ok { + uint64obj1 = obj1Value.Convert(uint64Type).Interface().(uint64) + } + uint64obj2, ok := obj2.(uint64) + if !ok { + uint64obj2 = obj2Value.Convert(uint64Type).Interface().(uint64) + } if uint64obj1 > uint64obj2 { return compareGreater, true } @@ -157,8 +241,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Float32: { - float32obj1 := obj1.(float32) - float32obj2 := obj2.(float32) + float32obj1, ok := obj1.(float32) + if !ok { + float32obj1 = obj1Value.Convert(float32Type).Interface().(float32) + } + float32obj2, ok := obj2.(float32) + if !ok { + float32obj2 = obj2Value.Convert(float32Type).Interface().(float32) + } if float32obj1 > float32obj2 { return compareGreater, true } @@ -171,8 +261,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Float64: { - float64obj1 := obj1.(float64) - float64obj2 := 
obj2.(float64) + float64obj1, ok := obj1.(float64) + if !ok { + float64obj1 = obj1Value.Convert(float64Type).Interface().(float64) + } + float64obj2, ok := obj2.(float64) + if !ok { + float64obj2 = obj2Value.Convert(float64Type).Interface().(float64) + } if float64obj1 > float64obj2 { return compareGreater, true } @@ -185,8 +281,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.String: { - stringobj1 := obj1.(string) - stringobj2 := obj2.(string) + stringobj1, ok := obj1.(string) + if !ok { + stringobj1 = obj1Value.Convert(stringType).Interface().(string) + } + stringobj2, ok := obj2.(string) + if !ok { + stringobj2 = obj2Value.Convert(stringType).Interface().(string) + } if stringobj1 > stringobj2 { return compareGreater, true } @@ -240,6 +342,24 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter return compareTwoValues(t, e1, e2, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs) } +// Positive asserts that the specified element is positive +// +// assert.Positive(t, 1) +// assert.Positive(t, 1.23) +func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { + zero := reflect.Zero(reflect.TypeOf(e)) + return compareTwoValues(t, e, zero.Interface(), []CompareType{compareGreater}, "\"%v\" is not positive", msgAndArgs) +} + +// Negative asserts that the specified element is negative +// +// assert.Negative(t, -1) +// assert.Negative(t, -1.23) +func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { + zero := reflect.Zero(reflect.TypeOf(e)) + return compareTwoValues(t, e, zero.Interface(), []CompareType{compareLess}, "\"%v\" is not negative", msgAndArgs) +} + func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go index 49370eb..4dfd122 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -114,6 +114,24 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { return Error(t, err, append([]interface{}{msg}, args...)...) } +// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return ErrorAs(t, err, target, append([]interface{}{msg}, args...)...) +} + +// ErrorIsf asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return ErrorIs(t, err, target, append([]interface{}{msg}, args...)...) +} + // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // @@ -321,6 +339,54 @@ func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsil return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) 
} +// IsDecreasingf asserts that the collection is decreasing +// +// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsDecreasing(t, object, append([]interface{}{msg}, args...)...) +} + +// IsIncreasingf asserts that the collection is increasing +// +// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsIncreasing(t, object, append([]interface{}{msg}, args...)...) +} + +// IsNonDecreasingf asserts that the collection is not decreasing +// +// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsNonDecreasing(t, object, append([]interface{}{msg}, args...)...) +} + +// IsNonIncreasingf asserts that the collection is not increasing +// +// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsNonIncreasing(t, object, append([]interface{}{msg}, args...)...) +} + // IsTypef asserts that the specified objects are of the same type. func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { @@ -375,6 +441,17 @@ func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args . return LessOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) } +// Negativef asserts that the specified element is negative +// +// assert.Negativef(t, -1, "error message %s", "formatted") +// assert.Negativef(t, -1.23, "error message %s", "formatted") +func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Negative(t, e, append([]interface{}{msg}, args...)...) +} + // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // @@ -476,6 +553,15 @@ func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg s return NotEqualValues(t, expected, actual, append([]interface{}{msg}, args...)...) } +// NotErrorIsf asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return NotErrorIs(t, err, target, append([]interface{}{msg}, args...)...) 
+} + // NotNilf asserts that the specified object is not nil. // // assert.NotNilf(t, err, "error message %s", "formatted") @@ -572,6 +658,17 @@ func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg str return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...) } +// Positivef asserts that the specified element is positive +// +// assert.Positivef(t, 1, "error message %s", "formatted") +// assert.Positivef(t, 1.23, "error message %s", "formatted") +func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Positive(t, e, append([]interface{}{msg}, args...)...) +} + // Regexpf asserts that a specified regexp matches a string. // // assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index 9db8894..25337a6 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -204,6 +204,42 @@ func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { return Error(a.t, err, msgAndArgs...) } +// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func (a *Assertions) ErrorAs(err error, target interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorAs(a.t, err, target, msgAndArgs...) +} + +// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorAsf(a.t, err, target, msg, args...) +} + +// ErrorIs asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) ErrorIs(err error, target error, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorIs(a.t, err, target, msgAndArgs...) +} + +// ErrorIsf asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorIsf(a.t, err, target, msg, args...) +} + // Errorf asserts that a function returned an error (i.e. not `nil`). // // actualObj, err := SomeFunction() @@ -631,6 +667,102 @@ func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilo return InEpsilonf(a.t, expected, actual, epsilon, msg, args...) } +// IsDecreasing asserts that the collection is decreasing +// +// a.IsDecreasing([]int{2, 1, 0}) +// a.IsDecreasing([]float{2, 1}) +// a.IsDecreasing([]string{"b", "a"}) +func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsDecreasing(a.t, object, msgAndArgs...) 
+} + +// IsDecreasingf asserts that the collection is decreasing +// +// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") +// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") +func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsDecreasingf(a.t, object, msg, args...) +} + +// IsIncreasing asserts that the collection is increasing +// +// a.IsIncreasing([]int{1, 2, 3}) +// a.IsIncreasing([]float{1, 2}) +// a.IsIncreasing([]string{"a", "b"}) +func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsIncreasing(a.t, object, msgAndArgs...) +} + +// IsIncreasingf asserts that the collection is increasing +// +// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") +// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") +func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsIncreasingf(a.t, object, msg, args...) +} + +// IsNonDecreasing asserts that the collection is not decreasing +// +// a.IsNonDecreasing([]int{1, 1, 2}) +// a.IsNonDecreasing([]float{1, 2}) +// a.IsNonDecreasing([]string{"a", "b"}) +func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonDecreasing(a.t, object, msgAndArgs...) +} + +// IsNonDecreasingf asserts that the collection is not decreasing +// +// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") +func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonDecreasingf(a.t, object, msg, args...) +} + +// IsNonIncreasing asserts that the collection is not increasing +// +// a.IsNonIncreasing([]int{2, 1, 1}) +// a.IsNonIncreasing([]float{2, 1}) +// a.IsNonIncreasing([]string{"b", "a"}) +func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonIncreasing(a.t, object, msgAndArgs...) +} + +// IsNonIncreasingf asserts that the collection is not increasing +// +// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") +func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonIncreasingf(a.t, object, msg, args...) +} + // IsType asserts that the specified objects are of the same type. func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { @@ -739,6 +871,28 @@ func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...i return Lessf(a.t, e1, e2, msg, args...) 
} +// Negative asserts that the specified element is negative +// +// a.Negative(-1) +// a.Negative(-1.23) +func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Negative(a.t, e, msgAndArgs...) +} + +// Negativef asserts that the specified element is negative +// +// a.Negativef(-1, "error message %s", "formatted") +// a.Negativef(-1.23, "error message %s", "formatted") +func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Negativef(a.t, e, msg, args...) +} + // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // @@ -941,6 +1095,24 @@ func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg str return NotEqualf(a.t, expected, actual, msg, args...) } +// NotErrorIs asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) NotErrorIs(err error, target error, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NotErrorIs(a.t, err, target, msgAndArgs...) +} + +// NotErrorIsf asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NotErrorIsf(a.t, err, target, msg, args...) +} + // NotNil asserts that the specified object is not nil. // // a.NotNil(err) @@ -1133,6 +1305,28 @@ func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) b return Panicsf(a.t, f, msg, args...) } +// Positive asserts that the specified element is positive +// +// a.Positive(1) +// a.Positive(1.23) +func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Positive(a.t, e, msgAndArgs...) +} + +// Positivef asserts that the specified element is positive +// +// a.Positivef(1, "error message %s", "formatted") +// a.Positivef(1.23, "error message %s", "formatted") +func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Positivef(a.t, e, msg, args...) +} + // Regexp asserts that a specified regexp matches a string. // // a.Regexp(regexp.MustCompile("start"), "it's starting") diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go new file mode 100644 index 0000000..1c3b471 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -0,0 +1,81 @@ +package assert + +import ( + "fmt" + "reflect" +) + +// isOrdered checks that collection contains orderable elements. 
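// A minimal, hedged sketch of the assertions added in this patch as they would
// appear inside a test function; the test name, sample values, and sentinel
// errors are illustrative only (fmt, os, and testing assumed imported):
//
//	func TestOrderingAndErrors(t *testing.T) {
//		assert.IsIncreasing(t, []int{1, 2, 3})
//		assert.IsNonDecreasing(t, []int{1, 1, 2})
//		assert.Positive(t, 42)
//		assert.Negative(t, -0.5)
//
//		err := fmt.Errorf("load config: %w", os.ErrNotExist)
//		assert.ErrorIs(t, err, os.ErrNotExist)
//		assert.NotErrorIs(t, err, os.ErrPermission)
//	}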
+func isOrdered(t TestingT, object interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool { + objKind := reflect.TypeOf(object).Kind() + if objKind != reflect.Slice && objKind != reflect.Array { + return false + } + + objValue := reflect.ValueOf(object) + objLen := objValue.Len() + + if objLen <= 1 { + return true + } + + value := objValue.Index(0) + valueInterface := value.Interface() + firstValueKind := value.Kind() + + for i := 1; i < objLen; i++ { + prevValue := value + prevValueInterface := valueInterface + + value = objValue.Index(i) + valueInterface = value.Interface() + + compareResult, isComparable := compare(prevValueInterface, valueInterface, firstValueKind) + + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\" and \"%s\"", reflect.TypeOf(value), reflect.TypeOf(prevValue)), msgAndArgs...) + } + + if !containsValue(allowedComparesResults, compareResult) { + return Fail(t, fmt.Sprintf(failMessage, prevValue, value), msgAndArgs...) + } + } + + return true +} + +// IsIncreasing asserts that the collection is increasing +// +// assert.IsIncreasing(t, []int{1, 2, 3}) +// assert.IsIncreasing(t, []float{1, 2}) +// assert.IsIncreasing(t, []string{"a", "b"}) +func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs) +} + +// IsNonIncreasing asserts that the collection is not increasing +// +// assert.IsNonIncreasing(t, []int{2, 1, 1}) +// assert.IsNonIncreasing(t, []float{2, 1}) +// assert.IsNonIncreasing(t, []string{"b", "a"}) +func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareEqual, compareGreater}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs) +} + +// IsDecreasing asserts that the collection is decreasing +// +// assert.IsDecreasing(t, []int{2, 1, 0}) +// assert.IsDecreasing(t, []float{2, 1}) +// assert.IsDecreasing(t, []string{"b", "a"}) +func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs) +} + +// IsNonDecreasing asserts that the collection is not decreasing +// +// assert.IsNonDecreasing(t, []int{1, 1, 2}) +// assert.IsNonDecreasing(t, []float{1, 2}) +// assert.IsNonDecreasing(t, []string{"a", "b"}) +func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index 914a10d..bcac440 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -172,8 +172,8 @@ func isTest(name, prefix string) bool { if len(name) == len(prefix) { // "Test" is ok return true } - rune, _ := utf8.DecodeRuneInString(name[len(prefix):]) - return !unicode.IsLower(rune) + r, _ := utf8.DecodeRuneInString(name[len(prefix):]) + return !unicode.IsLower(r) } func messageFromMsgAndArgs(msgAndArgs ...interface{}) string { @@ -1622,6 +1622,7 @@ var spewConfig = spew.ConfigState{ DisableCapacities: true, SortKeys: true, DisableMethods: true, + MaxDepth: 10, } type tHelper interface { @@ -1693,3 +1694,81 @@ func Never(t 
TestingT, condition func() bool, waitFor time.Duration, tick time.D } } } + +// ErrorIs asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func ErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if errors.Is(err, target) { + return true + } + + var expectedText string + if target != nil { + expectedText = target.Error() + } + + chain := buildErrorChainString(err) + + return Fail(t, fmt.Sprintf("Target error should be in err chain:\n"+ + "expected: %q\n"+ + "in chain: %s", expectedText, chain, + ), msgAndArgs...) +} + +// NotErrorIs asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func NotErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if !errors.Is(err, target) { + return true + } + + var expectedText string + if target != nil { + expectedText = target.Error() + } + + chain := buildErrorChainString(err) + + return Fail(t, fmt.Sprintf("Target error should not be in err chain:\n"+ + "found: %q\n"+ + "in chain: %s", expectedText, chain, + ), msgAndArgs...) +} + +// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func ErrorAs(t TestingT, err error, target interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if errors.As(err, target) { + return true + } + + chain := buildErrorChainString(err) + + return Fail(t, fmt.Sprintf("Should be in error chain:\n"+ + "expected: %q\n"+ + "in chain: %s", target, chain, + ), msgAndArgs...) +} + +func buildErrorChainString(err error) string { + if err == nil { + return "" + } + + e := errors.Unwrap(err) + chain := fmt.Sprintf("%q", err.Error()) + for e != nil { + chain += fmt.Sprintf("\n\t%q", e.Error()) + e = errors.Unwrap(e) + } + return chain +} diff --git a/vendor/github.com/thoas/go-funk/.travis.yml b/vendor/github.com/thoas/go-funk/.travis.yml index b134094..f8aa08a 100644 --- a/vendor/github.com/thoas/go-funk/.travis.yml +++ b/vendor/github.com/thoas/go-funk/.travis.yml @@ -3,9 +3,5 @@ before_install: - go get golang.org/x/tools/cmd/cover - go get github.com/stretchr/testify go: - - "1.7" - - "1.8" - - "1.9" - - "1.10" - - "tip" + - "1.16" script: make test diff --git a/vendor/github.com/thoas/go-funk/Makefile b/vendor/github.com/thoas/go-funk/Makefile index 2fed2b2..59ae860 100644 --- a/vendor/github.com/thoas/go-funk/Makefile +++ b/vendor/github.com/thoas/go-funk/Makefile @@ -1,5 +1,11 @@ +build: + go build -v ./... + test: - go test -v + go test -v ./... + +lint: + golangci-lint run bench: go test -benchmem -bench . diff --git a/vendor/github.com/thoas/go-funk/README.rst b/vendor/github.com/thoas/go-funk/README.rst index b98dce4..2f6b539 100644 --- a/vendor/github.com/thoas/go-funk/README.rst +++ b/vendor/github.com/thoas/go-funk/README.rst @@ -7,7 +7,7 @@ go-funk .. image:: https://godoc.org/github.com/thoas/go-funk?status.svg :alt: GoDoc - :target: https://godoc.org/github.com/thoas/go-funk + :target: https://pkg.go.dev/github.com/thoas/go-funk .. image:: https://goreportcard.com/badge/github.com/thoas/go-funk :alt: Go report @@ -17,8 +17,8 @@ go-funk Generic helpers rely on reflect_, be careful this code runs exclusively on runtime so you must have a good test suite. -These helpers have started as an experiment to learn reflect_. 
It may looks like lodash_ in some aspects but -it will have its own roadmap, lodash_ is an awesome library with a lot of works behind it, all features included in +These helpers have started as an experiment to learn reflect_. It may look like lodash_ in some aspects but +it will have its own roadmap. lodash_ is an awesome library with a lot of work behind it, all features included in ``go-funk`` come from internal use cases. You can also find typesafe implementation in the godoc_. @@ -87,7 +87,7 @@ funk.Contains Returns true if an element is present in a iteratee (slice, map, string). -One frustrating thing in Go is to implement ``contains`` methods for each types, for example: +One frustrating thing in Go is to implement ``contains`` methods for each type, for example: .. code-block:: go @@ -109,6 +109,9 @@ this can be replaced by ``funk.Contains``: // slice of Foo ptr funk.Contains([]*Foo{f}, f) // true + funk.Contains([]*Foo{f}, func (foo *Foo) bool { + return foo.ID == f.ID + }) // true funk.Contains([]*Foo{f}, nil) // false b := &Foo{ @@ -126,6 +129,9 @@ this can be replaced by ``funk.Contains``: // even map funk.Contains(map[int]string{1: "Florent"}, 1) // true + funk.Contains(map[int]string{1: "Florent"}, func(key int, name string) bool { + return key == 1 // or `name == "Florent"` for the value type + }) // true see also, typesafe implementations: ContainsInt_, ContainsInt64_, ContainsFloat32_, ContainsFloat64_, ContainsString_ @@ -135,16 +141,49 @@ see also, typesafe implementations: ContainsInt_, ContainsInt64_, ContainsFloat3 .. _ContainsInt64: https://godoc.org/github.com/thoas/go-funk#ContainsInt64 .. _ContainsString: https://godoc.org/github.com/thoas/go-funk#ContainsString +funk.Intersect +.............. + +Returns the intersection between two collections. + +.. code-block:: go + + funk.Intersect([]int{1, 2, 3, 4}, []int{2, 4, 6}) // []int{2, 4} + funk.Intersect([]string{"foo", "bar", "hello", "bar"}, []string{"foo", "bar"}) // []string{"foo", "bar"} + +see also, typesafe implementations: IntersectString + +.. IntersectString: https://godoc.org/github.com/thoas/go-funk#IntersectString + + +funk.Difference +.............. + +Returns the difference between two collections. + +.. code-block:: go + + funk.Difference([]int{1, 2, 3, 4}, []int{2, 4, 6}) // []int{1, 3}, []int{6} + funk.Difference([]string{"foo", "bar", "hello", "bar"}, []string{"foo", "bar"}) // []string{"hello"}, []string{} + +see also, typesafe implementations: DifferenceString + +.. DifferenceString: https://godoc.org/github.com/thoas/go-funk#DifferenceString + + funk.IndexOf ............ -Gets the index at which the first occurrence of value is found in array or return -1 +Gets the index at which the first occurrence of a value is found in an array or return -1 if the value cannot be found. .. code-block:: go // slice of string funk.IndexOf([]string{"foo", "bar"}, "bar") // 1 + funk.IndexOf([]string{"foo", "bar"}, func(value string) bool { + return value == "bar" + }) // 1 funk.IndexOf([]string{"foo", "bar"}, "gilles") // -1 see also, typesafe implementations: IndexOfInt_, IndexOfInt64_, IndexOfFloat32_, IndexOfFloat64_, IndexOfString_ @@ -158,13 +197,16 @@ see also, typesafe implementations: IndexOfInt_, IndexOfInt64_, IndexOfFloat32_, funk.LastIndexOf ................ -Gets the index at which the last occurrence of value is found in array or return -1 +Gets the index at which the last occurrence of a value is found in an array or return -1 if the value cannot be found. .. 
code-block:: go // slice of string funk.LastIndexOf([]string{"foo", "bar", "bar"}, "bar") // 2 + funk.LastIndexOf([]string{"foo", "bar"}, func(value string) bool { + return value == "bar" + }) // 2 funk.LastIndexOf([]string{"foo", "bar"}, "gilles") // -1 see also, typesafe implementations: LastIndexOfInt_, LastIndexOfInt64_, LastIndexOfFloat32_, LastIndexOfFloat64_, LastIndexOfString_ @@ -219,6 +261,26 @@ see also, typesafe implementations: FilterInt_, FilterInt64_, FilterFloat32_, Fi .. _FilterInt64: https://godoc.org/github.com/thoas/go-funk#FilterInt64 .. _FilterString: https://godoc.org/github.com/thoas/go-funk#FilterString +funk.Reduce +........... + +Reduces an iteratee based on an accumulator function or operation rune for numbers. + +.. code-block:: go + + // Using operation runes. '+' and '*' only supported. + r := funk.Reduce([]int{1, 2, 3, 4}, '+', float64(0)) // 10 + r := funk.Reduce([]int{1, 2, 3, 4}, '*', 1) // 24 + + // Using accumulator function + r := funk.Reduce([]int{1, 2, 3, 4}, func(acc float64, num int) float64 { + return acc + float64(num) + }, float64(0)) // 10 + + r := funk.Reduce([]int{1, 2, 3, 4}, func(acc string, num int) string { + return acc + fmt.Sprint(num) + }, "") // "1234" + funk.Find ......... @@ -275,10 +337,33 @@ Manipulates an iteratee (map, slice) and transforms it to another type: return fmt.Sprintf("%d", k), v }) // map[string]string{"1": "Florent", "2": "Gilles"} +funk.FlatMap +............ + +Manipulates an iteratee (map, slice) and transforms it to to a flattened collection of another type: + +* map -> slice +* slice -> slice + +.. code-block:: go + + r := funk.FlatMap([][]int{{1, 2}, {3, 4}}, func(x []int) []int { + return append(x, 0) + }) // []int{1, 2, 0, 3, 4, 0} + + mapping := map[string][]int{ + "Florent": {1, 2}, + "Gilles": {3, 4}, + } + + r = funk.FlatMap(mapping, func(k string, v []int) []int { + return v + }) // []int{1, 2, 3, 4} + funk.Get ........ -Retrieves the value at path of struct(s). +Retrieves the value at path of struct(s) or map(s). .. code-block:: go @@ -316,6 +401,34 @@ Retrieves the value at path of struct(s). funk.Get(foo, "Bar.Bars.Bar.Name") // []string{"Level2-1", "Level2-2"} funk.Get(foo, "Bar.Name") // Test +``funk.Get`` also support ``map`` values: + +.. code-block:: go + + bar := map[string]interface{}{ + "Name": "Test", + } + + foo1 := map[string]interface{}{ + "ID": 1, + "FirstName": "Dark", + "LastName": "Vador", + "Age": 30, + "Bar": bar, + } + + foo2 := &map[string]interface{}{ + "ID": 1, + "FirstName": "Dark", + "LastName": "Vador", + "Age": 30, + } // foo2.Bar is nil + + funk.Get(bar, "Name") // "Test" + funk.Get([]map[string]interface{}{foo1, foo2}, "Bar.Name") // []string{"Test"} + funk.Get(foo2, "Bar.Name") // nil + + ``funk.Get`` also handles ``nil`` values: .. code-block:: go @@ -342,6 +455,80 @@ Retrieves the value at path of struct(s). funk.Get([]*Foo{foo1, foo2}, "Bar.Name") // []string{"Test"} funk.Get(foo2, "Bar.Name") // nil + + +funk.GetOrElse +.............. + +Retrieves the value of the pointer or default. + +.. code-block:: go + + str := "hello world" + GetOrElse(&str, "foobar") // string{"hello world"} + GetOrElse(str, "foobar") // string{"hello world"} + GetOrElse(nil, "foobar") // string{"foobar"} + +funk.Set +........ +Set value at a path of a struct + +.. 
code-block:: go + + var bar Bar = Bar{ + Name: "level-0", + Bar: &Bar{ + Name: "level-1", + Bars: []*Bar{ + {Name: "level2-1"}, + {Name: "level2-2"}, + }, + }, + } + + _ = Set(&bar, "level-0-new", "Name") + fmt.Println(bar.Name) // "level-0-new" + + MustSet(&bar, "level-1-new", "Bar.Name") + fmt.Println(bar.Bar.Name) // "level-1-new" + + Set(&bar, "level-2-new", "Bar.Bars.Name") + fmt.Println(bar.Bar.Bars[0].Name) // "level-2-new" + fmt.Println(bar.Bar.Bars[1].Name) // "level-2-new" + +funk.MustSet +............ +Short hand for funk.Set if struct does not contain interface{} field type to discard errors. + +funk.Prune +.......... +Copy a struct with only selected fields. Slice is handled by pruning all elements. + +.. code-block:: go + + bar := &Bar{ + Name: "Test", + } + + foo1 := &Foo{ + ID: 1, + FirstName: "Dark", + LastName: "Vador", + Bar: bar, + } + + pruned, _ := Prune(foo1, []string{"FirstName", "Bar.Name"}) + // *Foo{ + // ID: 0, + // FirstName: "Dark", + // LastName: "", + // Bar: &Bar{Name: "Test}, + // } + +funk.PruneByTag +.......... +Same functionality as funk.Prune, but uses struct tags instead of struct field names. + funk.Keys ......... @@ -417,7 +604,7 @@ If array can't be split evenly, the final chunk will be the remaining element. funk.FlattenDeep ................ -Recursively flattens array. +Recursively flattens an array. .. code-block:: go @@ -494,10 +681,25 @@ see also, typesafe implementations: ShuffleInt_, ShuffleInt64_, ShuffleFloat32_, .. _ShuffleInt64: https://godoc.org/github.com/thoas/go-funk#ShuffleInt64 .. _ShuffleString: https://godoc.org/github.com/thoas/go-funk#ShuffleString +funk.Subtract +............. + +Returns the subtraction between two collections. It preserve order. + +.. code-block:: go + + funk.Subtract([]int{0, 1, 2, 3, 4}, []int{0, 4}) // []int{1, 2, 3} + funk.Subtract([]int{0, 3, 2, 3, 4}, []int{0, 4}) // []int{3, 2, 3} + + +see also, typesafe implementations: SubtractString_ + +.. SubtractString: https://godoc.org/github.com/thoas/go-funk#SubtractString + funk.Sum ........ -Computes the sum of the values in array. +Computes the sum of the values in an array. .. code-block:: go @@ -514,7 +716,7 @@ see also, typesafe implementations: SumInt_, SumInt64_, SumFloat32_, SumFloat64_ funk.Reverse ............ -Transforms an array the first element will become the last, the second element +Transforms an array such that the first element will become the last, the second element will become the second to last, etc. .. code-block:: go @@ -568,12 +770,23 @@ Generates a sharded string with a fixed length and depth. funk.Shard("e89d66bdfdd4dd26b682cc77e23a86eb", 2, 2, false) // []string{"e8", "9d", "e89d66bdfdd4dd26b682cc77e23a86eb"} - funk.Shard("e89d66bdfdd4dd26b682cc77e23a86eb", 2, 2, true) // []string{"e8", "9d", "66", "bdfdd4dd26b682cc77e23a86eb"} + funk.Shard("e89d66bdfdd4dd26b682cc77e23a86eb", 2, 3, true) // []string{"e8", "9d", "66", "bdfdd4dd26b682cc77e23a86eb"} + +funk.Subset +............. + +Returns true if a collection is a subset of another + +.. code-block:: go + funk.Subset([]int{1, 2, 4}, []int{1, 2, 3, 4, 5}) // true + funk.Subset([]string{"foo", "bar"},[]string{"foo", "bar", "hello", "bar", "hi"}) //true + + Performance ----------- -``go-funk`` has currently an open issue about performance_, don't hesitate to participate in the discussion +``go-funk`` currently has an open issue about performance_, don't hesitate to participate in the discussion to enhance the generic helpers implementations. 
Let's stop beating around the bush, a typesafe implementation in pure Go of ``funk.Contains``, let's say for example: @@ -592,7 +805,7 @@ Let's stop beating around the bush, a typesafe implementation in pure Go of ``fu will always outperform an implementation based on reflect_ in terms of speed and allocs because of how it's implemented in the language. -If you want a similarity gorm_ will always be slower than sqlx_ (which is very low level btw) and will uses more allocs. +If you want a similarity, gorm_ will always be slower than sqlx_ (which is very low level btw) and will use more allocs. You must not think generic helpers of ``go-funk`` as a replacement when you are dealing with performance in your codebase, you should use typesafe implementations instead. @@ -600,7 +813,7 @@ you should use typesafe implementations instead. Contributing ------------ -* Ping me on twitter `@thoas `_ +* Ping me on twitter `@thoas `_ (DMs, mentions, whatever :)) * Fork the `project `_ * Fix `open issues `_ or request new features diff --git a/vendor/github.com/thoas/go-funk/assign.go b/vendor/github.com/thoas/go-funk/assign.go new file mode 100644 index 0000000..e35c750 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/assign.go @@ -0,0 +1,129 @@ +package funk + +import ( + "errors" + "fmt" + "reflect" + "strings" +) + +// Set assigns in at path with value val. i.e. in.path = val +// in accepts types of ptr to struct, ptr to variable, slice and ptr to slice. +// Along the path, interface{} is supported and nil ptr is initialized to ptr to zero value +// of the type until the variable to be set is obtained. +// It returns errors when encountering along the path unknown types, uninitialized +// interface{} or interface{} containing struct directly (not ptr to struct). +// +// Slice is resolved the same way in funk.Get(), by traversing each element of the slice, +// so that each element of the slice's corresponding field are going to be set to the same provided val. +// If Set is called on slice with empty path "", it behaves the same as funk.Fill() +// +// If in is well formed, i.e. do not expect above descripted errors to happen, funk.MustSet() +// is a short hand wrapper to discard error return +func Set(in interface{}, val interface{}, path string) error { + if in == nil { + return errors.New("Cannot Set nil") + } + parts := []string{} + if path != "" { + parts = strings.Split(path, ".") + } + return setByParts(in, val, parts) +} + +// we need this layer to handle interface{} type +func setByParts(in interface{}, val interface{}, parts []string) error { + + if in == nil { + // nil interface can happen during traversing the path + return errors.New("Cannot traverse nil/uninitialized interface{}") + } + + inValue := reflect.ValueOf(in) + inKind := inValue.Type().Kind() + + // Note: if interface contains a struct (not ptr to struct) then the content of the struct cannot be set. + // I.e. 
it is not CanAddr() or CanSet() + // So we require in interface{} to be a ptr, slice or array + if inKind == reflect.Ptr { + inValue = inValue.Elem() // if it is ptr we set its content not ptr its self + } else if inKind != reflect.Array && inKind != reflect.Slice { + return fmt.Errorf("Type %s not supported by Set", inValue.Type().String()) + } + + return set(inValue, reflect.ValueOf(val), parts) +} + +// traverse inValue using path in parts and set the dst to be setValue +func set(inValue reflect.Value, setValue reflect.Value, parts []string) error { + + // traverse the path to get the inValue we need to set + i := 0 + for i < len(parts) { + + kind := inValue.Kind() + + switch kind { + case reflect.Invalid: + // do not expect this case to happen + return errors.New("nil pointer found along the path") + case reflect.Struct: + fValue := inValue.FieldByName(parts[i]) + if !fValue.IsValid() { + return fmt.Errorf("field name %v is not found in struct %v", parts[i], inValue.Type().String()) + } + if !fValue.CanSet() { + return fmt.Errorf("field name %v is not exported in struct %v", parts[i], inValue.Type().String()) + } + inValue = fValue + i++ + case reflect.Slice | reflect.Array: + // set all its elements + length := inValue.Len() + for j := 0; j < length; j++ { + err := set(inValue.Index(j), setValue, parts[i:]) + if err != nil { + return err + } + } + return nil + case reflect.Ptr: + // only traverse down one level + if inValue.IsNil() { + // we initialize nil ptr to ptr to zero value of the type + // and continue traversing + inValue.Set(reflect.New(inValue.Type().Elem())) + } + // traverse the ptr until it is not pointer any more or is nil again + inValue = redirectValue(inValue) + case reflect.Interface: + // Note: if interface contains a struct (not ptr to struct) then the content of the struct cannot be set. + // I.e. it is not CanAddr() or CanSet(). This is why setByParts has a nil ptr check. + // we treat this as a new call to setByParts, and it will do proper check of the types + return setByParts(inValue.Interface(), setValue.Interface(), parts[i:]) + default: + return fmt.Errorf("kind %v in path %v is not supported", kind, parts[i]) + } + + } + // here inValue holds the value we need to set + + // interface{} can be set to any val + // other types we ensure the type matches + if inValue.Kind() != setValue.Kind() && inValue.Kind() != reflect.Interface { + return fmt.Errorf("cannot set target of type %v with type %v", inValue.Kind(), setValue.Kind()) + } + inValue.Set(setValue) + + return nil +} + +// MustSet is functionally the same as Set. +// It panics instead of returning error. +// It is safe to use if the in value is well formed. 
+func MustSet(in interface{}, val interface{}, path string) { + err := Set(in, val, path) + if err != nil { + panic(err) + } +} diff --git a/vendor/github.com/thoas/go-funk/builder.go b/vendor/github.com/thoas/go-funk/builder.go index 3201bfe..6dfc814 100644 --- a/vendor/github.com/thoas/go-funk/builder.go +++ b/vendor/github.com/thoas/go-funk/builder.go @@ -11,14 +11,18 @@ type Builder interface { Compact() Builder Drop(n int) Builder Filter(predicate interface{}) Builder + Flatten() Builder FlattenDeep() Builder Initial() Builder Intersect(y interface{}) Builder + Join(rarr interface{}, fnc JoinFnc) Builder Map(mapFunc interface{}) Builder + FlatMap(mapFunc interface{}) Builder Reverse() Builder Shuffle() Builder Tail() Builder Uniq() Builder + Without(values ...interface{}) Builder All() bool Any() bool @@ -35,7 +39,7 @@ type Builder interface { LastIndexOf(elem interface{}) int NotEmpty() bool Product() float64 - Reduce(reduceFunc, acc interface{}) float64 + Reduce(reduceFunc, acc interface{}) interface{} Sum() float64 Type() reflect.Type Value() interface{} @@ -72,7 +76,7 @@ func LazyChain(v interface{}) Builder { } -// LazyChainWith creates a lzy go-funk.Builder from a generator. Like LazyChain, each +// LazyChainWith creates a lazy go-funk.Builder from a generator. Like LazyChain, each // method call generate a new builder containing a method generating the previous value. // But, instead of using a collection, it takes a generator which can generate values. // With LazyChainWith, to can create a generic pipeline of collection transformation and, diff --git a/vendor/github.com/thoas/go-funk/chain_builder.go b/vendor/github.com/thoas/go-funk/chain_builder.go index 7680842..18226ab 100644 --- a/vendor/github.com/thoas/go-funk/chain_builder.go +++ b/vendor/github.com/thoas/go-funk/chain_builder.go @@ -21,6 +21,9 @@ func (b *chainBuilder) Drop(n int) Builder { func (b *chainBuilder) Filter(predicate interface{}) Builder { return &chainBuilder{Filter(b.collection, predicate)} } +func (b *chainBuilder) Flatten() Builder { + return &chainBuilder{Flatten(b.collection)} +} func (b *chainBuilder) FlattenDeep() Builder { return &chainBuilder{FlattenDeep(b.collection)} } @@ -30,9 +33,15 @@ func (b *chainBuilder) Initial() Builder { func (b *chainBuilder) Intersect(y interface{}) Builder { return &chainBuilder{Intersect(b.collection, y)} } +func (b *chainBuilder) Join(rarr interface{}, fnc JoinFnc) Builder { + return &chainBuilder{Join(b.collection, rarr, fnc)} +} func (b *chainBuilder) Map(mapFunc interface{}) Builder { return &chainBuilder{Map(b.collection, mapFunc)} } +func (b *chainBuilder) FlatMap(mapFunc interface{}) Builder { + return &chainBuilder{FlatMap(b.collection, mapFunc)} +} func (b *chainBuilder) Reverse() Builder { return &chainBuilder{Reverse(b.collection)} } @@ -45,6 +54,9 @@ func (b *chainBuilder) Tail() Builder { func (b *chainBuilder) Uniq() Builder { return &chainBuilder{Uniq(b.collection)} } +func (b *chainBuilder) Without(values ...interface{}) Builder { + return &chainBuilder{Without(b.collection, values...)} +} func (b *chainBuilder) All() bool { v := reflect.ValueOf(b.collection) @@ -113,7 +125,7 @@ func (b *chainBuilder) NotEmpty() bool { func (b *chainBuilder) Product() float64 { return Product(b.collection) } -func (b *chainBuilder) Reduce(reduceFunc, acc interface{}) float64 { +func (b *chainBuilder) Reduce(reduceFunc, acc interface{}) interface{} { return Reduce(b.collection, reduceFunc, acc) } func (b *chainBuilder) Sum() float64 { diff --git 
a/vendor/github.com/thoas/go-funk/go.mod b/vendor/github.com/thoas/go-funk/go.mod new file mode 100644 index 0000000..23fb24a --- /dev/null +++ b/vendor/github.com/thoas/go-funk/go.mod @@ -0,0 +1,5 @@ +module github.com/thoas/go-funk + +go 1.13 + +require github.com/stretchr/testify v1.4.0 diff --git a/vendor/github.com/thoas/go-funk/go.sum b/vendor/github.com/thoas/go-funk/go.sum new file mode 100644 index 0000000..8fdee58 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/go.sum @@ -0,0 +1,11 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/thoas/go-funk/helpers.go b/vendor/github.com/thoas/go-funk/helpers.go index 656477c..da3f032 100644 --- a/vendor/github.com/thoas/go-funk/helpers.go +++ b/vendor/github.com/thoas/go-funk/helpers.go @@ -75,7 +75,7 @@ func PtrOf(itf interface{}) interface{} { func IsFunction(in interface{}, num ...int) bool { funcType := reflect.TypeOf(in) - result := funcType.Kind() == reflect.Func + result := funcType != nil && funcType.Kind() == reflect.Func if len(num) >= 1 { result = result && funcType.NumIn() == num[0] @@ -88,6 +88,27 @@ func IsFunction(in interface{}, num ...int) bool { return result } +// IsPredicate returns if the argument is a predicate function. +func IsPredicate(in interface{}, inTypes ...reflect.Type) bool { + if len(inTypes) == 0 { + inTypes = append(inTypes, nil) + } + + funcType := reflect.TypeOf(in) + + result := funcType != nil && funcType.Kind() == reflect.Func + + result = result && funcType.NumOut() == 1 && funcType.Out(0).Kind() == reflect.Bool + result = result && funcType.NumIn() == len(inTypes) + + for i := 0; result && i < len(inTypes); i++ { + inType := inTypes[i] + result = inType == nil || inType.ConvertibleTo(funcType.In(i)) + } + + return result +} + // IsEqual returns if the two objects are equal func IsEqual(expected interface{}, actual interface{}) bool { if expected == nil || actual == nil { diff --git a/vendor/github.com/thoas/go-funk/intersection.go b/vendor/github.com/thoas/go-funk/intersection.go index ce53de5..a275ef0 100644 --- a/vendor/github.com/thoas/go-funk/intersection.go +++ b/vendor/github.com/thoas/go-funk/intersection.go @@ -5,6 +5,10 @@ import ( ) // Intersect returns the intersection between two collections. 
+// +// Deprecated: use Join(x, y, InnerJoin) instead of Intersect, InnerJoin +// implements deduplication mechanism, so verify your code behaviour +// before using it func Intersect(x interface{}, y interface{}) interface{} { if !IsCollection(x) { panic("First parameter must be a collection") @@ -66,3 +70,184 @@ func IntersectString(x []string, y []string) []string { return set } + +// Difference returns the difference between two collections. +func Difference(x interface{}, y interface{}) (interface{}, interface{}) { + if !IsCollection(x) { + panic("First parameter must be a collection") + } + if !IsCollection(y) { + panic("Second parameter must be a collection") + } + + xValue := reflect.ValueOf(x) + xType := xValue.Type() + + yValue := reflect.ValueOf(y) + yType := yValue.Type() + + if NotEqual(xType, yType) { + panic("Parameters must have the same type") + } + + leftType := reflect.SliceOf(xType.Elem()) + leftSlice := reflect.MakeSlice(leftType, 0, 0) + rightType := reflect.SliceOf(yType.Elem()) + rightSlice := reflect.MakeSlice(rightType, 0, 0) + + for i := 0; i < xValue.Len(); i++ { + v := xValue.Index(i).Interface() + if !Contains(y, v) { + leftSlice = reflect.Append(leftSlice, xValue.Index(i)) + } + } + + for i := 0; i < yValue.Len(); i++ { + v := yValue.Index(i).Interface() + if !Contains(x, v) { + rightSlice = reflect.Append(rightSlice, yValue.Index(i)) + } + } + + return leftSlice.Interface(), rightSlice.Interface() +} + +// DifferenceString returns the difference between two collections of strings. +func DifferenceString(x []string, y []string) ([]string, []string) { + leftSlice := []string{} + rightSlice := []string{} + + for _, v := range x { + if !ContainsString(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsString(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} + +// DifferenceInt64 returns the difference between two collections of int64s. +func DifferenceInt64(x []int64, y []int64) ([]int64, []int64) { + leftSlice := []int64{} + rightSlice := []int64{} + + for _, v := range x { + if !ContainsInt64(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsInt64(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} + +// DifferenceInt32 returns the difference between two collections of ints32. +func DifferenceInt32(x []int32, y []int32) ([]int32, []int32) { + leftSlice := []int32{} + rightSlice := []int32{} + + for _, v := range x { + if !ContainsInt32(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsInt32(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} + +// DifferenceInt returns the difference between two collections of ints. +func DifferenceInt(x []int, y []int) ([]int, []int) { + leftSlice := []int{} + rightSlice := []int{} + + for _, v := range x { + if !ContainsInt(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsInt(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} + +// DifferenceUInt returns the difference between two collections of uints. 
+func DifferenceUInt(x []uint, y []uint) ([]uint, []uint) { + leftSlice := []uint{} + rightSlice := []uint{} + + for _, v := range x { + if !ContainsUInt(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsUInt(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} + +// DifferenceUInt32 returns the difference between two collections of uints32. +func DifferenceUInt32(x []uint32, y []uint32) ([]uint32, []uint32) { + leftSlice := []uint32{} + rightSlice := []uint32{} + + for _, v := range x { + if !ContainsUInt32(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsUInt32(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} + +// DifferenceUInt64 returns the difference between two collections of uints64. +func DifferenceUInt64(x []uint64, y []uint64) ([]uint64, []uint64) { + leftSlice := []uint64{} + rightSlice := []uint64{} + + for _, v := range x { + if !ContainsUInt64(y, v) { + leftSlice = append(leftSlice, v) + } + } + + for _, v := range y { + if !ContainsUInt64(x, v) { + rightSlice = append(rightSlice, v) + } + } + + return leftSlice, rightSlice +} diff --git a/vendor/github.com/thoas/go-funk/join.go b/vendor/github.com/thoas/go-funk/join.go new file mode 100644 index 0000000..f5de606 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/join.go @@ -0,0 +1,111 @@ +package funk + +import ( + "reflect" + "strings" +) + +type JoinFnc func(lx, rx reflect.Value) reflect.Value + +// Join combines two collections using the given join method. +func Join(larr, rarr interface{}, fnc JoinFnc) interface{} { + if !IsCollection(larr) { + panic("First parameter must be a collection") + } + if !IsCollection(rarr) { + panic("Second parameter must be a collection") + } + + lvalue := reflect.ValueOf(larr) + rvalue := reflect.ValueOf(rarr) + if NotEqual(lvalue.Type(), rvalue.Type()) { + panic("Parameters must have the same type") + } + + return fnc(lvalue, rvalue).Interface() +} + +// InnerJoin finds and returns matching data from two collections. +func InnerJoin(lx, rx reflect.Value) reflect.Value { + result := reflect.MakeSlice(reflect.SliceOf(lx.Type().Elem()), 0, lx.Len()+rx.Len()) + rhash := hashSlice(rx) + lhash := make(map[interface{}]struct{}, lx.Len()) + + for i := 0; i < lx.Len(); i++ { + v := lx.Index(i) + _, ok := rhash[v.Interface()] + _, alreadyExists := lhash[v.Interface()] + if ok && !alreadyExists { + lhash[v.Interface()] = struct{}{} + result = reflect.Append(result, v) + } + } + return result +} + +// OuterJoin finds and returns dissimilar data from two collections. +func OuterJoin(lx, rx reflect.Value) reflect.Value { + ljoin := LeftJoin(lx, rx) + rjoin := RightJoin(lx, rx) + + result := reflect.MakeSlice(reflect.SliceOf(lx.Type().Elem()), ljoin.Len()+rjoin.Len(), ljoin.Len()+rjoin.Len()) + for i := 0; i < ljoin.Len(); i++ { + result.Index(i).Set(ljoin.Index(i)) + } + for i := 0; i < rjoin.Len(); i++ { + result.Index(ljoin.Len() + i).Set(rjoin.Index(i)) + } + + return result +} + +// LeftJoin finds and returns dissimilar data from the first collection (left). 
+func LeftJoin(lx, rx reflect.Value) reflect.Value { + result := reflect.MakeSlice(reflect.SliceOf(lx.Type().Elem()), 0, lx.Len()) + rhash := hashSlice(rx) + + for i := 0; i < lx.Len(); i++ { + v := lx.Index(i) + _, ok := rhash[v.Interface()] + if !ok { + result = reflect.Append(result, v) + } + } + return result +} + +// LeftJoin finds and returns dissimilar data from the second collection (right). +func RightJoin(lx, rx reflect.Value) reflect.Value { return LeftJoin(rx, lx) } + +func hashSlice(arr reflect.Value) map[interface{}]struct{} { + hash := map[interface{}]struct{}{} + for i := 0; i < arr.Len(); i++ { + v := arr.Index(i).Interface() + hash[v] = struct{}{} + } + return hash +} + +// StringerJoin joins an array of elements which implement the `String() string` function. +// Direct copy of strings.Join() with a few tweaks. +func StringerJoin(elems []interface{ String() string }, sep string) string { + switch len(elems) { + case 0: + return "" + case 1: + return elems[0].String() + } + n := len(sep) * (len(elems) - 1) + for i := 0; i < len(elems); i++ { + n += len(elems[i].String()) + } + + var b strings.Builder + b.Grow(n) + b.WriteString(elems[0].String()) + for _, s := range elems[1:] { + b.WriteString(sep) + b.WriteString(s.String()) + } + return b.String() +} diff --git a/vendor/github.com/thoas/go-funk/join_primitives.go b/vendor/github.com/thoas/go-funk/join_primitives.go new file mode 100644 index 0000000..eefcac1 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/join_primitives.go @@ -0,0 +1,373 @@ +package funk + +type JoinIntFnc func(lx, rx []int) []int + +// JoinInt combines two int collections using the given join method. +func JoinInt(larr, rarr []int, fnc JoinIntFnc) []int { + return fnc(larr, rarr) +} + +// InnerJoinInt finds and returns matching data from two int collections. +func InnerJoinInt(lx, rx []int) []int { + result := make([]int, 0, len(lx)+len(rx)) + rhash := hashSliceInt(rx) + lhash := make(map[int]struct{}, len(lx)) + + for _, v := range lx { + _, ok := rhash[v] + _, alreadyExists := lhash[v] + if ok && !alreadyExists { + lhash[v] = struct{}{} + result = append(result, v) + } + } + return result +} + +// OuterJoinInt finds and returns dissimilar data from two int collections. +func OuterJoinInt(lx, rx []int) []int { + ljoin := LeftJoinInt(lx, rx) + rjoin := RightJoinInt(lx, rx) + + result := make([]int, len(ljoin)+len(rjoin)) + copy(result, ljoin) + for i, v := range rjoin { + result[len(ljoin)+i] = v + } + return result +} + +// LeftJoinInt finds and returns dissimilar data from the first int collection (left). +func LeftJoinInt(lx, rx []int) []int { + result := make([]int, 0, len(lx)) + rhash := hashSliceInt(rx) + + for _, v := range lx { + _, ok := rhash[v] + if !ok { + result = append(result, v) + } + } + return result +} + +// LeftJoinInt finds and returns dissimilar data from the second int collection (right). +func RightJoinInt(lx, rx []int) []int { return LeftJoinInt(rx, lx) } + +func hashSliceInt(arr []int) map[int]struct{} { + hash := make(map[int]struct{}, len(arr)) + for _, i := range arr { + hash[i] = struct{}{} + } + return hash +} + +type JoinInt32Fnc func(lx, rx []int32) []int32 + +// JoinInt32 combines two int32 collections using the given join method. +func JoinInt32(larr, rarr []int32, fnc JoinInt32Fnc) []int32 { + return fnc(larr, rarr) +} + +// InnerJoinInt32 finds and returns matching data from two int32 collections. 
+func InnerJoinInt32(lx, rx []int32) []int32 { + result := make([]int32, 0, len(lx)+len(rx)) + rhash := hashSliceInt32(rx) + lhash := make(map[int32]struct{}, len(lx)) + + for _, v := range lx { + _, ok := rhash[v] + _, alreadyExists := lhash[v] + if ok && !alreadyExists { + lhash[v] = struct{}{} + result = append(result, v) + } + } + return result +} + +// OuterJoinInt32 finds and returns dissimilar data from two int32 collections. +func OuterJoinInt32(lx, rx []int32) []int32 { + ljoin := LeftJoinInt32(lx, rx) + rjoin := RightJoinInt32(lx, rx) + + result := make([]int32, len(ljoin)+len(rjoin)) + copy(result, ljoin) + for i, v := range rjoin { + result[len(ljoin)+i] = v + } + return result +} + +// LeftJoinInt32 finds and returns dissimilar data from the first int32 collection (left). +func LeftJoinInt32(lx, rx []int32) []int32 { + result := make([]int32, 0, len(lx)) + rhash := hashSliceInt32(rx) + + for _, v := range lx { + _, ok := rhash[v] + if !ok { + result = append(result, v) + } + } + return result +} + +// LeftJoinInt32 finds and returns dissimilar data from the second int32 collection (right). +func RightJoinInt32(lx, rx []int32) []int32 { return LeftJoinInt32(rx, lx) } + +func hashSliceInt32(arr []int32) map[int32]struct{} { + hash := make(map[int32]struct{}, len(arr)) + for _, i := range arr { + hash[i] = struct{}{} + } + return hash +} + +type JoinInt64Fnc func(lx, rx []int64) []int64 + +// JoinInt64 combines two int64 collections using the given join method. +func JoinInt64(larr, rarr []int64, fnc JoinInt64Fnc) []int64 { + return fnc(larr, rarr) +} + +// InnerJoinInt64 finds and returns matching data from two int64 collections. +func InnerJoinInt64(lx, rx []int64) []int64 { + result := make([]int64, 0, len(lx)+len(rx)) + rhash := hashSliceInt64(rx) + lhash := make(map[int64]struct{}, len(lx)) + + for _, v := range lx { + _, ok := rhash[v] + _, alreadyExists := lhash[v] + if ok && !alreadyExists { + lhash[v] = struct{}{} + result = append(result, v) + } + } + return result +} + +// OuterJoinInt64 finds and returns dissimilar data from two int64 collections. +func OuterJoinInt64(lx, rx []int64) []int64 { + ljoin := LeftJoinInt64(lx, rx) + rjoin := RightJoinInt64(lx, rx) + + result := make([]int64, len(ljoin)+len(rjoin)) + copy(result, ljoin) + for i, v := range rjoin { + result[len(ljoin)+i] = v + } + return result +} + +// LeftJoinInt64 finds and returns dissimilar data from the first int64 collection (left). +func LeftJoinInt64(lx, rx []int64) []int64 { + result := make([]int64, 0, len(lx)) + rhash := hashSliceInt64(rx) + + for _, v := range lx { + _, ok := rhash[v] + if !ok { + result = append(result, v) + } + } + return result +} + +// LeftJoinInt64 finds and returns dissimilar data from the second int64 collection (right). +func RightJoinInt64(lx, rx []int64) []int64 { return LeftJoinInt64(rx, lx) } + +func hashSliceInt64(arr []int64) map[int64]struct{} { + hash := make(map[int64]struct{}, len(arr)) + for _, i := range arr { + hash[i] = struct{}{} + } + return hash +} + +type JoinStringFnc func(lx, rx []string) []string + +// JoinString combines two string collections using the given join method. +func JoinString(larr, rarr []string, fnc JoinStringFnc) []string { + return fnc(larr, rarr) +} + +// InnerJoinString finds and returns matching data from two string collections. 
+func InnerJoinString(lx, rx []string) []string { + result := make([]string, 0, len(lx)+len(rx)) + rhash := hashSliceString(rx) + lhash := make(map[string]struct{}, len(lx)) + + for _, v := range lx { + _, ok := rhash[v] + _, alreadyExists := lhash[v] + if ok && !alreadyExists { + lhash[v] = struct{}{} + result = append(result, v) + } + } + return result +} + +// OuterJoinString finds and returns dissimilar data from two string collections. +func OuterJoinString(lx, rx []string) []string { + ljoin := LeftJoinString(lx, rx) + rjoin := RightJoinString(lx, rx) + + result := make([]string, len(ljoin)+len(rjoin)) + copy(result, ljoin) + for i, v := range rjoin { + result[len(ljoin)+i] = v + } + return result +} + +// LeftJoinString finds and returns dissimilar data from the first string collection (left). +func LeftJoinString(lx, rx []string) []string { + result := make([]string, 0, len(lx)) + rhash := hashSliceString(rx) + + for _, v := range lx { + _, ok := rhash[v] + if !ok { + result = append(result, v) + } + } + return result +} + +// LeftJoinString finds and returns dissimilar data from the second string collection (right). +func RightJoinString(lx, rx []string) []string { return LeftJoinString(rx, lx) } + +func hashSliceString(arr []string) map[string]struct{} { + hash := make(map[string]struct{}, len(arr)) + for _, i := range arr { + hash[i] = struct{}{} + } + return hash +} + +type JoinFloat32Fnc func(lx, rx []float32) []float32 + +// JoinFloat32 combines two float32 collections using the given join method. +func JoinFloat32(larr, rarr []float32, fnc JoinFloat32Fnc) []float32 { + return fnc(larr, rarr) +} + +// InnerJoinFloat32 finds and returns matching data from two float32 collections. +func InnerJoinFloat32(lx, rx []float32) []float32 { + result := make([]float32, 0, len(lx)+len(rx)) + rhash := hashSliceFloat32(rx) + lhash := make(map[float32]struct{}, len(lx)) + + for _, v := range lx { + _, ok := rhash[v] + _, alreadyExists := lhash[v] + if ok && !alreadyExists { + lhash[v] = struct{}{} + result = append(result, v) + } + } + return result +} + +// OuterJoinFloat32 finds and returns dissimilar data from two float32 collections. +func OuterJoinFloat32(lx, rx []float32) []float32 { + ljoin := LeftJoinFloat32(lx, rx) + rjoin := RightJoinFloat32(lx, rx) + + result := make([]float32, len(ljoin)+len(rjoin)) + copy(result, ljoin) + for i, v := range rjoin { + result[len(ljoin)+i] = v + } + return result +} + +// LeftJoinFloat32 finds and returns dissimilar data from the first float32 collection (left). +func LeftJoinFloat32(lx, rx []float32) []float32 { + result := make([]float32, 0, len(lx)) + rhash := hashSliceFloat32(rx) + + for _, v := range lx { + _, ok := rhash[v] + if !ok { + result = append(result, v) + } + } + return result +} + +// LeftJoinFloat32 finds and returns dissimilar data from the second float32 collection (right). +func RightJoinFloat32(lx, rx []float32) []float32 { return LeftJoinFloat32(rx, lx) } + +func hashSliceFloat32(arr []float32) map[float32]struct{} { + hash := make(map[float32]struct{}, len(arr)) + for _, i := range arr { + hash[i] = struct{}{} + } + return hash +} + +type JoinFloat64Fnc func(lx, rx []float64) []float64 + +// JoinFloat64 combines two float64 collections using the given join method. +func JoinFloat64(larr, rarr []float64, fnc JoinFloat64Fnc) []float64 { + return fnc(larr, rarr) +} + +// InnerJoinFloat64 finds and returns matching data from two float64 collections. 
+func InnerJoinFloat64(lx, rx []float64) []float64 { + result := make([]float64, 0, len(lx)+len(rx)) + rhash := hashSliceFloat64(rx) + lhash := make(map[float64]struct{}, len(lx)) + + for _, v := range lx { + _, ok := rhash[v] + _, alreadyExists := lhash[v] + if ok && !alreadyExists { + lhash[v] = struct{}{} + result = append(result, v) + } + } + return result +} + +// OuterJoinFloat64 finds and returns dissimilar data from two float64 collections. +func OuterJoinFloat64(lx, rx []float64) []float64 { + ljoin := LeftJoinFloat64(lx, rx) + rjoin := RightJoinFloat64(lx, rx) + + result := make([]float64, len(ljoin)+len(rjoin)) + copy(result, ljoin) + for i, v := range rjoin { + result[len(ljoin)+i] = v + } + return result +} + +// LeftJoinFloat64 finds and returns dissimilar data from the first float64 collection (left). +func LeftJoinFloat64(lx, rx []float64) []float64 { + result := make([]float64, 0, len(lx)) + rhash := hashSliceFloat64(rx) + + for _, v := range lx { + _, ok := rhash[v] + if !ok { + result = append(result, v) + } + } + return result +} + +// LeftJoinFloat64 finds and returns dissimilar data from the second float64 collection (right). +func RightJoinFloat64(lx, rx []float64) []float64 { return LeftJoinFloat64(rx, lx) } + +func hashSliceFloat64(arr []float64) map[float64]struct{} { + hash := make(map[float64]struct{}, len(arr)) + for _, i := range arr { + hash[i] = struct{}{} + } + return hash +} diff --git a/vendor/github.com/thoas/go-funk/lazy_builder.go b/vendor/github.com/thoas/go-funk/lazy_builder.go index 2641b50..9ba1907 100644 --- a/vendor/github.com/thoas/go-funk/lazy_builder.go +++ b/vendor/github.com/thoas/go-funk/lazy_builder.go @@ -18,6 +18,9 @@ func (b *lazyBuilder) Drop(n int) Builder { func (b *lazyBuilder) Filter(predicate interface{}) Builder { return &lazyBuilder{func() interface{} { return Filter(b.exec(), predicate) }} } +func (b *lazyBuilder) Flatten() Builder { + return &lazyBuilder{func() interface{} { return Flatten(b.exec()) }} +} func (b *lazyBuilder) FlattenDeep() Builder { return &lazyBuilder{func() interface{} { return FlattenDeep(b.exec()) }} } @@ -27,9 +30,15 @@ func (b *lazyBuilder) Initial() Builder { func (b *lazyBuilder) Intersect(y interface{}) Builder { return &lazyBuilder{func() interface{} { return Intersect(b.exec(), y) }} } +func (b *lazyBuilder) Join(rarr interface{}, fnc JoinFnc) Builder { + return &lazyBuilder{func() interface{} { return Join(b.exec(), rarr, fnc) }} +} func (b *lazyBuilder) Map(mapFunc interface{}) Builder { return &lazyBuilder{func() interface{} { return Map(b.exec(), mapFunc) }} } +func (b *lazyBuilder) FlatMap(mapFunc interface{}) Builder { + return &lazyBuilder{func() interface{} { return FlatMap(b.exec(), mapFunc) }} +} func (b *lazyBuilder) Reverse() Builder { return &lazyBuilder{func() interface{} { return Reverse(b.exec()) }} } @@ -42,6 +51,9 @@ func (b *lazyBuilder) Tail() Builder { func (b *lazyBuilder) Uniq() Builder { return &lazyBuilder{func() interface{} { return Uniq(b.exec()) }} } +func (b *lazyBuilder) Without(values ...interface{}) Builder { + return &lazyBuilder{func() interface{} { return Without(b.exec(), values...) 
}} +} func (b *lazyBuilder) All() bool { return (&chainBuilder{b.exec()}).All() @@ -88,7 +100,7 @@ func (b *lazyBuilder) NotEmpty() bool { func (b *lazyBuilder) Product() float64 { return Product(b.exec()) } -func (b *lazyBuilder) Reduce(reduceFunc, acc interface{}) float64 { +func (b *lazyBuilder) Reduce(reduceFunc, acc interface{}) interface{} { return Reduce(b.exec(), reduceFunc, acc) } func (b *lazyBuilder) Sum() float64 { diff --git a/vendor/github.com/thoas/go-funk/max.go b/vendor/github.com/thoas/go-funk/max.go new file mode 100644 index 0000000..0d63dc6 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/max.go @@ -0,0 +1,178 @@ +package funk + +import "strings" + +// MaxInt validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []int +// It returns int +func MaxInt(i []int) int { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max int + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxInt8 validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []int8 +// It returns int8 +func MaxInt8(i []int8) int8 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max int8 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxInt16 validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []int16 +// It returns int16 +func MaxInt16(i []int16) int16 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max int16 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxInt32 validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []int32 +// It returns int32 +func MaxInt32(i []int32) int32 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max int32 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxInt64 validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []int64 +// It returns int64 +func MaxInt64(i []int64) int64 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max int64 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxFloat32 validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []float32 +// It returns float32 +func MaxFloat32(i []float32) float32 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max float32 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxFloat64 validates the input, compares the elements and returns the maximum element in an array/slice. 
+// It accepts []float64 +// It returns float64 +func MaxFloat64(i []float64) float64 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max float64 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + if item > max { + max = item + } + } + return max +} + +// MaxString validates the input, compares the elements and returns the maximum element in an array/slice. +// It accepts []string +// It returns string +func MaxString(i []string) string { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var max string + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + max = item + continue + } + max = compareStringsMax(max, item) + } + return max +} + +// compareStrings uses the strings.Compare method to compare two strings, and returns the greater one. +func compareStringsMax(max, current string) string { + r := strings.Compare(strings.ToLower(max), strings.ToLower(current)) + if r > 0 { + return max + } + return current +} diff --git a/vendor/github.com/thoas/go-funk/min.go b/vendor/github.com/thoas/go-funk/min.go new file mode 100644 index 0000000..81ffa77 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/min.go @@ -0,0 +1,177 @@ +package funk + +import "strings" + +// MinInt validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []int +// It returns int +func MinInt(i []int) int { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min int + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinInt8 validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []int8 +// It returns int8 +func MinInt8(i []int8) int8 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min int8 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinInt16 validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []int16 +// It returns int16 +func MinInt16(i []int16) int16 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min int16 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinInt32 validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []int32 +// It returns int32 +func MinInt32(i []int32) int32 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min int32 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinInt64 validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []int64 +// It returns int64 +func MinInt64(i []int64) int64 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min int64 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinFloat32 validates the input, compares the elements and returns the minimum element in an array/slice. 
+// It accepts []float32 +// It returns float32 +func MinFloat32(i []float32) float32 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min float32 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinFloat64 validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []float64 +// It returns float64 +func MinFloat64(i []float64) float64 { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min float64 + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + if item < min { + min = item + } + } + return min +} + +// MinString validates the input, compares the elements and returns the minimum element in an array/slice. +// It accepts []string +// It returns string +func MinString(i []string) string { + if len(i) == 0 { + panic("arg is an empty array/slice") + } + var min string + for idx := 0; idx < len(i); idx++ { + item := i[idx] + if idx == 0 { + min = item + continue + } + min = compareStringsMin(min, item) + } + return min +} + +func compareStringsMin(min, current string) string { + r := strings.Compare(strings.ToLower(min), strings.ToLower(current)) + if r < 0 { + return min + } + return current +} diff --git a/vendor/github.com/thoas/go-funk/options.go b/vendor/github.com/thoas/go-funk/options.go new file mode 100644 index 0000000..5d08c24 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/options.go @@ -0,0 +1,24 @@ +package funk + +type options struct { + allowZero bool +} + +type option func(*options) + +func newOptions(values ...option) *options { + opts := &options{ + allowZero: false, + } + for _, o := range values { + o(opts) + } + return opts +} + +// WithAllowZero allows zero values. +func WithAllowZero() func(*options) { + return func(opts *options) { + opts.allowZero = true + } +} diff --git a/vendor/github.com/thoas/go-funk/permutation.go b/vendor/github.com/thoas/go-funk/permutation.go new file mode 100644 index 0000000..0b10993 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/permutation.go @@ -0,0 +1,29 @@ +package funk + +import "errors" + +// NextPermutation Implement next permutation, +// which rearranges numbers into the lexicographically next greater permutation of numbers. +func NextPermutation(nums []int) error { + n := len(nums) + if n == 0 { + return errors.New("nums is empty") + } + + i := n - 2 + + for i >= 0 && nums[i] >= nums[i+1] { + i-- + } + + if i >= 0 { + j := n - 1 + for j >= 0 && nums[i] >= nums[j] { + j-- + } + nums[i], nums[j] = nums[j], nums[i] + } + + ReverseInt(nums[i+1:]) + return nil +} diff --git a/vendor/github.com/thoas/go-funk/predicate.go b/vendor/github.com/thoas/go-funk/predicate.go new file mode 100644 index 0000000..e6b4131 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/predicate.go @@ -0,0 +1,47 @@ +package funk + +import ( + "reflect" +) + +// predicatesImpl contains the common implementation of AnyPredicates and AllPredicates. 
+func predicatesImpl(value interface{}, wantedAnswer bool, predicates interface{}) bool { + if !IsCollection(predicates) { + panic("Predicates parameter must be an iteratee") + } + + predicatesValue := reflect.ValueOf(predicates) + inputValue := reflect.ValueOf(value) + + for i := 0; i < predicatesValue.Len(); i++ { + funcValue := predicatesValue.Index(i) + if !IsFunction(funcValue.Interface()) { + panic("Got non function as predicate") + } + + funcType := funcValue.Type() + if !IsPredicate(funcValue.Interface()) { + panic("Predicate function must have 1 parameter and must return boolean") + } + + if !inputValue.Type().ConvertibleTo(funcType.In(0)) { + panic("Given value is not compatible with type of parameter for the predicate.") + } + if result := funcValue.Call([]reflect.Value{inputValue}); wantedAnswer == result[0].Bool() { + return wantedAnswer + } + } + + return !wantedAnswer +} + +// AnyPredicates gets a value and a series of predicates, and return true if at least one of the predicates +// is true. +func AnyPredicates(value interface{}, predicates interface{}) bool { + return predicatesImpl(value, true, predicates) +} + +// AllPredicates gets a value and a series of predicates, and return true if all of the predicates are true. +func AllPredicates(value interface{}, predicates interface{}) bool { + return predicatesImpl(value, false, predicates) +} diff --git a/vendor/github.com/thoas/go-funk/presence.go b/vendor/github.com/thoas/go-funk/presence.go index fc07051..9ab0f0f 100644 --- a/vendor/github.com/thoas/go-funk/presence.go +++ b/vendor/github.com/thoas/go-funk/presence.go @@ -1,6 +1,7 @@ package funk import ( + "fmt" "reflect" "strings" ) @@ -50,6 +51,13 @@ func Filter(arr interface{}, predicate interface{}) interface{} { // Find iterates over elements of collection, returning the first // element predicate returns truthy for. func Find(arr interface{}, predicate interface{}) interface{} { + _, val := FindKey(arr, predicate) + return val +} + +// Find iterates over elements of collection, returning the first +// element of an array and random of a map which predicate returns truthy for. 
+func FindKey(arr interface{}, predicate interface{}) (matchKey, matchEle interface{}) { if !IsIteratee(arr) { panic("First parameter must be an iteratee") } @@ -67,18 +75,33 @@ func Find(arr interface{}, predicate interface{}) interface{} { } arrValue := reflect.ValueOf(arr) + var keyArrs []reflect.Value + isMap := arrValue.Kind() == reflect.Map + if isMap { + keyArrs = arrValue.MapKeys() + } for i := 0; i < arrValue.Len(); i++ { - elem := arrValue.Index(i) + var ( + elem reflect.Value + key reflect.Value + ) + if isMap { + key = keyArrs[i] + elem = arrValue.MapIndex(key) + } else { + key = reflect.ValueOf(i) + elem = arrValue.Index(i) + } result := funcValue.Call([]reflect.Value{elem})[0].Interface().(bool) if result { - return elem.Interface() + return key.Interface(), elem.Interface() } } - return nil + return nil, nil } // IndexOf gets the index at which the first occurrence of value is found in array or return -1 @@ -95,8 +118,9 @@ func IndexOf(in interface{}, elem interface{}) int { } if inType.Kind() == reflect.Slice { + equalTo := equal(elem) for i := 0; i < inValue.Len(); i++ { - if equal(inValue.Index(i).Interface(), elem) { + if equalTo(reflect.Value{}, inValue.Index(i)) { return i } } @@ -121,8 +145,9 @@ func LastIndexOf(in interface{}, elem interface{}) int { if inType.Kind() == reflect.Slice { length := inValue.Len() + equalTo := equal(elem) for i := length - 1; i >= 0; i-- { - if equal(inValue.Index(i).Interface(), elem) { + if equalTo(reflect.Value{}, inValue.Index(i)) { return i } } @@ -141,17 +166,21 @@ func Contains(in interface{}, elem interface{}) bool { case reflect.String: return strings.Contains(inValue.String(), elemValue.String()) case reflect.Map: + equalTo := equal(elem, true) for _, key := range inValue.MapKeys() { - if equal(key.Interface(), elem) { + if equalTo(key, inValue.MapIndex(key)) { return true } } case reflect.Slice, reflect.Array: + equalTo := equal(elem) for i := 0; i < inValue.Len(); i++ { - if equal(inValue.Index(i).Interface(), elem) { + if equalTo(reflect.Value{}, inValue.Index(i)) { return true } } + default: + panic(fmt.Sprintf("Type %s is not supported by Contains, supported types are String, Map, Slice, Array", inType.String())) } return false @@ -166,3 +195,13 @@ func Every(in interface{}, elements ...interface{}) bool { } return true } + +// Some returns true if atleast one element is present in an iteratee. +func Some(in interface{}, elements ...interface{}) bool { + for _, elem := range elements { + if Contains(in, elem) { + return true + } + } + return false +} diff --git a/vendor/github.com/thoas/go-funk/reduce.go b/vendor/github.com/thoas/go-funk/reduce.go index de16a72..579b822 100644 --- a/vendor/github.com/thoas/go-funk/reduce.go +++ b/vendor/github.com/thoas/go-funk/reduce.go @@ -6,7 +6,7 @@ import ( // Reduce takes a collection and reduces it to a single value using a reduction // function (or a valid symbol) and an accumulator value. 
-func Reduce(arr, reduceFunc, acc interface{}) float64 { +func Reduce(arr, reduceFunc, acc interface{}) interface{} { arrValue := redirectValue(reflect.ValueOf(arr)) if !IsIteratee(arrValue.Interface()) { @@ -83,6 +83,5 @@ func Reduce(arr, reduceFunc, acc interface{}) float64 { accValue = result[0] } - resultInterface := accValue.Convert(returnType).Interface() - return resultInterface.(float64) + return accValue.Convert(returnType).Interface() } diff --git a/vendor/github.com/thoas/go-funk/retrieve.go b/vendor/github.com/thoas/go-funk/retrieve.go index e1daeb8..7b3019f 100644 --- a/vendor/github.com/thoas/go-funk/retrieve.go +++ b/vendor/github.com/thoas/go-funk/retrieve.go @@ -5,29 +5,59 @@ import ( "strings" ) -// Get retrieves the value at path of struct(s). -func Get(out interface{}, path string) interface{} { - result := get(reflect.ValueOf(out), path) +// Get retrieves the value from given path, retriever can be modified with available RetrieverOptions +func Get(out interface{}, path string, opts ...option) interface{} { + options := newOptions(opts...) - if result.Kind() != reflect.Invalid { + result := get(reflect.ValueOf(out), path) + // valid kind and we can return a result.Interface() without panic + if result.Kind() != reflect.Invalid && result.CanInterface() { + // if we don't allow zero and the result is a zero value return nil + if !options.allowZero && result.IsZero() { + return nil + } + // if the result kind is a pointer and its nil return nil + if result.Kind() == reflect.Ptr && result.IsNil() { + return nil + } + // return the result interface (i.e the zero value of it) return result.Interface() } return nil } +// GetOrElse retrieves the value of the pointer or default. +func GetOrElse(v interface{}, def interface{}) interface{} { + val := reflect.ValueOf(v) + if v == nil || (val.Kind() == reflect.Ptr && val.IsNil()) { + return def + } else if val.Kind() != reflect.Ptr { + return v + } + return val.Elem().Interface() +} + func get(value reflect.Value, path string) reflect.Value { if value.Kind() == reflect.Slice || value.Kind() == reflect.Array { var resultSlice reflect.Value length := value.Len() + if length == 0 { + zeroElement := reflect.Zero(value.Type().Elem()) + pathValue := get(zeroElement, path) + value = reflect.MakeSlice(reflect.SliceOf(pathValue.Type()), 0, 0) + + return value + } + for i := 0; i < length; i++ { item := value.Index(i) resultValue := get(item, path) - if resultValue.Kind() == reflect.Invalid { + if resultValue.Kind() == reflect.Invalid || resultValue.IsZero() { continue } @@ -43,7 +73,7 @@ func get(value reflect.Value, path string) reflect.Value { } // if the result is a slice of a slice, we need to flatten it - if resultSlice.Type().Elem().Kind() == reflect.Slice { + if resultSlice.Kind() != reflect.Invalid && resultSlice.Type().Elem().Kind() == reflect.Slice { return flattenDeep(resultSlice) } @@ -56,18 +86,17 @@ func get(value reflect.Value, path string) reflect.Value { value = redirectValue(value) kind := value.Kind() - if kind == reflect.Invalid { + switch kind { + case reflect.Invalid: continue - } - - if kind == reflect.Struct { + case reflect.Struct: value = value.FieldByName(part) - continue - } - - if kind == reflect.Slice || kind == reflect.Array { + case reflect.Map: + value = value.MapIndex(reflect.ValueOf(part)) + case reflect.Slice, reflect.Array: value = get(value, part) - continue + default: + return reflect.ValueOf(nil) } } diff --git a/vendor/github.com/thoas/go-funk/short_if.go b/vendor/github.com/thoas/go-funk/short_if.go 
new file mode 100644 index 0000000..537ff56 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/short_if.go @@ -0,0 +1,8 @@ +package funk + +func ShortIf(condition bool, a interface{}, b interface{}) interface{} { + if condition { + return a + } + return b +} diff --git a/vendor/github.com/thoas/go-funk/subset.go b/vendor/github.com/thoas/go-funk/subset.go new file mode 100644 index 0000000..de47be1 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/subset.go @@ -0,0 +1,41 @@ +package funk + +import ( + "reflect" +) + +// Subset returns true if collection x is a subset of y. +func Subset(x interface{}, y interface{}) bool { + if !IsCollection(x) { + panic("First parameter must be a collection") + } + if !IsCollection(y) { + panic("Second parameter must be a collection") + } + + xValue := reflect.ValueOf(x) + xType := xValue.Type() + + yValue := reflect.ValueOf(y) + yType := yValue.Type() + + if NotEqual(xType, yType) { + panic("Parameters must have the same type") + } + + if xValue.Len() == 0 { + return true + } + + if yValue.Len() == 0 || yValue.Len() < xValue.Len() { + return false + } + + for i := 0; i < xValue.Len(); i++ { + if !Contains(yValue.Interface(), xValue.Index(i).Interface()) { + return false + } + } + + return true +} diff --git a/vendor/github.com/thoas/go-funk/subtraction.go b/vendor/github.com/thoas/go-funk/subtraction.go new file mode 100644 index 0000000..10a0a98 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/subtraction.go @@ -0,0 +1,87 @@ +package funk + +import ( + "reflect" +) + +// Subtract returns the subtraction between two collections. +func Subtract(x interface{}, y interface{}) interface{} { + if !IsCollection(x) { + panic("First parameter must be a collection") + } + if !IsCollection(y) { + panic("Second parameter must be a collection") + } + + hash := map[interface{}]struct{}{} + + xValue := reflect.ValueOf(x) + xType := xValue.Type() + + yValue := reflect.ValueOf(y) + yType := yValue.Type() + + if NotEqual(xType, yType) { + panic("Parameters must have the same type") + } + + zType := reflect.SliceOf(xType.Elem()) + zSlice := reflect.MakeSlice(zType, 0, 0) + + for i := 0; i < xValue.Len(); i++ { + v := xValue.Index(i).Interface() + hash[v] = struct{}{} + } + + for i := 0; i < yValue.Len(); i++ { + v := yValue.Index(i).Interface() + _, ok := hash[v] + if ok { + delete(hash, v) + } + } + + for i := 0; i < xValue.Len(); i++ { + v := xValue.Index(i).Interface() + _, ok := hash[v] + if ok { + zSlice = reflect.Append(zSlice, xValue.Index(i)) + } + } + + return zSlice.Interface() +} + +// SubtractString returns the subtraction between two collections of string +func SubtractString(x []string, y []string) []string { + if len(x) == 0 { + return []string{} + } + + if len(y) == 0 { + return x + } + + slice := []string{} + hash := map[string]struct{}{} + + for _, v := range x { + hash[v] = struct{}{} + } + + for _, v := range y { + _, ok := hash[v] + if ok { + delete(hash, v) + } + } + + for _, v := range x { + _, ok := hash[v] + if ok { + slice = append(slice, v) + } + } + + return slice +} diff --git a/vendor/github.com/thoas/go-funk/transform.go b/vendor/github.com/thoas/go-funk/transform.go index bc2439e..e4fd65b 100644 --- a/vendor/github.com/thoas/go-funk/transform.go +++ b/vendor/github.com/thoas/go-funk/transform.go @@ -4,6 +4,7 @@ import ( "fmt" "math/rand" "reflect" + "strings" ) // Chunk creates an array of elements split into groups with the length of size. 
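// Editor's note: illustrative sketch only, not part of the vendored transform.go.
// It demonstrates the `size == 0` guard added in the hunk below: Chunk now
// returns the input collection unchanged instead of attempting a zero-sized split.
package main

import (
	"fmt"

	"github.com/thoas/go-funk"
)

func main() {
	// Normal chunking: an uneven remainder becomes the final chunk.
	fmt.Println(funk.Chunk([]int{1, 2, 3, 4, 5}, 2)) // [[1 2] [3 4] [5]]

	// With the new guard, a zero size hands the collection back as-is.
	fmt.Println(funk.Chunk([]int{1, 2, 3, 4, 5}, 0)) // [1 2 3 4 5]
}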
@@ -14,6 +15,10 @@ func Chunk(arr interface{}, size int) interface{} { panic("First parameter must be neither array nor slice") } + if size == 0 { + return arr + } + arrValue := reflect.ValueOf(arr) arrType := arrValue.Type() @@ -93,7 +98,7 @@ func ToMap(in interface{}, pivot string) interface{} { return collection.Interface() } -func mapSlice(arrValue reflect.Value, funcValue reflect.Value) interface{} { +func mapSlice(arrValue reflect.Value, funcValue reflect.Value) reflect.Value { funcType := funcValue.Type() if funcType.NumIn() != 1 || funcType.NumOut() == 0 || funcType.NumOut() > 2 { @@ -120,7 +125,7 @@ func mapSlice(arrValue reflect.Value, funcValue reflect.Value) interface{} { resultSlice = reflect.Append(resultSlice, result) } - return resultSlice.Interface() + return resultSlice } if funcType.NumOut() == 2 { @@ -136,17 +141,17 @@ func mapSlice(arrValue reflect.Value, funcValue reflect.Value) interface{} { collection.SetMapIndex(results[0], results[1]) } - return collection.Interface() + return collection } - return nil + return reflect.Value{} } -func mapMap(arrValue reflect.Value, funcValue reflect.Value) interface{} { +func mapMap(arrValue reflect.Value, funcValue reflect.Value) reflect.Value { funcType := funcValue.Type() if funcType.NumIn() != 2 || funcType.NumOut() == 0 || funcType.NumOut() > 2 { - panic("Map function with an map must have one parameter and must return one or two parameters") + panic("Map function with a map must have two parameters and must return one or two parameters") } // Only one returned parameter, should be a slice @@ -165,7 +170,7 @@ func mapMap(arrValue reflect.Value, funcValue reflect.Value) interface{} { resultSlice = reflect.Append(resultSlice, result) } - return resultSlice.Interface() + return resultSlice } // two parameters, should be a map @@ -183,14 +188,24 @@ func mapMap(arrValue reflect.Value, funcValue reflect.Value) interface{} { } - return collection.Interface() + return collection } - return nil + return reflect.Value{} } // Map manipulates an iteratee and transforms it to another type. func Map(arr interface{}, mapFunc interface{}) interface{} { + result := mapFn(arr, mapFunc, "Map") + + if result.IsValid() { + return result.Interface() + } + + return nil +} + +func mapFn(arr interface{}, mapFunc interface{}, funcName string) reflect.Value { if !IsIteratee(arr) { panic("First parameter must be an iteratee") } @@ -209,13 +224,50 @@ func Map(arr interface{}, mapFunc interface{}) interface{} { if kind == reflect.Slice || kind == reflect.Array { return mapSlice(arrValue, funcValue) + } else if kind == reflect.Map { + return mapMap(arrValue, funcValue) } - if kind == reflect.Map { - return mapMap(arrValue, funcValue) + panic(fmt.Sprintf("Type %s is not supported by "+funcName, arrType.String())) +} + +// FlatMap manipulates an iteratee and transforms it to a flattened collection of another type. +func FlatMap(arr interface{}, mapFunc interface{}) interface{} { + result := mapFn(arr, mapFunc, "FlatMap") + + if result.IsValid() { + return flatten(result).Interface() + } + + return nil +} + +// Flatten flattens a two-dimensional array. 
+func Flatten(out interface{}) interface{} { + return flatten(reflect.ValueOf(out)).Interface() +} + +func flatten(value reflect.Value) reflect.Value { + sliceType := value.Type() + + if (value.Kind() != reflect.Slice && value.Kind() != reflect.Array) || + (sliceType.Elem().Kind() != reflect.Slice && sliceType.Elem().Kind() != reflect.Array) { + panic("Argument must be an array or slice of at least two dimensions") + } + + resultSliceType := sliceType.Elem().Elem() + + resultSlice := reflect.MakeSlice(reflect.SliceOf(resultSliceType), 0, 0) + + length := value.Len() + + for i := 0; i < length; i++ { + item := value.Index(i) + + resultSlice = reflect.AppendSlice(resultSlice, item) } - panic(fmt.Sprintf("Type %s is not supported by Map", arrType.String())) + return resultSlice } // FlattenDeep recursively flattens array. @@ -228,10 +280,10 @@ func flattenDeep(value reflect.Value) reflect.Value { resultSlice := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, 0) - return flatten(value, resultSlice) + return flattenRecursive(value, resultSlice) } -func flatten(value reflect.Value, result reflect.Value) reflect.Value { +func flattenRecursive(value reflect.Value, result reflect.Value) reflect.Value { length := value.Len() for i := 0; i < length; i++ { @@ -239,7 +291,7 @@ func flatten(value reflect.Value, result reflect.Value) reflect.Value { kind := item.Kind() if kind == reflect.Slice || kind == reflect.Array { - result = flatten(item, result) + result = flattenRecursive(item, result) } else { result = reflect.Append(result, item) } @@ -309,6 +361,8 @@ func Uniq(in interface{}) interface{} { if kind == reflect.Array || kind == reflect.Slice { length := value.Len() + result := makeSlice(value, 0) + seen := make(map[interface{}]bool, length) j := 0 @@ -321,11 +375,11 @@ func Uniq(in interface{}) interface{} { } seen[v] = true - value.Index(j).Set(val) + result = reflect.Append(result, val) j++ } - return value.Slice(0, j).Interface() + return result.Interface() } panic(fmt.Sprintf("Type %s is not supported by Uniq", valueType.String())) @@ -388,3 +442,114 @@ func Drop(in interface{}, n int) interface{} { panic(fmt.Sprintf("Type %s is not supported by Drop", valueType.String())) } + +// Prune returns a copy of "in" that only contains fields in "paths" +// which are looked up using struct field name. +// For lookup paths by field tag instead, use funk.PruneByTag() +func Prune(in interface{}, paths []string) (interface{}, error) { + return pruneByTag(in, paths, nil /*tag*/) +} + +// pruneByTag returns a copy of "in" that only contains fields in "paths" +// which are looked up using struct field Tag "tag". +func PruneByTag(in interface{}, paths []string, tag string) (interface{}, error) { + return pruneByTag(in, paths, &tag) +} + +// pruneByTag returns a copy of "in" that only contains fields in "paths" +// which are looked up using struct field Tag "tag". 
If tag is nil, +// traverse paths using struct field name +func pruneByTag(in interface{}, paths []string, tag *string) (interface{}, error) { + + inValue := reflect.ValueOf(in) + + ret := reflect.New(inValue.Type()).Elem() + + for _, path := range paths { + parts := strings.Split(path, ".") + if err := prune(inValue, ret, parts, tag); err != nil { + return nil, err + } + } + return ret.Interface(), nil +} + +func prune(inValue reflect.Value, ret reflect.Value, parts []string, tag *string) error { + + if len(parts) == 0 { + // we reached the location that ret needs to hold inValue + // Note: The value at the end of the path is not copied, maybe we need to change. + // ret and the original data holds the same reference to this value + ret.Set(inValue) + return nil + } + + inKind := inValue.Kind() + + switch inKind { + case reflect.Ptr: + if inValue.IsNil() { + // TODO validate + return nil + } + if ret.IsNil() { + // init ret and go to next level + ret.Set(reflect.New(inValue.Type().Elem())) + } + return prune(inValue.Elem(), ret.Elem(), parts, tag) + case reflect.Struct: + part := parts[0] + var fValue reflect.Value + var fRet reflect.Value + if tag == nil { + // use field name + fValue = inValue.FieldByName(part) + if !fValue.IsValid() { + return fmt.Errorf("field name %v is not found in struct %v", part, inValue.Type().String()) + } + fRet = ret.FieldByName(part) + } else { + // search tag that has key equal to part + found := false + for i := 0; i < inValue.NumField(); i++ { + f := inValue.Type().Field(i) + if key, ok := f.Tag.Lookup(*tag); ok { + if key == part { + fValue = inValue.Field(i) + fRet = ret.Field(i) + found = true + break + } + } + } + if !found { + return fmt.Errorf("Struct tag %v is not found with key %v", *tag, part) + } + } + // init Ret is zero and go down one more level + if fRet.IsZero() { + fRet.Set(reflect.New(fValue.Type()).Elem()) + } + return prune(fValue, fRet, parts[1:], tag) + case reflect.Array, reflect.Slice: + // set all its elements + length := inValue.Len() + // init ret + if ret.IsZero() { + if inKind == reflect.Slice { + ret.Set(reflect.MakeSlice(inValue.Type(), length /*len*/, length /*cap*/)) + } else { // array + ret.Set(reflect.New(inValue.Type()).Elem()) + } + } + for j := 0; j < length; j++ { + if err := prune(inValue.Index(j), ret.Index(j), parts, tag); err != nil { + return err + } + } + default: + return fmt.Errorf("path %v cannot be looked up on kind of %v", strings.Join(parts, "."), inValue.Kind()) + } + + return nil +} diff --git a/vendor/github.com/thoas/go-funk/typesafe.go b/vendor/github.com/thoas/go-funk/typesafe.go index 9266822..059338b 100644 --- a/vendor/github.com/thoas/go-funk/typesafe.go +++ b/vendor/github.com/thoas/go-funk/typesafe.go @@ -4,6 +4,11 @@ import ( "math/rand" ) +// InBools is an alias of ContainsBool, returns true if a bool is present in a iteratee. +func InBools(s []bool, v bool) bool { + return ContainsBool(s, v) +} + // InInts is an alias of ContainsInt, returns true if an int is present in a iteratee. func InInts(s []int, v int) bool { return ContainsInt(s, v) @@ -19,6 +24,21 @@ func InInt64s(s []int64, v int64) bool { return ContainsInt64(s, v) } +// InUInts is an alias of ContainsUInt, returns true if an uint is present in a iteratee. +func InUInts(s []uint, v uint) bool { + return ContainsUInt(s, v) +} + +// InUInt32s is an alias of ContainsUInt32, returns true if an uint32 is present in a iteratee. 
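// An illustrative sketch (not part of the vendored diff) of the new Prune and
// PruneByTag helpers defined above. The User and Address types and their json
// tags are made up; paths are dot-separated field selectors.
package main

import (
	"fmt"

	funk "github.com/thoas/go-funk"
)

type Address struct {
	City string `json:"city"`
	Zip  string `json:"zip"`
}

type User struct {
	Name    string  `json:"name"`
	Age     int     `json:"age"`
	Address Address `json:"address"`
}

func main() {
	u := User{Name: "Ann", Age: 42, Address: Address{City: "Berlin", Zip: "10115"}}

	// Keep only Name and Address.City, looked up by struct field name.
	byName, err := funk.Prune(u, []string{"Name", "Address.City"})
	fmt.Println(byName, err) // {Ann 0 {Berlin }} <nil>

	// Same selection, but fields are resolved via the json struct tags.
	byTag, err := funk.PruneByTag(u, []string{"name", "address.city"}, "json")
	fmt.Println(byTag, err)
}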
+func InUInt32s(s []uint32, v uint32) bool { + return ContainsUInt32(s, v) +} + +// InUInt64s is an alias of ContainsUInt64, returns true if an uint64 is present in a iteratee. +func InUInt64s(s []uint64, v uint64) bool { + return ContainsUInt64(s, v) +} + // InStrings is an alias of ContainsString, returns true if a string is present in a iteratee. func InStrings(s []string, v string) bool { return ContainsString(s, v) @@ -118,6 +138,22 @@ func FindString(s []string, cb func(s string) bool) (string, bool) { return "", false } +// FilterBool iterates over a collection of bool, returning an array of +// all bool elements predicate returns truthy for. +func FilterBool(s []bool, cb func(s bool) bool) []bool { + results := []bool{} + + for _, i := range s { + result := cb(i) + + if result { + results = append(results, i) + } + } + + return results +} + // FilterFloat64 iterates over a collection of float64, returning an array of // all float64 elements predicate returns truthy for. func FilterFloat64(s []float64, cb func(s float64) bool) []float64 { @@ -198,6 +234,54 @@ func FilterInt64(s []int64, cb func(s int64) bool) []int64 { return results } +// FilterUInt iterates over a collection of uint, returning an array of +// all uint elements predicate returns truthy for. +func FilterUInt(s []uint, cb func(s uint) bool) []uint { + results := []uint{} + + for _, i := range s { + result := cb(i) + + if result { + results = append(results, i) + } + } + + return results +} + +// FilterUInt32 iterates over a collection of uint32, returning an array of +// all uint32 elements predicate returns truthy for. +func FilterUInt32(s []uint32, cb func(s uint32) bool) []uint32 { + results := []uint32{} + + for _, i := range s { + result := cb(i) + + if result { + results = append(results, i) + } + } + + return results +} + +// FilterUInt64 iterates over a collection of uint64, returning an array of +// all uint64 elements predicate returns truthy for. +func FilterUInt64(s []uint64, cb func(s uint64) bool) []uint64 { + results := []uint64{} + + for _, i := range s { + result := cb(i) + + if result { + results = append(results, i) + } + } + + return results +} + // FilterString iterates over a collection of string, returning an array of // all string elements predicate returns truthy for. func FilterString(s []string, cb func(s string) bool) []string { @@ -214,6 +298,16 @@ func FilterString(s []string, cb func(s string) bool) []string { return results } +// ContainsBool returns true if a boolean is present in a iteratee. +func ContainsBool(s []bool, v bool) bool { + for _, vv := range s { + if vv == v { + return true + } + } + return false +} + // ContainsInt returns true if an int is present in a iteratee. func ContainsInt(s []int, v int) bool { for _, vv := range s { @@ -244,6 +338,36 @@ func ContainsInt64(s []int64, v int64) bool { return false } +// ContainsUInt returns true if an uint is present in a iteratee. +func ContainsUInt(s []uint, v uint) bool { + for _, vv := range s { + if vv == v { + return true + } + } + return false +} + +// ContainsUInt32 returns true if an uint32 is present in a iteratee. +func ContainsUInt32(s []uint32, v uint32) bool { + for _, vv := range s { + if vv == v { + return true + } + } + return false +} + +// ContainsUInt64 returns true if an uint64 is present in a iteratee. +func ContainsUInt64(s []uint64, v uint64) bool { + for _, vv := range s { + if vv == v { + return true + } + } + return false +} + // ContainsString returns true if a string is present in a iteratee. 
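// A short sketch (not part of the vendored diff) of the new typed helpers for
// bool and unsigned-integer slices added above; the inputs are illustrative.
package main

import (
	"fmt"

	funk "github.com/thoas/go-funk"
)

func main() {
	fmt.Println(funk.InUInts([]uint{7, 8}, 9))                  // false
	fmt.Println(funk.ContainsUInt32([]uint32{1, 2, 3}, 2))      // true
	fmt.Println(funk.ContainsBool([]bool{true, false}, false))  // true

	// FilterUInt keeps the elements for which the predicate returns true.
	evens := funk.FilterUInt([]uint{1, 2, 3, 4}, func(u uint) bool { return u%2 == 0 })
	fmt.Println(evens) // [2 4]
}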
func ContainsString(s []string, v string) bool { for _, vv := range s { @@ -298,6 +422,30 @@ func SumInt(s []int) (sum int) { return } +// SumUInt32 sums a uint32 iteratee and returns the sum of all elements +func SumUInt32(s []uint32) (sum uint32) { + for _, v := range s { + sum += v + } + return +} + +// SumUInt64 sums a uint64 iteratee and returns the sum of all elements +func SumUInt64(s []uint64) (sum uint64) { + for _, v := range s { + sum += v + } + return +} + +// SumUInt sums a uint iteratee and returns the sum of all elements +func SumUInt(s []uint) (sum uint) { + for _, v := range s { + sum += v + } + return +} + // SumFloat64 sums a float64 iteratee and returns the sum of all elements func SumFloat64(s []float64) (sum float64) { for _, v := range s { @@ -314,6 +462,14 @@ func SumFloat32(s []float32) (sum float32) { return } +// ReverseBools reverses an array of bool +func ReverseBools(s []bool) []bool { + for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } + return s +} + // ReverseStrings reverses an array of string func ReverseStrings(s []string) []string { for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 { @@ -346,6 +502,30 @@ func ReverseInt64(s []int64) []int64 { return s } +// ReverseUInt reverses an array of int +func ReverseUInt(s []uint) []uint { + for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } + return s +} + +// ReverseUInt32 reverses an array of uint32 +func ReverseUInt32(s []uint32) []uint32 { + for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } + return s +} + +// ReverseUInt64 reverses an array of uint64 +func ReverseUInt64(s []uint64) []uint64 { + for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } + return s +} + // ReverseFloat64 reverses an array of float64 func ReverseFloat64(s []float64) []float64 { for i, j := 0, len(s)-1; i < len(s)/2; i, j = i+1, j-1 { @@ -380,6 +560,12 @@ func indexOf(n int, f func(int) bool) int { return -1 } +// IndexOfBool gets the index at which the first occurrence of a bool value is found in array or return -1 +// if the value cannot be found +func IndexOfBool(a []bool, x bool) int { + return indexOf(len(a), func(i int) bool { return a[i] == x }) +} + // IndexOfInt gets the index at which the first occurrence of an int value is found in array or return -1 // if the value cannot be found func IndexOfInt(a []int, x int) int { @@ -398,6 +584,24 @@ func IndexOfInt64(a []int64, x int64) int { return indexOf(len(a), func(i int) bool { return a[i] == x }) } +// IndexOfUInt gets the index at which the first occurrence of an uint value is found in array or return -1 +// if the value cannot be found +func IndexOfUInt(a []uint, x uint) int { + return indexOf(len(a), func(i int) bool { return a[i] == x }) +} + +// IndexOfUInt32 gets the index at which the first occurrence of an uint32 value is found in array or return -1 +// if the value cannot be found +func IndexOfUInt32(a []uint32, x uint32) int { + return indexOf(len(a), func(i int) bool { return a[i] == x }) +} + +// IndexOfUInt64 gets the index at which the first occurrence of an uint64 value is found in array or return -1 +// if the value cannot be found +func IndexOfUInt64(a []uint64, x uint64) int { + return indexOf(len(a), func(i int) bool { return a[i] == x }) +} + // IndexOfFloat64 gets the index at which the first occurrence of an float64 value is found in array or return -1 // if the value cannot be found func IndexOfFloat64(a 
[]float64, x float64) int { @@ -411,7 +615,7 @@ func IndexOfString(a []string, x string) int { } func lastIndexOf(n int, f func(int) bool) int { - for i := n - 1; i > 0; i-- { + for i := n - 1; i >= 0; i-- { if f(i) { return i } @@ -419,6 +623,12 @@ func lastIndexOf(n int, f func(int) bool) int { return -1 } +// LastIndexOfBool gets the index at which the first occurrence of a bool value is found in array or return -1 +// if the value cannot be found +func LastIndexOfBool(a []bool, x bool) int { + return lastIndexOf(len(a), func(i int) bool { return a[i] == x }) +} + // LastIndexOfInt gets the index at which the first occurrence of an int value is found in array or return -1 // if the value cannot be found func LastIndexOfInt(a []int, x int) int { @@ -437,6 +647,24 @@ func LastIndexOfInt64(a []int64, x int64) int { return lastIndexOf(len(a), func(i int) bool { return a[i] == x }) } +// LastIndexOfUInt gets the index at which the first occurrence of an uint value is found in array or return -1 +// if the value cannot be found +func LastIndexOfUInt(a []uint, x uint) int { + return lastIndexOf(len(a), func(i int) bool { return a[i] == x }) +} + +// LastIndexOfUInt32 gets the index at which the first occurrence of an uint32 value is found in array or return -1 +// if the value cannot be found +func LastIndexOfUInt32(a []uint32, x uint32) int { + return lastIndexOf(len(a), func(i int) bool { return a[i] == x }) +} + +// LastIndexOfUInt64 gets the index at which the first occurrence of an uint64 value is found in array or return -1 +// if the value cannot be found +func LastIndexOfUInt64(a []uint64, x uint64) int { + return lastIndexOf(len(a), func(i int) bool { return a[i] == x }) +} + // LastIndexOfFloat64 gets the index at which the first occurrence of an float64 value is found in array or return -1 // if the value cannot be found func LastIndexOfFloat64(a []float64, x float64) int { @@ -455,12 +683,30 @@ func LastIndexOfString(a []string, x string) int { return lastIndexOf(len(a), func(i int) bool { return a[i] == x }) } +// UniqBool creates an array of bool with unique values. +func UniqBool(a []bool) []bool { + results := []bool{} + for _, value := range a { + // If results is not empty, there is at most 1 value in it + if len(results) == 0 || results[0] != value { + results = append(results, value) + } + // At most 2 unique values + if len(results) == 2 { + break + } + } + return results +} + // UniqInt32 creates an array of int32 with unique values. func UniqInt32(a []int32) []int32 { - length := len(a) - - seen := make(map[int32]struct{}, length) - j := 0 + var ( + length = len(a) + seen = make(map[int32]struct{}, length) + j = 0 + results = make([]int32, 0) + ) for i := 0; i < length; i++ { v := a[i] @@ -470,19 +716,21 @@ func UniqInt32(a []int32) []int32 { } seen[v] = struct{}{} - a[j] = v + results = append(results, v) j++ } - return a[0:j] + return results } // UniqInt64 creates an array of int64 with unique values. func UniqInt64(a []int64) []int64 { - length := len(a) - - seen := make(map[int64]struct{}, length) - j := 0 + var ( + length = len(a) + seen = make(map[int64]struct{}, length) + results = make([]int64, 0) + j = 0 + ) for i := 0; i < length; i++ { v := a[i] @@ -492,19 +740,69 @@ func UniqInt64(a []int64) []int64 { } seen[v] = struct{}{} - a[j] = v + results = append(results, v) j++ } - return a[0:j] + return results } // UniqInt creates an array of int with unique values. 
func UniqInt(a []int) []int { - length := len(a) + var ( + length = len(a) + seen = make(map[int]struct{}, length) + results = make([]int, 0) + j = 0 + ) + + for i := 0; i < length; i++ { + v := a[i] + + if _, ok := seen[v]; ok { + continue + } + + seen[v] = struct{}{} + results = append(results, v) + j++ + } + + return results +} + +// UniqUInt32 creates an array of uint32 with unique values. +func UniqUInt32(a []uint32) []uint32 { + var ( + length = len(a) + seen = make(map[uint32]struct{}, length) + j = 0 + results = make([]uint32, 0) + ) + + for i := 0; i < length; i++ { + v := a[i] + + if _, ok := seen[v]; ok { + continue + } + + seen[v] = struct{}{} + results = append(results, v) + j++ + } + + return results +} - seen := make(map[int]struct{}, length) - j := 0 +// UniqUInt64 creates an array of uint64 with unique values. +func UniqUInt64(a []uint64) []uint64 { + var ( + length = len(a) + seen = make(map[uint64]struct{}, length) + j = 0 + results = make([]uint64, 0) + ) for i := 0; i < length; i++ { v := a[i] @@ -514,19 +812,45 @@ func UniqInt(a []int) []int { } seen[v] = struct{}{} - a[j] = v + results = append(results, v) j++ } - return a[0:j] + return results +} + +// UniqUInt creates an array of uint with unique values. +func UniqUInt(a []uint) []uint { + var ( + length = len(a) + seen = make(map[uint]struct{}, length) + j = 0 + results = make([]uint, 0) + ) + + for i := 0; i < length; i++ { + v := a[i] + + if _, ok := seen[v]; ok { + continue + } + + seen[v] = struct{}{} + results = append(results, v) + j++ + } + + return results } // UniqString creates an array of string with unique values. func UniqString(a []string) []string { - length := len(a) - - seen := make(map[string]struct{}, length) - j := 0 + var ( + length = len(a) + seen = make(map[string]struct{}, length) + j = 0 + results = make([]string, 0) + ) for i := 0; i < length; i++ { v := a[i] @@ -536,19 +860,21 @@ func UniqString(a []string) []string { } seen[v] = struct{}{} - a[j] = v + results = append(results, v) j++ } - return a[0:j] + return results } // UniqFloat64 creates an array of float64 with unique values. func UniqFloat64(a []float64) []float64 { - length := len(a) - - seen := make(map[float64]struct{}, length) - j := 0 + var ( + length = len(a) + seen = make(map[float64]struct{}, length) + j = 0 + results = make([]float64, 0) + ) for i := 0; i < length; i++ { v := a[i] @@ -558,19 +884,21 @@ func UniqFloat64(a []float64) []float64 { } seen[v] = struct{}{} - a[j] = v + results = append(results, v) j++ } - return a[0:j] + return results } // UniqFloat32 creates an array of float32 with unique values. 
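// A brief sketch (not part of the vendored diff): after the change above, the
// Uniq* helpers build and return a freshly allocated slice instead of
// compacting their argument in place, so the input is left untouched.
package main

import (
	"fmt"

	funk "github.com/thoas/go-funk"
)

func main() {
	in := []int{1, 1, 2, 3, 3}
	out := funk.UniqInt(in)
	fmt.Println(out) // [1 2 3]
	fmt.Println(in)  // [1 1 2 3 3] (the original slice is not modified)
}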
func UniqFloat32(a []float32) []float32 { - length := len(a) - - seen := make(map[float32]struct{}, length) - j := 0 + var ( + length = len(a) + seen = make(map[float32]struct{}, length) + j = 0 + results = make([]float32, 0) + ) for i := 0; i < length; i++ { v := a[i] @@ -580,11 +908,21 @@ func UniqFloat32(a []float32) []float32 { } seen[v] = struct{}{} - a[j] = v + results = append(results, v) j++ } - return a[0:j] + return results +} + +// ShuffleBool creates an array of bool shuffled values using Fisher–Yates algorithm +func ShuffleBool(a []bool) []bool { + for i := range a { + j := rand.Intn(i + 1) + a[i], a[j] = a[j], a[i] + } + + return a } // ShuffleInt creates an array of int shuffled values using Fisher–Yates algorithm @@ -617,6 +955,36 @@ func ShuffleInt64(a []int64) []int64 { return a } +// ShuffleUInt creates an array of int shuffled values using Fisher–Yates algorithm +func ShuffleUInt(a []uint) []uint { + for i := range a { + j := rand.Intn(i + 1) + a[i], a[j] = a[j], a[i] + } + + return a +} + +// ShuffleUInt32 creates an array of uint32 shuffled values using Fisher–Yates algorithm +func ShuffleUInt32(a []uint32) []uint32 { + for i := range a { + j := rand.Intn(i + 1) + a[i], a[j] = a[j], a[i] + } + + return a +} + +// ShuffleUInt64 creates an array of uint64 shuffled values using Fisher–Yates algorithm +func ShuffleUInt64(a []uint64) []uint64 { + for i := range a { + j := rand.Intn(i + 1) + a[i], a[j] = a[j], a[i] + } + + return a +} + // ShuffleString creates an array of string shuffled values using Fisher–Yates algorithm func ShuffleString(a []string) []string { for i := range a { @@ -647,6 +1015,11 @@ func ShuffleFloat64(a []float64) []float64 { return a } +// DropBool creates a slice with `n` bools dropped from the beginning. +func DropBool(s []bool, n int) []bool { + return s[n:] +} + // DropString creates a slice with `n` strings dropped from the beginning. func DropString(s []string, n int) []string { return s[n:] @@ -667,6 +1040,21 @@ func DropInt64(s []int64, n int) []int64 { return s[n:] } +// DropUInt creates a slice with `n` ints dropped from the beginning. +func DropUInt(s []uint, n uint) []uint { + return s[n:] +} + +// DropUInt32 creates a slice with `n` int32s dropped from the beginning. +func DropUInt32(s []uint32, n int) []uint32 { + return s[n:] +} + +// DropUInt64 creates a slice with `n` int64s dropped from the beginning. +func DropUInt64(s []uint64, n int) []uint64 { + return s[n:] +} + // DropFloat32 creates a slice with `n` float32s dropped from the beginning. func DropFloat32(s []float32, n int) []float32 { return s[n:] @@ -676,3 +1064,98 @@ func DropFloat32(s []float32, n int) []float32 { func DropFloat64(s []float64, n int) []float64 { return s[n:] } + +// ChunkStrings creates an array of strings split into groups with the length of size. +// If array can't be split evenly, the final chunk will be +// the remaining element. +func ChunkStrings(arr []string, size int) [][]string { + var results [][]string + + for i := 0; i < len(arr); i += size { + end := i + size + + if end > len(arr) { + end = len(arr) + } + + results = append(results, arr[i:end]) + } + + return results +} + +// ChunkInts creates an array of ints split into groups with the length of size. +// If array can't be split evenly, the final chunk will be +// the remaining element. 
+func ChunkInts(arr []int, size int) [][]int { + var results [][]int + + for i := 0; i < len(arr); i += size { + end := i + size + + if end > len(arr) { + end = len(arr) + } + + results = append(results, arr[i:end]) + } + + return results +} + +// ChunkInt32s creates an array of int32s split into groups with the length of size. +// If array can't be split evenly, the final chunk will be +// the remaining element. +func ChunkInt32s(arr []int32, size int) [][]int32 { + var results [][]int32 + + for i := 0; i < len(arr); i += size { + end := i + size + + if end > len(arr) { + end = len(arr) + } + + results = append(results, arr[i:end]) + } + + return results +} + +// ChunkInt64s creates an array of int64s split into groups with the length of size. +// If array can't be split evenly, the final chunk will be +// the remaining element. +func ChunkInt64s(arr []int64, size int) [][]int64 { + var results [][]int64 + + for i := 0; i < len(arr); i += size { + end := i + size + + if end > len(arr) { + end = len(arr) + } + + results = append(results, arr[i:end]) + } + + return results +} + +// ChunkFloat64s creates an array of float64s split into groups with the length of size. +// If array can't be split evenly, the final chunk will be +// the remaining element. +func ChunkFloat64s(arr []float64, size int) [][]float64 { + var results [][]float64 + + for i := 0; i < len(arr); i += size { + end := i + size + + if end > len(arr) { + end = len(arr) + } + + results = append(results, arr[i:end]) + } + + return results +} diff --git a/vendor/github.com/thoas/go-funk/utils.go b/vendor/github.com/thoas/go-funk/utils.go index 657484b..43d9a2b 100644 --- a/vendor/github.com/thoas/go-funk/utils.go +++ b/vendor/github.com/thoas/go-funk/utils.go @@ -1,14 +1,57 @@ package funk -import "reflect" +import ( + "fmt" + "reflect" +) -func equal(expected, actual interface{}) bool { - if expected == nil || actual == nil { - return expected == actual +func equal(expectedOrPredicate interface{}, optionalIsMap ...bool) func(keyValueIfMap, actualValue reflect.Value) bool { + isMap := append(optionalIsMap, false)[0] + + if IsFunction(expectedOrPredicate) { + inTypes := []reflect.Type{nil}; if isMap { + inTypes = append(inTypes, nil) + } + + if !IsPredicate(expectedOrPredicate, inTypes...) { + panic(fmt.Sprintf("Predicate function must have %d parameter and must return boolean", len(inTypes))) + } + + predicateValue := reflect.ValueOf(expectedOrPredicate) + + return func(keyValueIfMap, actualValue reflect.Value) bool { + + if isMap && !keyValueIfMap.Type().ConvertibleTo(predicateValue.Type().In(0)) { + panic("Given key is not compatible with type of parameter for the predicate.") + } + + if (isMap && !actualValue.Type().ConvertibleTo(predicateValue.Type().In(1))) || + (!isMap && !actualValue.Type().ConvertibleTo(predicateValue.Type().In(0))) { + panic("Given value is not compatible with type of parameter for the predicate.") + } + + args := []reflect.Value{actualValue} + if isMap { + args = append([]reflect.Value{keyValueIfMap}, args...) 
+ } + + return predicateValue.Call(args)[0].Bool() + } } - return reflect.DeepEqual(expected, actual) + expected := expectedOrPredicate + return func(keyValueIfMap, actualValue reflect.Value) bool { + if isMap { + actualValue = keyValueIfMap + } + + if expected == nil || actualValue.IsZero() { + return actualValue.Interface() == expected + } + + return reflect.DeepEqual(actualValue.Interface(), expected) + } } func sliceElem(rtype reflect.Type) reflect.Type { @@ -23,17 +66,21 @@ func sliceElem(rtype reflect.Type) reflect.Type { func redirectValue(value reflect.Value) reflect.Value { for { - if !value.IsValid() || value.Kind() != reflect.Ptr { + if !value.IsValid() || (value.Kind() != reflect.Ptr && value.Kind() != reflect.Interface) { return value } - res := reflect.Indirect(value) + res := value.Elem() // Test for a circular type. - if res.Kind() == reflect.Ptr && value.Pointer() == res.Pointer() { + if res.Kind() == reflect.Ptr && value.Kind() == reflect.Ptr && value.Pointer() == res.Pointer() { return value } + if !res.IsValid() && value.Kind() == reflect.Ptr { + return reflect.Zero(value.Type().Elem()) + } + value = res } } diff --git a/vendor/github.com/thoas/go-funk/without.go b/vendor/github.com/thoas/go-funk/without.go new file mode 100644 index 0000000..6e35e98 --- /dev/null +++ b/vendor/github.com/thoas/go-funk/without.go @@ -0,0 +1,19 @@ +package funk + +import "reflect" + +// Without creates an array excluding all given values. +func Without(in interface{}, values ...interface{}) interface{} { + if !IsCollection(in) { + panic("First parameter must be a collection") + } + + inValue := reflect.ValueOf(in) + for _, value := range values { + if NotEqual(inValue.Type().Elem(), reflect.TypeOf(value)) { + panic("Values must have the same type") + } + } + + return LeftJoin(inValue, reflect.ValueOf(values)).Interface() +} diff --git a/vendor/github.com/thoas/go-funk/zip.go b/vendor/github.com/thoas/go-funk/zip.go index f9056bc..dcd3828 100644 --- a/vendor/github.com/thoas/go-funk/zip.go +++ b/vendor/github.com/thoas/go-funk/zip.go @@ -14,21 +14,19 @@ type Tuple struct { // from each of the input iterables. The returned list is truncated in length // to the length of the shortest input iterable. func Zip(slice1 interface{}, slice2 interface{}) []Tuple { - inValue1 := reflect.ValueOf(slice1) - inValue2 := reflect.ValueOf(slice2) - kind1 := inValue1.Type().Kind() - kind2 := inValue2.Type().Kind() - - result := []Tuple{} - for _, kind := range []reflect.Kind{kind1, kind2} { - if kind != reflect.Slice && kind != reflect.Array { - return result - } + if !IsCollection(slice1) || !IsCollection(slice2) { + panic("First parameter must be a collection") } - var minLength int - length1 := inValue1.Len() - length2 := inValue2.Len() + var ( + minLength int + inValue1 = reflect.ValueOf(slice1) + inValue2 = reflect.ValueOf(slice2) + result = []Tuple{} + length1 = inValue1.Len() + length2 = inValue2.Len() + ) + if length1 <= length2 { minLength = length1 } else { diff --git a/vendor/golang.org/x/crypto/acme/acme.go b/vendor/golang.org/x/crypto/acme/acme.go index 6e6c9d1..73b19ef 100644 --- a/vendor/golang.org/x/crypto/acme/acme.go +++ b/vendor/golang.org/x/crypto/acme/acme.go @@ -4,7 +4,7 @@ // Package acme provides an implementation of the // Automatic Certificate Management Environment (ACME) spec. -// The intial implementation was based on ACME draft-02 and +// The initial implementation was based on ACME draft-02 and // is now being extended to comply with RFC 8555. 
// See https://tools.ietf.org/html/draft-ietf-acme-acme-02 // and https://tools.ietf.org/html/rfc8555 for details. @@ -363,6 +363,10 @@ func AcceptTOS(tosURL string) bool { return true } // Also see Error's Instance field for when a CA requires already registered accounts to agree // to an updated Terms of Service. func (c *Client) Register(ctx context.Context, acct *Account, prompt func(tosURL string) bool) (*Account, error) { + if c.Key == nil { + return nil, errors.New("acme: client.Key must be set to Register") + } + dir, err := c.Discover(ctx) if err != nil { return nil, err diff --git a/vendor/golang.org/x/crypto/acme/autocert/autocert.go b/vendor/golang.org/x/crypto/acme/autocert/autocert.go index 2ea9e23..c7fbc54 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/autocert.go +++ b/vendor/golang.org/x/crypto/acme/autocert/autocert.go @@ -1133,11 +1133,11 @@ func (s *certState) tlscert() (*tls.Certificate, error) { }, nil } -// certRequest generates a CSR for the given common name cn and optional SANs. -func certRequest(key crypto.Signer, cn string, ext []pkix.Extension, san ...string) ([]byte, error) { +// certRequest generates a CSR for the given common name. +func certRequest(key crypto.Signer, name string, ext []pkix.Extension) ([]byte, error) { req := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: cn}, - DNSNames: san, + Subject: pkix.Name{CommonName: name}, + DNSNames: []string{name}, ExtraExtensions: ext, } return x509.CreateCertificateRequest(rand.Reader, req, key) diff --git a/vendor/golang.org/x/crypto/acme/http.go b/vendor/golang.org/x/crypto/acme/http.go index c51943e..2b4c1a1 100644 --- a/vendor/golang.org/x/crypto/acme/http.go +++ b/vendor/golang.org/x/crypto/acme/http.go @@ -10,6 +10,7 @@ import ( "crypto" "crypto/rand" "encoding/json" + "errors" "fmt" "io/ioutil" "math/big" @@ -215,6 +216,9 @@ func (c *Client) post(ctx context.Context, key crypto.Signer, url string, body i func (c *Client) postNoRetry(ctx context.Context, key crypto.Signer, url string, body interface{}) (*http.Response, *http.Request, error) { kid := noKeyID if key == nil { + if c.Key == nil { + return nil, nil, errors.New("acme: Client.Key must be populated to make POST requests") + } key = c.Key kid = c.accountKID(ctx) } diff --git a/vendor/golang.org/x/crypto/acme/jws.go b/vendor/golang.org/x/crypto/acme/jws.go index 76e3fda..8c3ecce 100644 --- a/vendor/golang.org/x/crypto/acme/jws.go +++ b/vendor/golang.org/x/crypto/acme/jws.go @@ -7,6 +7,7 @@ package acme import ( "crypto" "crypto/ecdsa" + "crypto/hmac" "crypto/rand" "crypto/rsa" "crypto/sha256" @@ -14,6 +15,7 @@ import ( "encoding/asn1" "encoding/base64" "encoding/json" + "errors" "fmt" "math/big" ) @@ -31,6 +33,14 @@ const noKeyID = keyID("") // See https://tools.ietf.org/html/rfc8555#section-6.3 for more details. const noPayload = "" +// jsonWebSignature can be easily serialized into a JWS following +// https://tools.ietf.org/html/rfc7515#section-3.2. +type jsonWebSignature struct { + Protected string `json:"protected"` + Payload string `json:"payload"` + Sig string `json:"signature"` +} + // jwsEncodeJSON signs claimset using provided key and a nonce. // The result is serialized in JSON format containing either kid or jwk // fields based on the provided keyID value. 
@@ -71,12 +81,7 @@ func jwsEncodeJSON(claimset interface{}, key crypto.Signer, kid keyID, nonce, ur if err != nil { return nil, err } - - enc := struct { - Protected string `json:"protected"` - Payload string `json:"payload"` - Sig string `json:"signature"` - }{ + enc := jsonWebSignature{ Protected: phead, Payload: payload, Sig: base64.RawURLEncoding.EncodeToString(sig), @@ -84,6 +89,43 @@ func jwsEncodeJSON(claimset interface{}, key crypto.Signer, kid keyID, nonce, ur return json.Marshal(&enc) } +// jwsWithMAC creates and signs a JWS using the given key and the HS256 +// algorithm. kid and url are included in the protected header. rawPayload +// should not be base64-URL-encoded. +func jwsWithMAC(key []byte, kid, url string, rawPayload []byte) (*jsonWebSignature, error) { + if len(key) == 0 { + return nil, errors.New("acme: cannot sign JWS with an empty MAC key") + } + header := struct { + Algorithm string `json:"alg"` + KID string `json:"kid"` + URL string `json:"url,omitempty"` + }{ + // Only HMAC-SHA256 is supported. + Algorithm: "HS256", + KID: kid, + URL: url, + } + rawProtected, err := json.Marshal(header) + if err != nil { + return nil, err + } + protected := base64.RawURLEncoding.EncodeToString(rawProtected) + payload := base64.RawURLEncoding.EncodeToString(rawPayload) + + h := hmac.New(sha256.New, key) + if _, err := h.Write([]byte(protected + "." + payload)); err != nil { + return nil, err + } + mac := h.Sum(nil) + + return &jsonWebSignature{ + Protected: protected, + Payload: payload, + Sig: base64.RawURLEncoding.EncodeToString(mac), + }, nil +} + // jwkEncode encodes public part of an RSA or ECDSA key into a JWK. // The result is also suitable for creating a JWK thumbprint. // https://tools.ietf.org/html/rfc7517 diff --git a/vendor/golang.org/x/crypto/acme/rfc8555.go b/vendor/golang.org/x/crypto/acme/rfc8555.go index dfb57a6..f9d3011 100644 --- a/vendor/golang.org/x/crypto/acme/rfc8555.go +++ b/vendor/golang.org/x/crypto/acme/rfc8555.go @@ -37,22 +37,32 @@ func (c *Client) DeactivateReg(ctx context.Context) error { return nil } -// registerRFC is quivalent to c.Register but for CAs implementing RFC 8555. +// registerRFC is equivalent to c.Register but for CAs implementing RFC 8555. // It expects c.Discover to have already been called. -// TODO: Implement externalAccountBinding. 
func (c *Client) registerRFC(ctx context.Context, acct *Account, prompt func(tosURL string) bool) (*Account, error) { c.cacheMu.Lock() // guard c.kid access defer c.cacheMu.Unlock() req := struct { - TermsAgreed bool `json:"termsOfServiceAgreed,omitempty"` - Contact []string `json:"contact,omitempty"` + TermsAgreed bool `json:"termsOfServiceAgreed,omitempty"` + Contact []string `json:"contact,omitempty"` + ExternalAccountBinding *jsonWebSignature `json:"externalAccountBinding,omitempty"` }{ Contact: acct.Contact, } if c.dir.Terms != "" { req.TermsAgreed = prompt(c.dir.Terms) } + + // set 'externalAccountBinding' field if requested + if acct.ExternalAccountBinding != nil { + eabJWS, err := c.encodeExternalAccountBinding(acct.ExternalAccountBinding) + if err != nil { + return nil, fmt.Errorf("acme: failed to encode external account binding: %v", err) + } + req.ExternalAccountBinding = eabJWS + } + res, err := c.post(ctx, c.Key, c.dir.RegURL, req, wantStatus( http.StatusOK, // account with this key already registered http.StatusCreated, // new account created @@ -75,7 +85,17 @@ func (c *Client) registerRFC(ctx context.Context, acct *Account, prompt func(tos return a, nil } -// updateGegRFC is equivalent to c.UpdateReg but for CAs implementing RFC 8555. +// encodeExternalAccountBinding will encode an external account binding stanza +// as described in https://tools.ietf.org/html/rfc8555#section-7.3.4. +func (c *Client) encodeExternalAccountBinding(eab *ExternalAccountBinding) (*jsonWebSignature, error) { + jwk, err := jwkEncode(c.Key.Public()) + if err != nil { + return nil, err + } + return jwsWithMAC(eab.Key, eab.KID, c.dir.RegURL, []byte(jwk)) +} + +// updateRegRFC is equivalent to c.UpdateReg but for CAs implementing RFC 8555. // It expects c.Discover to have already been called. func (c *Client) updateRegRFC(ctx context.Context, a *Account) (*Account, error) { url := string(c.accountKID(ctx)) @@ -390,3 +410,29 @@ func isAlreadyRevoked(err error) bool { e, ok := err.(*Error) return ok && e.ProblemType == "urn:ietf:params:acme:error:alreadyRevoked" } + +// ListCertAlternates retrieves any alternate certificate chain URLs for the +// given certificate chain URL. These alternate URLs can be passed to FetchCert +// in order to retrieve the alternate certificate chains. +// +// If there are no alternate issuer certificate chains, a nil slice will be +// returned. +func (c *Client) ListCertAlternates(ctx context.Context, url string) ([]string, error) { + if _, err := c.Discover(ctx); err != nil { // required by c.accountKID + return nil, err + } + + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK)) + if err != nil { + return nil, err + } + defer res.Body.Close() + + // We don't need the body but we need to discard it so we don't end up + // preventing keep-alive + if _, err := io.Copy(ioutil.Discard, res.Body); err != nil { + return nil, fmt.Errorf("acme: cert alternates response stream: %v", err) + } + alts := linkHeader(res.Header, "alternate") + return alts, nil +} diff --git a/vendor/golang.org/x/crypto/acme/types.go b/vendor/golang.org/x/crypto/acme/types.go index e959caf..eaae452 100644 --- a/vendor/golang.org/x/crypto/acme/types.go +++ b/vendor/golang.org/x/crypto/acme/types.go @@ -57,6 +57,32 @@ var ( ErrNoAccount = errors.New("acme: account does not exist") ) +// A Subproblem describes an ACME subproblem as reported in an Error. +type Subproblem struct { + // Type is a URI reference that identifies the problem type, + // typically in a "urn:acme:error:xxx" form. 
+ Type string + // Detail is a human-readable explanation specific to this occurrence of the problem. + Detail string + // Instance indicates a URL that the client should direct a human user to visit + // in order for instructions on how to agree to the updated Terms of Service. + // In such an event CA sets StatusCode to 403, Type to + // "urn:ietf:params:acme:error:userActionRequired", and adds a Link header with relation + // "terms-of-service" containing the latest TOS URL. + Instance string + // Identifier may contain the ACME identifier that the error is for. + Identifier *AuthzID +} + +func (sp Subproblem) String() string { + str := fmt.Sprintf("%s: ", sp.Type) + if sp.Identifier != nil { + str += fmt.Sprintf("[%s: %s] ", sp.Identifier.Type, sp.Identifier.Value) + } + str += sp.Detail + return str +} + // Error is an ACME error, defined in Problem Details for HTTP APIs doc // http://tools.ietf.org/html/draft-ietf-appsawg-http-problem. type Error struct { @@ -76,10 +102,21 @@ type Error struct { // Header is the original server error response headers. // It may be nil. Header http.Header + // Subproblems may contain more detailed information about the individual problems + // that caused the error. This field is only sent by RFC 8555 compatible ACME + // servers. Defined in RFC 8555 Section 6.7.1. + Subproblems []Subproblem } func (e *Error) Error() string { - return fmt.Sprintf("%d %s: %s", e.StatusCode, e.ProblemType, e.Detail) + str := fmt.Sprintf("%d %s: %s", e.StatusCode, e.ProblemType, e.Detail) + if len(e.Subproblems) > 0 { + str += fmt.Sprintf("; subproblems:") + for _, sp := range e.Subproblems { + str += fmt.Sprintf("\n\t%s", sp) + } + } + return str } // AuthorizationError indicates that an authorization for an identifier @@ -199,6 +236,28 @@ type Account struct { // // It is non-RFC 8555 compliant and is obsoleted by OrdersURL. Certificates string + + // ExternalAccountBinding represents an arbitrary binding to an account of + // the CA which the ACME server is tied to. + // See https://tools.ietf.org/html/rfc8555#section-7.3.4 for more details. + ExternalAccountBinding *ExternalAccountBinding +} + +// ExternalAccountBinding contains the data needed to form a request with +// an external account binding. +// See https://tools.ietf.org/html/rfc8555#section-7.3.4 for more details. +type ExternalAccountBinding struct { + // KID is the Key ID of the symmetric MAC key that the CA provides to + // identify an external account from ACME. + KID string + + // Key is the bytes of the symmetric key that the CA provides to identify + // the account. Key must correspond to the KID. + Key []byte +} + +func (e *ExternalAccountBinding) String() string { + return fmt.Sprintf("&{KID: %q, Key: redacted}", e.KID) } // Directory is ACME server discovery data. @@ -511,20 +570,23 @@ func (c *wireChallenge) challenge() *Challenge { // wireError is a subset of fields of the Problem Details object // as described in https://tools.ietf.org/html/rfc7807#section-3.1. 
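// An illustrative sketch (not part of the vendored diff) of registering an ACME
// account with the external account binding support added above. The directory
// URL, key ID and MAC key are placeholders for values handed out by the CA.
package main

import (
	"context"
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"encoding/base64"
	"log"

	"golang.org/x/crypto/acme"
)

func main() {
	// Account key; Register requires Client.Key to be set.
	accountKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		log.Fatal(err)
	}

	// HS256 MAC key provided by the CA, usually base64url-encoded (placeholder value).
	macKey, err := base64.RawURLEncoding.DecodeString("c2VjcmV0LW1hYy1rZXk")
	if err != nil {
		log.Fatal(err)
	}

	client := &acme.Client{
		Key:          accountKey,
		DirectoryURL: "https://acme.example.com/directory", // placeholder directory URL
	}
	acct := &acme.Account{
		Contact: []string{"mailto:ops@example.com"},
		ExternalAccountBinding: &acme.ExternalAccountBinding{
			KID: "example-eab-kid", // placeholder key ID from the CA
			Key: macKey,
		},
	}

	// registerRFC encodes the binding as a JWS (jwsWithMAC) and sends it in the
	// externalAccountBinding field of the new-account request.
	if _, err := client.Register(context.Background(), acct, acme.AcceptTOS); err != nil {
		log.Fatal(err)
	}
}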
type wireError struct { - Status int - Type string - Detail string - Instance string + Status int + Type string + Detail string + Instance string + Subproblems []Subproblem } func (e *wireError) error(h http.Header) *Error { - return &Error{ + err := &Error{ StatusCode: e.Status, ProblemType: e.Type, Detail: e.Detail, Instance: e.Instance, Header: h, + Subproblems: e.Subproblems, } + return err } // CertOption is an optional argument type for the TLS ChallengeCert methods for diff --git a/vendor/golang.org/x/crypto/acme/version_go112.go b/vendor/golang.org/x/crypto/acme/version_go112.go index b58f245..b9efdb5 100644 --- a/vendor/golang.org/x/crypto/acme/version_go112.go +++ b/vendor/golang.org/x/crypto/acme/version_go112.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build go1.12 // +build go1.12 package acme diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_arm64.go b/vendor/golang.org/x/crypto/chacha20/chacha_arm64.go index b799e44..94c71ac 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_arm64.go +++ b/vendor/golang.org/x/crypto/chacha20/chacha_arm64.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.11,!gccgo,!purego +//go:build go1.11 && gc && !purego +// +build go1.11,gc,!purego package chacha20 diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_arm64.s b/vendor/golang.org/x/crypto/chacha20/chacha_arm64.s index 8914815..63cae9e 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_arm64.s +++ b/vendor/golang.org/x/crypto/chacha20/chacha_arm64.s @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.11,!gccgo,!purego +//go:build go1.11 && gc && !purego +// +build go1.11,gc,!purego #include "textflag.h" diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go b/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go index 4635307..025b498 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go +++ b/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !arm64,!s390x,!ppc64le arm64,!go1.11 gccgo purego +//go:build (!arm64 && !s390x && !ppc64le) || (arm64 && !go1.11) || !gc || purego +// +build !arm64,!s390x,!ppc64le arm64,!go1.11 !gc purego package chacha20 diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go index b799330..da420b2 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go +++ b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego package chacha20 diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s index 23c6021..5c0fed2 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s +++ b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s @@ -19,7 +19,8 @@ // The differences in this and the original implementation are // due to the calling conventions and initialization of constants. 
-// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego #include "textflag.h" diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_s390x.go b/vendor/golang.org/x/crypto/chacha20/chacha_s390x.go index a9244bd..c5898db 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_s390x.go +++ b/vendor/golang.org/x/crypto/chacha20/chacha_s390x.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego package chacha20 diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_s390x.s b/vendor/golang.org/x/crypto/chacha20/chacha_s390x.s index 89c658c..f3ef5a0 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_s390x.s +++ b/vendor/golang.org/x/crypto/chacha20/chacha_s390x.s @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego #include "go_asm.h" #include "textflag.h" diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519.go b/vendor/golang.org/x/crypto/curve25519/curve25519.go index 4b9a655..cda3fdd 100644 --- a/vendor/golang.org/x/crypto/curve25519/curve25519.go +++ b/vendor/golang.org/x/crypto/curve25519/curve25519.go @@ -10,6 +10,8 @@ package curve25519 // import "golang.org/x/crypto/curve25519" import ( "crypto/subtle" "fmt" + + "golang.org/x/crypto/curve25519/internal/field" ) // ScalarMult sets dst to the product scalar * point. @@ -18,7 +20,55 @@ import ( // zeroes, irrespective of the scalar. Instead, use the X25519 function, which // will return an error. func ScalarMult(dst, scalar, point *[32]byte) { - scalarMult(dst, scalar, point) + var e [32]byte + + copy(e[:], scalar[:]) + e[0] &= 248 + e[31] &= 127 + e[31] |= 64 + + var x1, x2, z2, x3, z3, tmp0, tmp1 field.Element + x1.SetBytes(point[:]) + x2.One() + x3.Set(&x1) + z3.One() + + swap := 0 + for pos := 254; pos >= 0; pos-- { + b := e[pos/8] >> uint(pos&7) + b &= 1 + swap ^= int(b) + x2.Swap(&x3, swap) + z2.Swap(&z3, swap) + swap = int(b) + + tmp0.Subtract(&x3, &z3) + tmp1.Subtract(&x2, &z2) + x2.Add(&x2, &z2) + z2.Add(&x3, &z3) + z3.Multiply(&tmp0, &x2) + z2.Multiply(&z2, &tmp1) + tmp0.Square(&tmp1) + tmp1.Square(&x2) + x3.Add(&z3, &z2) + z2.Subtract(&z3, &z2) + x2.Multiply(&tmp1, &tmp0) + tmp1.Subtract(&tmp1, &tmp0) + z2.Square(&z2) + + z3.Mult32(&tmp1, 121666) + x3.Square(&x3) + tmp0.Add(&tmp0, &z3) + z3.Multiply(&x1, &z2) + z2.Multiply(&tmp1, &tmp0) + } + + x2.Swap(&x3, swap) + z2.Swap(&z3, swap) + + z2.Invert(&z2) + x2.Multiply(&x2, &z2) + copy(dst[:], x2.Bytes()) } // ScalarBaseMult sets dst to the product scalar * base where base is the diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519_amd64.go b/vendor/golang.org/x/crypto/curve25519/curve25519_amd64.go deleted file mode 100644 index 5120b77..0000000 --- a/vendor/golang.org/x/crypto/curve25519/curve25519_amd64.go +++ /dev/null @@ -1,240 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build amd64,!gccgo,!appengine,!purego - -package curve25519 - -// These functions are implemented in the .s files. The names of the functions -// in the rest of the file are also taken from the SUPERCOP sources to help -// people following along. 
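// A caller-level sketch (not part of the vendored diff): the public curve25519
// API is unchanged by the move from the deleted amd64 assembly to the generic
// field.Element implementation of ScalarMult above. The key material here is
// generated on the spot purely for illustration.
package main

import (
	"crypto/rand"
	"fmt"
	"log"

	"golang.org/x/crypto/curve25519"
)

func main() {
	var priv [32]byte
	if _, err := rand.Read(priv[:]); err != nil {
		log.Fatal(err)
	}

	// Derive the public key, then reuse it as the peer point for demonstration.
	pub, err := curve25519.X25519(priv[:], curve25519.Basepoint)
	if err != nil {
		log.Fatal(err)
	}
	shared, err := curve25519.X25519(priv[:], pub)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%x\n%x\n", pub, shared)
}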
- -//go:noescape - -func cswap(inout *[5]uint64, v uint64) - -//go:noescape - -func ladderstep(inout *[5][5]uint64) - -//go:noescape - -func freeze(inout *[5]uint64) - -//go:noescape - -func mul(dest, a, b *[5]uint64) - -//go:noescape - -func square(out, in *[5]uint64) - -// mladder uses a Montgomery ladder to calculate (xr/zr) *= s. -func mladder(xr, zr *[5]uint64, s *[32]byte) { - var work [5][5]uint64 - - work[0] = *xr - setint(&work[1], 1) - setint(&work[2], 0) - work[3] = *xr - setint(&work[4], 1) - - j := uint(6) - var prevbit byte - - for i := 31; i >= 0; i-- { - for j < 8 { - bit := ((*s)[i] >> j) & 1 - swap := bit ^ prevbit - prevbit = bit - cswap(&work[1], uint64(swap)) - ladderstep(&work) - j-- - } - j = 7 - } - - *xr = work[1] - *zr = work[2] -} - -func scalarMult(out, in, base *[32]byte) { - var e [32]byte - copy(e[:], (*in)[:]) - e[0] &= 248 - e[31] &= 127 - e[31] |= 64 - - var t, z [5]uint64 - unpack(&t, base) - mladder(&t, &z, &e) - invert(&z, &z) - mul(&t, &t, &z) - pack(out, &t) -} - -func setint(r *[5]uint64, v uint64) { - r[0] = v - r[1] = 0 - r[2] = 0 - r[3] = 0 - r[4] = 0 -} - -// unpack sets r = x where r consists of 5, 51-bit limbs in little-endian -// order. -func unpack(r *[5]uint64, x *[32]byte) { - r[0] = uint64(x[0]) | - uint64(x[1])<<8 | - uint64(x[2])<<16 | - uint64(x[3])<<24 | - uint64(x[4])<<32 | - uint64(x[5])<<40 | - uint64(x[6]&7)<<48 - - r[1] = uint64(x[6])>>3 | - uint64(x[7])<<5 | - uint64(x[8])<<13 | - uint64(x[9])<<21 | - uint64(x[10])<<29 | - uint64(x[11])<<37 | - uint64(x[12]&63)<<45 - - r[2] = uint64(x[12])>>6 | - uint64(x[13])<<2 | - uint64(x[14])<<10 | - uint64(x[15])<<18 | - uint64(x[16])<<26 | - uint64(x[17])<<34 | - uint64(x[18])<<42 | - uint64(x[19]&1)<<50 - - r[3] = uint64(x[19])>>1 | - uint64(x[20])<<7 | - uint64(x[21])<<15 | - uint64(x[22])<<23 | - uint64(x[23])<<31 | - uint64(x[24])<<39 | - uint64(x[25]&15)<<47 - - r[4] = uint64(x[25])>>4 | - uint64(x[26])<<4 | - uint64(x[27])<<12 | - uint64(x[28])<<20 | - uint64(x[29])<<28 | - uint64(x[30])<<36 | - uint64(x[31]&127)<<44 -} - -// pack sets out = x where out is the usual, little-endian form of the 5, -// 51-bit limbs in x. -func pack(out *[32]byte, x *[5]uint64) { - t := *x - freeze(&t) - - out[0] = byte(t[0]) - out[1] = byte(t[0] >> 8) - out[2] = byte(t[0] >> 16) - out[3] = byte(t[0] >> 24) - out[4] = byte(t[0] >> 32) - out[5] = byte(t[0] >> 40) - out[6] = byte(t[0] >> 48) - - out[6] ^= byte(t[1]<<3) & 0xf8 - out[7] = byte(t[1] >> 5) - out[8] = byte(t[1] >> 13) - out[9] = byte(t[1] >> 21) - out[10] = byte(t[1] >> 29) - out[11] = byte(t[1] >> 37) - out[12] = byte(t[1] >> 45) - - out[12] ^= byte(t[2]<<6) & 0xc0 - out[13] = byte(t[2] >> 2) - out[14] = byte(t[2] >> 10) - out[15] = byte(t[2] >> 18) - out[16] = byte(t[2] >> 26) - out[17] = byte(t[2] >> 34) - out[18] = byte(t[2] >> 42) - out[19] = byte(t[2] >> 50) - - out[19] ^= byte(t[3]<<1) & 0xfe - out[20] = byte(t[3] >> 7) - out[21] = byte(t[3] >> 15) - out[22] = byte(t[3] >> 23) - out[23] = byte(t[3] >> 31) - out[24] = byte(t[3] >> 39) - out[25] = byte(t[3] >> 47) - - out[25] ^= byte(t[4]<<4) & 0xf0 - out[26] = byte(t[4] >> 4) - out[27] = byte(t[4] >> 12) - out[28] = byte(t[4] >> 20) - out[29] = byte(t[4] >> 28) - out[30] = byte(t[4] >> 36) - out[31] = byte(t[4] >> 44) -} - -// invert calculates r = x^-1 mod p using Fermat's little theorem. 
-func invert(r *[5]uint64, x *[5]uint64) { - var z2, z9, z11, z2_5_0, z2_10_0, z2_20_0, z2_50_0, z2_100_0, t [5]uint64 - - square(&z2, x) /* 2 */ - square(&t, &z2) /* 4 */ - square(&t, &t) /* 8 */ - mul(&z9, &t, x) /* 9 */ - mul(&z11, &z9, &z2) /* 11 */ - square(&t, &z11) /* 22 */ - mul(&z2_5_0, &t, &z9) /* 2^5 - 2^0 = 31 */ - - square(&t, &z2_5_0) /* 2^6 - 2^1 */ - for i := 1; i < 5; i++ { /* 2^20 - 2^10 */ - square(&t, &t) - } - mul(&z2_10_0, &t, &z2_5_0) /* 2^10 - 2^0 */ - - square(&t, &z2_10_0) /* 2^11 - 2^1 */ - for i := 1; i < 10; i++ { /* 2^20 - 2^10 */ - square(&t, &t) - } - mul(&z2_20_0, &t, &z2_10_0) /* 2^20 - 2^0 */ - - square(&t, &z2_20_0) /* 2^21 - 2^1 */ - for i := 1; i < 20; i++ { /* 2^40 - 2^20 */ - square(&t, &t) - } - mul(&t, &t, &z2_20_0) /* 2^40 - 2^0 */ - - square(&t, &t) /* 2^41 - 2^1 */ - for i := 1; i < 10; i++ { /* 2^50 - 2^10 */ - square(&t, &t) - } - mul(&z2_50_0, &t, &z2_10_0) /* 2^50 - 2^0 */ - - square(&t, &z2_50_0) /* 2^51 - 2^1 */ - for i := 1; i < 50; i++ { /* 2^100 - 2^50 */ - square(&t, &t) - } - mul(&z2_100_0, &t, &z2_50_0) /* 2^100 - 2^0 */ - - square(&t, &z2_100_0) /* 2^101 - 2^1 */ - for i := 1; i < 100; i++ { /* 2^200 - 2^100 */ - square(&t, &t) - } - mul(&t, &t, &z2_100_0) /* 2^200 - 2^0 */ - - square(&t, &t) /* 2^201 - 2^1 */ - for i := 1; i < 50; i++ { /* 2^250 - 2^50 */ - square(&t, &t) - } - mul(&t, &t, &z2_50_0) /* 2^250 - 2^0 */ - - square(&t, &t) /* 2^251 - 2^1 */ - square(&t, &t) /* 2^252 - 2^2 */ - square(&t, &t) /* 2^253 - 2^3 */ - - square(&t, &t) /* 2^254 - 2^4 */ - - square(&t, &t) /* 2^255 - 2^5 */ - mul(r, &t, &z11) /* 2^255 - 21 */ -} diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519_amd64.s b/vendor/golang.org/x/crypto/curve25519/curve25519_amd64.s deleted file mode 100644 index 0250c88..0000000 --- a/vendor/golang.org/x/crypto/curve25519/curve25519_amd64.s +++ /dev/null @@ -1,1793 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This code was translated into a form compatible with 6a from the public -// domain sources in SUPERCOP: https://bench.cr.yp.to/supercop.html - -// +build amd64,!gccgo,!appengine,!purego - -#define REDMASK51 0x0007FFFFFFFFFFFF - -// These constants cannot be encoded in non-MOVQ immediates. -// We access them directly from memory instead. 
- -DATA ·_121666_213(SB)/8, $996687872 -GLOBL ·_121666_213(SB), 8, $8 - -DATA ·_2P0(SB)/8, $0xFFFFFFFFFFFDA -GLOBL ·_2P0(SB), 8, $8 - -DATA ·_2P1234(SB)/8, $0xFFFFFFFFFFFFE -GLOBL ·_2P1234(SB), 8, $8 - -// func freeze(inout *[5]uint64) -TEXT ·freeze(SB),7,$0-8 - MOVQ inout+0(FP), DI - - MOVQ 0(DI),SI - MOVQ 8(DI),DX - MOVQ 16(DI),CX - MOVQ 24(DI),R8 - MOVQ 32(DI),R9 - MOVQ $REDMASK51,AX - MOVQ AX,R10 - SUBQ $18,R10 - MOVQ $3,R11 -REDUCELOOP: - MOVQ SI,R12 - SHRQ $51,R12 - ANDQ AX,SI - ADDQ R12,DX - MOVQ DX,R12 - SHRQ $51,R12 - ANDQ AX,DX - ADDQ R12,CX - MOVQ CX,R12 - SHRQ $51,R12 - ANDQ AX,CX - ADDQ R12,R8 - MOVQ R8,R12 - SHRQ $51,R12 - ANDQ AX,R8 - ADDQ R12,R9 - MOVQ R9,R12 - SHRQ $51,R12 - ANDQ AX,R9 - IMUL3Q $19,R12,R12 - ADDQ R12,SI - SUBQ $1,R11 - JA REDUCELOOP - MOVQ $1,R12 - CMPQ R10,SI - CMOVQLT R11,R12 - CMPQ AX,DX - CMOVQNE R11,R12 - CMPQ AX,CX - CMOVQNE R11,R12 - CMPQ AX,R8 - CMOVQNE R11,R12 - CMPQ AX,R9 - CMOVQNE R11,R12 - NEGQ R12 - ANDQ R12,AX - ANDQ R12,R10 - SUBQ R10,SI - SUBQ AX,DX - SUBQ AX,CX - SUBQ AX,R8 - SUBQ AX,R9 - MOVQ SI,0(DI) - MOVQ DX,8(DI) - MOVQ CX,16(DI) - MOVQ R8,24(DI) - MOVQ R9,32(DI) - RET - -// func ladderstep(inout *[5][5]uint64) -TEXT ·ladderstep(SB),0,$296-8 - MOVQ inout+0(FP),DI - - MOVQ 40(DI),SI - MOVQ 48(DI),DX - MOVQ 56(DI),CX - MOVQ 64(DI),R8 - MOVQ 72(DI),R9 - MOVQ SI,AX - MOVQ DX,R10 - MOVQ CX,R11 - MOVQ R8,R12 - MOVQ R9,R13 - ADDQ ·_2P0(SB),AX - ADDQ ·_2P1234(SB),R10 - ADDQ ·_2P1234(SB),R11 - ADDQ ·_2P1234(SB),R12 - ADDQ ·_2P1234(SB),R13 - ADDQ 80(DI),SI - ADDQ 88(DI),DX - ADDQ 96(DI),CX - ADDQ 104(DI),R8 - ADDQ 112(DI),R9 - SUBQ 80(DI),AX - SUBQ 88(DI),R10 - SUBQ 96(DI),R11 - SUBQ 104(DI),R12 - SUBQ 112(DI),R13 - MOVQ SI,0(SP) - MOVQ DX,8(SP) - MOVQ CX,16(SP) - MOVQ R8,24(SP) - MOVQ R9,32(SP) - MOVQ AX,40(SP) - MOVQ R10,48(SP) - MOVQ R11,56(SP) - MOVQ R12,64(SP) - MOVQ R13,72(SP) - MOVQ 40(SP),AX - MULQ 40(SP) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 40(SP),AX - SHLQ $1,AX - MULQ 48(SP) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 40(SP),AX - SHLQ $1,AX - MULQ 56(SP) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 40(SP),AX - SHLQ $1,AX - MULQ 64(SP) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 40(SP),AX - SHLQ $1,AX - MULQ 72(SP) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 48(SP),AX - MULQ 48(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 48(SP),AX - SHLQ $1,AX - MULQ 56(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 48(SP),AX - SHLQ $1,AX - MULQ 64(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 48(SP),DX - IMUL3Q $38,DX,AX - MULQ 72(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 56(SP),AX - MULQ 56(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 56(SP),DX - IMUL3Q $38,DX,AX - MULQ 64(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 56(SP),DX - IMUL3Q $38,DX,AX - MULQ 72(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 64(SP),DX - IMUL3Q $19,DX,AX - MULQ 64(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 64(SP),DX - IMUL3Q $38,DX,AX - MULQ 72(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 72(SP),DX - IMUL3Q $19,DX,AX - MULQ 72(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - ANDQ DX,SI - MOVQ CX,R8 - SHRQ $51,CX - ADDQ R10,CX - ANDQ DX,R8 - MOVQ CX,R9 - SHRQ $51,CX - ADDQ R12,CX - ANDQ DX,R9 - MOVQ CX,AX - SHRQ $51,CX - ADDQ R14,CX - ANDQ DX,AX - MOVQ CX,R10 - SHRQ $51,CX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,80(SP) - MOVQ R8,88(SP) - MOVQ R9,96(SP) - MOVQ AX,104(SP) - 
MOVQ R10,112(SP) - MOVQ 0(SP),AX - MULQ 0(SP) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 0(SP),AX - SHLQ $1,AX - MULQ 8(SP) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 0(SP),AX - SHLQ $1,AX - MULQ 16(SP) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 0(SP),AX - SHLQ $1,AX - MULQ 24(SP) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 0(SP),AX - SHLQ $1,AX - MULQ 32(SP) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 8(SP),AX - MULQ 8(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 8(SP),AX - SHLQ $1,AX - MULQ 16(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 8(SP),AX - SHLQ $1,AX - MULQ 24(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 8(SP),DX - IMUL3Q $38,DX,AX - MULQ 32(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 16(SP),AX - MULQ 16(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 16(SP),DX - IMUL3Q $38,DX,AX - MULQ 24(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 16(SP),DX - IMUL3Q $38,DX,AX - MULQ 32(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 24(SP),DX - IMUL3Q $19,DX,AX - MULQ 24(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 24(SP),DX - IMUL3Q $38,DX,AX - MULQ 32(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 32(SP),DX - IMUL3Q $19,DX,AX - MULQ 32(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - ANDQ DX,SI - MOVQ CX,R8 - SHRQ $51,CX - ADDQ R10,CX - ANDQ DX,R8 - MOVQ CX,R9 - SHRQ $51,CX - ADDQ R12,CX - ANDQ DX,R9 - MOVQ CX,AX - SHRQ $51,CX - ADDQ R14,CX - ANDQ DX,AX - MOVQ CX,R10 - SHRQ $51,CX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,120(SP) - MOVQ R8,128(SP) - MOVQ R9,136(SP) - MOVQ AX,144(SP) - MOVQ R10,152(SP) - MOVQ SI,SI - MOVQ R8,DX - MOVQ R9,CX - MOVQ AX,R8 - MOVQ R10,R9 - ADDQ ·_2P0(SB),SI - ADDQ ·_2P1234(SB),DX - ADDQ ·_2P1234(SB),CX - ADDQ ·_2P1234(SB),R8 - ADDQ ·_2P1234(SB),R9 - SUBQ 80(SP),SI - SUBQ 88(SP),DX - SUBQ 96(SP),CX - SUBQ 104(SP),R8 - SUBQ 112(SP),R9 - MOVQ SI,160(SP) - MOVQ DX,168(SP) - MOVQ CX,176(SP) - MOVQ R8,184(SP) - MOVQ R9,192(SP) - MOVQ 120(DI),SI - MOVQ 128(DI),DX - MOVQ 136(DI),CX - MOVQ 144(DI),R8 - MOVQ 152(DI),R9 - MOVQ SI,AX - MOVQ DX,R10 - MOVQ CX,R11 - MOVQ R8,R12 - MOVQ R9,R13 - ADDQ ·_2P0(SB),AX - ADDQ ·_2P1234(SB),R10 - ADDQ ·_2P1234(SB),R11 - ADDQ ·_2P1234(SB),R12 - ADDQ ·_2P1234(SB),R13 - ADDQ 160(DI),SI - ADDQ 168(DI),DX - ADDQ 176(DI),CX - ADDQ 184(DI),R8 - ADDQ 192(DI),R9 - SUBQ 160(DI),AX - SUBQ 168(DI),R10 - SUBQ 176(DI),R11 - SUBQ 184(DI),R12 - SUBQ 192(DI),R13 - MOVQ SI,200(SP) - MOVQ DX,208(SP) - MOVQ CX,216(SP) - MOVQ R8,224(SP) - MOVQ R9,232(SP) - MOVQ AX,240(SP) - MOVQ R10,248(SP) - MOVQ R11,256(SP) - MOVQ R12,264(SP) - MOVQ R13,272(SP) - MOVQ 224(SP),SI - IMUL3Q $19,SI,AX - MOVQ AX,280(SP) - MULQ 56(SP) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 232(SP),DX - IMUL3Q $19,DX,AX - MOVQ AX,288(SP) - MULQ 48(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 200(SP),AX - MULQ 40(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 200(SP),AX - MULQ 48(SP) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 200(SP),AX - MULQ 56(SP) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 200(SP),AX - MULQ 64(SP) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 200(SP),AX - MULQ 72(SP) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 208(SP),AX - MULQ 40(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 208(SP),AX - MULQ 48(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 208(SP),AX - MULQ 56(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 208(SP),AX - MULQ 64(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 208(SP),DX - IMUL3Q $19,DX,AX - MULQ 72(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 216(SP),AX - MULQ 40(SP) - 
ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 216(SP),AX - MULQ 48(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 216(SP),AX - MULQ 56(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 216(SP),DX - IMUL3Q $19,DX,AX - MULQ 64(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 216(SP),DX - IMUL3Q $19,DX,AX - MULQ 72(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 224(SP),AX - MULQ 40(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 224(SP),AX - MULQ 48(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 280(SP),AX - MULQ 64(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 280(SP),AX - MULQ 72(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 232(SP),AX - MULQ 40(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 288(SP),AX - MULQ 56(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 288(SP),AX - MULQ 64(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 288(SP),AX - MULQ 72(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - MOVQ CX,R8 - SHRQ $51,CX - ANDQ DX,SI - ADDQ R10,CX - MOVQ CX,R9 - SHRQ $51,CX - ANDQ DX,R8 - ADDQ R12,CX - MOVQ CX,AX - SHRQ $51,CX - ANDQ DX,R9 - ADDQ R14,CX - MOVQ CX,R10 - SHRQ $51,CX - ANDQ DX,AX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,40(SP) - MOVQ R8,48(SP) - MOVQ R9,56(SP) - MOVQ AX,64(SP) - MOVQ R10,72(SP) - MOVQ 264(SP),SI - IMUL3Q $19,SI,AX - MOVQ AX,200(SP) - MULQ 16(SP) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 272(SP),DX - IMUL3Q $19,DX,AX - MOVQ AX,208(SP) - MULQ 8(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 240(SP),AX - MULQ 0(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 240(SP),AX - MULQ 8(SP) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 240(SP),AX - MULQ 16(SP) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 240(SP),AX - MULQ 24(SP) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 240(SP),AX - MULQ 32(SP) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 248(SP),AX - MULQ 0(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 248(SP),AX - MULQ 8(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 248(SP),AX - MULQ 16(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 248(SP),AX - MULQ 24(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 248(SP),DX - IMUL3Q $19,DX,AX - MULQ 32(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 256(SP),AX - MULQ 0(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 256(SP),AX - MULQ 8(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 256(SP),AX - MULQ 16(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 256(SP),DX - IMUL3Q $19,DX,AX - MULQ 24(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 256(SP),DX - IMUL3Q $19,DX,AX - MULQ 32(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 264(SP),AX - MULQ 0(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 264(SP),AX - MULQ 8(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 200(SP),AX - MULQ 24(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 200(SP),AX - MULQ 32(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 272(SP),AX - MULQ 0(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 208(SP),AX - MULQ 16(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 208(SP),AX - MULQ 24(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 208(SP),AX - MULQ 32(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - MOVQ CX,R8 - SHRQ $51,CX - ANDQ DX,SI - ADDQ R10,CX - MOVQ CX,R9 - SHRQ $51,CX - ANDQ DX,R8 - ADDQ R12,CX - MOVQ CX,AX - SHRQ $51,CX - ANDQ DX,R9 - ADDQ R14,CX - MOVQ CX,R10 - SHRQ $51,CX - ANDQ DX,AX - IMUL3Q $19,CX,CX 
- ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,DX - MOVQ R8,CX - MOVQ R9,R11 - MOVQ AX,R12 - MOVQ R10,R13 - ADDQ ·_2P0(SB),DX - ADDQ ·_2P1234(SB),CX - ADDQ ·_2P1234(SB),R11 - ADDQ ·_2P1234(SB),R12 - ADDQ ·_2P1234(SB),R13 - ADDQ 40(SP),SI - ADDQ 48(SP),R8 - ADDQ 56(SP),R9 - ADDQ 64(SP),AX - ADDQ 72(SP),R10 - SUBQ 40(SP),DX - SUBQ 48(SP),CX - SUBQ 56(SP),R11 - SUBQ 64(SP),R12 - SUBQ 72(SP),R13 - MOVQ SI,120(DI) - MOVQ R8,128(DI) - MOVQ R9,136(DI) - MOVQ AX,144(DI) - MOVQ R10,152(DI) - MOVQ DX,160(DI) - MOVQ CX,168(DI) - MOVQ R11,176(DI) - MOVQ R12,184(DI) - MOVQ R13,192(DI) - MOVQ 120(DI),AX - MULQ 120(DI) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 120(DI),AX - SHLQ $1,AX - MULQ 128(DI) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 120(DI),AX - SHLQ $1,AX - MULQ 136(DI) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 120(DI),AX - SHLQ $1,AX - MULQ 144(DI) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 120(DI),AX - SHLQ $1,AX - MULQ 152(DI) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 128(DI),AX - MULQ 128(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 128(DI),AX - SHLQ $1,AX - MULQ 136(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 128(DI),AX - SHLQ $1,AX - MULQ 144(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 128(DI),DX - IMUL3Q $38,DX,AX - MULQ 152(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 136(DI),AX - MULQ 136(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 136(DI),DX - IMUL3Q $38,DX,AX - MULQ 144(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 136(DI),DX - IMUL3Q $38,DX,AX - MULQ 152(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 144(DI),DX - IMUL3Q $19,DX,AX - MULQ 144(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 144(DI),DX - IMUL3Q $38,DX,AX - MULQ 152(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 152(DI),DX - IMUL3Q $19,DX,AX - MULQ 152(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - ANDQ DX,SI - MOVQ CX,R8 - SHRQ $51,CX - ADDQ R10,CX - ANDQ DX,R8 - MOVQ CX,R9 - SHRQ $51,CX - ADDQ R12,CX - ANDQ DX,R9 - MOVQ CX,AX - SHRQ $51,CX - ADDQ R14,CX - ANDQ DX,AX - MOVQ CX,R10 - SHRQ $51,CX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,120(DI) - MOVQ R8,128(DI) - MOVQ R9,136(DI) - MOVQ AX,144(DI) - MOVQ R10,152(DI) - MOVQ 160(DI),AX - MULQ 160(DI) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 160(DI),AX - SHLQ $1,AX - MULQ 168(DI) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 160(DI),AX - SHLQ $1,AX - MULQ 176(DI) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 160(DI),AX - SHLQ $1,AX - MULQ 184(DI) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 160(DI),AX - SHLQ $1,AX - MULQ 192(DI) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 168(DI),AX - MULQ 168(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 168(DI),AX - SHLQ $1,AX - MULQ 176(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 168(DI),AX - SHLQ $1,AX - MULQ 184(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 168(DI),DX - IMUL3Q $38,DX,AX - MULQ 192(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 176(DI),AX - MULQ 176(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 176(DI),DX - IMUL3Q $38,DX,AX - MULQ 184(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 176(DI),DX - IMUL3Q $38,DX,AX - MULQ 192(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 184(DI),DX - IMUL3Q $19,DX,AX - MULQ 184(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 184(DI),DX - IMUL3Q $38,DX,AX - MULQ 192(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 192(DI),DX - IMUL3Q $19,DX,AX - MULQ 192(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - 
SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - ANDQ DX,SI - MOVQ CX,R8 - SHRQ $51,CX - ADDQ R10,CX - ANDQ DX,R8 - MOVQ CX,R9 - SHRQ $51,CX - ADDQ R12,CX - ANDQ DX,R9 - MOVQ CX,AX - SHRQ $51,CX - ADDQ R14,CX - ANDQ DX,AX - MOVQ CX,R10 - SHRQ $51,CX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,160(DI) - MOVQ R8,168(DI) - MOVQ R9,176(DI) - MOVQ AX,184(DI) - MOVQ R10,192(DI) - MOVQ 184(DI),SI - IMUL3Q $19,SI,AX - MOVQ AX,0(SP) - MULQ 16(DI) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 192(DI),DX - IMUL3Q $19,DX,AX - MOVQ AX,8(SP) - MULQ 8(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 160(DI),AX - MULQ 0(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 160(DI),AX - MULQ 8(DI) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 160(DI),AX - MULQ 16(DI) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 160(DI),AX - MULQ 24(DI) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 160(DI),AX - MULQ 32(DI) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 168(DI),AX - MULQ 0(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 168(DI),AX - MULQ 8(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 168(DI),AX - MULQ 16(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 168(DI),AX - MULQ 24(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 168(DI),DX - IMUL3Q $19,DX,AX - MULQ 32(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 176(DI),AX - MULQ 0(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 176(DI),AX - MULQ 8(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 176(DI),AX - MULQ 16(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 176(DI),DX - IMUL3Q $19,DX,AX - MULQ 24(DI) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 176(DI),DX - IMUL3Q $19,DX,AX - MULQ 32(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 184(DI),AX - MULQ 0(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 184(DI),AX - MULQ 8(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 0(SP),AX - MULQ 24(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 0(SP),AX - MULQ 32(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 192(DI),AX - MULQ 0(DI) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 8(SP),AX - MULQ 16(DI) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 8(SP),AX - MULQ 24(DI) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 8(SP),AX - MULQ 32(DI) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - MOVQ CX,R8 - SHRQ $51,CX - ANDQ DX,SI - ADDQ R10,CX - MOVQ CX,R9 - SHRQ $51,CX - ANDQ DX,R8 - ADDQ R12,CX - MOVQ CX,AX - SHRQ $51,CX - ANDQ DX,R9 - ADDQ R14,CX - MOVQ CX,R10 - SHRQ $51,CX - ANDQ DX,AX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,160(DI) - MOVQ R8,168(DI) - MOVQ R9,176(DI) - MOVQ AX,184(DI) - MOVQ R10,192(DI) - MOVQ 144(SP),SI - IMUL3Q $19,SI,AX - MOVQ AX,0(SP) - MULQ 96(SP) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 152(SP),DX - IMUL3Q $19,DX,AX - MOVQ AX,8(SP) - MULQ 88(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 120(SP),AX - MULQ 80(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 120(SP),AX - MULQ 88(SP) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 120(SP),AX - MULQ 96(SP) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 120(SP),AX - MULQ 104(SP) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 120(SP),AX - MULQ 112(SP) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 128(SP),AX - MULQ 80(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 128(SP),AX - MULQ 88(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 128(SP),AX - MULQ 96(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 128(SP),AX - MULQ 104(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 128(SP),DX - IMUL3Q $19,DX,AX - MULQ 112(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 
136(SP),AX - MULQ 80(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 136(SP),AX - MULQ 88(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 136(SP),AX - MULQ 96(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 136(SP),DX - IMUL3Q $19,DX,AX - MULQ 104(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 136(SP),DX - IMUL3Q $19,DX,AX - MULQ 112(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 144(SP),AX - MULQ 80(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 144(SP),AX - MULQ 88(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 0(SP),AX - MULQ 104(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 0(SP),AX - MULQ 112(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 152(SP),AX - MULQ 80(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 8(SP),AX - MULQ 96(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 8(SP),AX - MULQ 104(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 8(SP),AX - MULQ 112(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - MOVQ CX,R8 - SHRQ $51,CX - ANDQ DX,SI - ADDQ R10,CX - MOVQ CX,R9 - SHRQ $51,CX - ANDQ DX,R8 - ADDQ R12,CX - MOVQ CX,AX - SHRQ $51,CX - ANDQ DX,R9 - ADDQ R14,CX - MOVQ CX,R10 - SHRQ $51,CX - ANDQ DX,AX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,40(DI) - MOVQ R8,48(DI) - MOVQ R9,56(DI) - MOVQ AX,64(DI) - MOVQ R10,72(DI) - MOVQ 160(SP),AX - MULQ ·_121666_213(SB) - SHRQ $13,AX - MOVQ AX,SI - MOVQ DX,CX - MOVQ 168(SP),AX - MULQ ·_121666_213(SB) - SHRQ $13,AX - ADDQ AX,CX - MOVQ DX,R8 - MOVQ 176(SP),AX - MULQ ·_121666_213(SB) - SHRQ $13,AX - ADDQ AX,R8 - MOVQ DX,R9 - MOVQ 184(SP),AX - MULQ ·_121666_213(SB) - SHRQ $13,AX - ADDQ AX,R9 - MOVQ DX,R10 - MOVQ 192(SP),AX - MULQ ·_121666_213(SB) - SHRQ $13,AX - ADDQ AX,R10 - IMUL3Q $19,DX,DX - ADDQ DX,SI - ADDQ 80(SP),SI - ADDQ 88(SP),CX - ADDQ 96(SP),R8 - ADDQ 104(SP),R9 - ADDQ 112(SP),R10 - MOVQ SI,80(DI) - MOVQ CX,88(DI) - MOVQ R8,96(DI) - MOVQ R9,104(DI) - MOVQ R10,112(DI) - MOVQ 104(DI),SI - IMUL3Q $19,SI,AX - MOVQ AX,0(SP) - MULQ 176(SP) - MOVQ AX,SI - MOVQ DX,CX - MOVQ 112(DI),DX - IMUL3Q $19,DX,AX - MOVQ AX,8(SP) - MULQ 168(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 80(DI),AX - MULQ 160(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 80(DI),AX - MULQ 168(SP) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 80(DI),AX - MULQ 176(SP) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 80(DI),AX - MULQ 184(SP) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 80(DI),AX - MULQ 192(SP) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 88(DI),AX - MULQ 160(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 88(DI),AX - MULQ 168(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 88(DI),AX - MULQ 176(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 88(DI),AX - MULQ 184(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 88(DI),DX - IMUL3Q $19,DX,AX - MULQ 192(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 96(DI),AX - MULQ 160(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 96(DI),AX - MULQ 168(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 96(DI),AX - MULQ 176(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 96(DI),DX - IMUL3Q $19,DX,AX - MULQ 184(SP) - ADDQ AX,SI - ADCQ DX,CX - MOVQ 96(DI),DX - IMUL3Q $19,DX,AX - MULQ 192(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 104(DI),AX - MULQ 160(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 104(DI),AX - MULQ 168(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 0(SP),AX - MULQ 184(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 0(SP),AX - MULQ 192(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 112(DI),AX - MULQ 160(SP) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 8(SP),AX - MULQ 176(SP) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 
8(SP),AX - MULQ 184(SP) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 8(SP),AX - MULQ 192(SP) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ $REDMASK51,DX - SHLQ $13,SI,CX - ANDQ DX,SI - SHLQ $13,R8,R9 - ANDQ DX,R8 - ADDQ CX,R8 - SHLQ $13,R10,R11 - ANDQ DX,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ DX,R12 - ADDQ R11,R12 - SHLQ $13,R14,R15 - ANDQ DX,R14 - ADDQ R13,R14 - IMUL3Q $19,R15,CX - ADDQ CX,SI - MOVQ SI,CX - SHRQ $51,CX - ADDQ R8,CX - MOVQ CX,R8 - SHRQ $51,CX - ANDQ DX,SI - ADDQ R10,CX - MOVQ CX,R9 - SHRQ $51,CX - ANDQ DX,R8 - ADDQ R12,CX - MOVQ CX,AX - SHRQ $51,CX - ANDQ DX,R9 - ADDQ R14,CX - MOVQ CX,R10 - SHRQ $51,CX - ANDQ DX,AX - IMUL3Q $19,CX,CX - ADDQ CX,SI - ANDQ DX,R10 - MOVQ SI,80(DI) - MOVQ R8,88(DI) - MOVQ R9,96(DI) - MOVQ AX,104(DI) - MOVQ R10,112(DI) - RET - -// func cswap(inout *[4][5]uint64, v uint64) -TEXT ·cswap(SB),7,$0 - MOVQ inout+0(FP),DI - MOVQ v+8(FP),SI - - SUBQ $1, SI - NOTQ SI - MOVQ SI, X15 - PSHUFD $0x44, X15, X15 - - MOVOU 0(DI), X0 - MOVOU 16(DI), X2 - MOVOU 32(DI), X4 - MOVOU 48(DI), X6 - MOVOU 64(DI), X8 - MOVOU 80(DI), X1 - MOVOU 96(DI), X3 - MOVOU 112(DI), X5 - MOVOU 128(DI), X7 - MOVOU 144(DI), X9 - - MOVO X1, X10 - MOVO X3, X11 - MOVO X5, X12 - MOVO X7, X13 - MOVO X9, X14 - - PXOR X0, X10 - PXOR X2, X11 - PXOR X4, X12 - PXOR X6, X13 - PXOR X8, X14 - PAND X15, X10 - PAND X15, X11 - PAND X15, X12 - PAND X15, X13 - PAND X15, X14 - PXOR X10, X0 - PXOR X10, X1 - PXOR X11, X2 - PXOR X11, X3 - PXOR X12, X4 - PXOR X12, X5 - PXOR X13, X6 - PXOR X13, X7 - PXOR X14, X8 - PXOR X14, X9 - - MOVOU X0, 0(DI) - MOVOU X2, 16(DI) - MOVOU X4, 32(DI) - MOVOU X6, 48(DI) - MOVOU X8, 64(DI) - MOVOU X1, 80(DI) - MOVOU X3, 96(DI) - MOVOU X5, 112(DI) - MOVOU X7, 128(DI) - MOVOU X9, 144(DI) - RET - -// func mul(dest, a, b *[5]uint64) -TEXT ·mul(SB),0,$16-24 - MOVQ dest+0(FP), DI - MOVQ a+8(FP), SI - MOVQ b+16(FP), DX - - MOVQ DX,CX - MOVQ 24(SI),DX - IMUL3Q $19,DX,AX - MOVQ AX,0(SP) - MULQ 16(CX) - MOVQ AX,R8 - MOVQ DX,R9 - MOVQ 32(SI),DX - IMUL3Q $19,DX,AX - MOVQ AX,8(SP) - MULQ 8(CX) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 0(SI),AX - MULQ 0(CX) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 0(SI),AX - MULQ 8(CX) - MOVQ AX,R10 - MOVQ DX,R11 - MOVQ 0(SI),AX - MULQ 16(CX) - MOVQ AX,R12 - MOVQ DX,R13 - MOVQ 0(SI),AX - MULQ 24(CX) - MOVQ AX,R14 - MOVQ DX,R15 - MOVQ 0(SI),AX - MULQ 32(CX) - MOVQ AX,BX - MOVQ DX,BP - MOVQ 8(SI),AX - MULQ 0(CX) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 8(SI),AX - MULQ 8(CX) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 8(SI),AX - MULQ 16(CX) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 8(SI),AX - MULQ 24(CX) - ADDQ AX,BX - ADCQ DX,BP - MOVQ 8(SI),DX - IMUL3Q $19,DX,AX - MULQ 32(CX) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 16(SI),AX - MULQ 0(CX) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 16(SI),AX - MULQ 8(CX) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 16(SI),AX - MULQ 16(CX) - ADDQ AX,BX - ADCQ DX,BP - MOVQ 16(SI),DX - IMUL3Q $19,DX,AX - MULQ 24(CX) - ADDQ AX,R8 - ADCQ DX,R9 - MOVQ 16(SI),DX - IMUL3Q $19,DX,AX - MULQ 32(CX) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 24(SI),AX - MULQ 0(CX) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ 24(SI),AX - MULQ 8(CX) - ADDQ AX,BX - ADCQ DX,BP - MOVQ 0(SP),AX - MULQ 24(CX) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 0(SP),AX - MULQ 32(CX) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 32(SI),AX - MULQ 0(CX) - ADDQ AX,BX - ADCQ DX,BP - MOVQ 8(SP),AX - MULQ 16(CX) - ADDQ AX,R10 - ADCQ DX,R11 - MOVQ 8(SP),AX - MULQ 24(CX) - ADDQ AX,R12 - ADCQ DX,R13 - MOVQ 8(SP),AX - MULQ 32(CX) - ADDQ AX,R14 - ADCQ DX,R15 - MOVQ $REDMASK51,SI - SHLQ $13,R8,R9 - ANDQ SI,R8 - SHLQ $13,R10,R11 - ANDQ SI,R10 - ADDQ R9,R10 - SHLQ $13,R12,R13 - ANDQ SI,R12 - ADDQ 
R11,R12 - SHLQ $13,R14,R15 - ANDQ SI,R14 - ADDQ R13,R14 - SHLQ $13,BX,BP - ANDQ SI,BX - ADDQ R15,BX - IMUL3Q $19,BP,DX - ADDQ DX,R8 - MOVQ R8,DX - SHRQ $51,DX - ADDQ R10,DX - MOVQ DX,CX - SHRQ $51,DX - ANDQ SI,R8 - ADDQ R12,DX - MOVQ DX,R9 - SHRQ $51,DX - ANDQ SI,CX - ADDQ R14,DX - MOVQ DX,AX - SHRQ $51,DX - ANDQ SI,R9 - ADDQ BX,DX - MOVQ DX,R10 - SHRQ $51,DX - ANDQ SI,AX - IMUL3Q $19,DX,DX - ADDQ DX,R8 - ANDQ SI,R10 - MOVQ R8,0(DI) - MOVQ CX,8(DI) - MOVQ R9,16(DI) - MOVQ AX,24(DI) - MOVQ R10,32(DI) - RET - -// func square(out, in *[5]uint64) -TEXT ·square(SB),7,$0-16 - MOVQ out+0(FP), DI - MOVQ in+8(FP), SI - - MOVQ 0(SI),AX - MULQ 0(SI) - MOVQ AX,CX - MOVQ DX,R8 - MOVQ 0(SI),AX - SHLQ $1,AX - MULQ 8(SI) - MOVQ AX,R9 - MOVQ DX,R10 - MOVQ 0(SI),AX - SHLQ $1,AX - MULQ 16(SI) - MOVQ AX,R11 - MOVQ DX,R12 - MOVQ 0(SI),AX - SHLQ $1,AX - MULQ 24(SI) - MOVQ AX,R13 - MOVQ DX,R14 - MOVQ 0(SI),AX - SHLQ $1,AX - MULQ 32(SI) - MOVQ AX,R15 - MOVQ DX,BX - MOVQ 8(SI),AX - MULQ 8(SI) - ADDQ AX,R11 - ADCQ DX,R12 - MOVQ 8(SI),AX - SHLQ $1,AX - MULQ 16(SI) - ADDQ AX,R13 - ADCQ DX,R14 - MOVQ 8(SI),AX - SHLQ $1,AX - MULQ 24(SI) - ADDQ AX,R15 - ADCQ DX,BX - MOVQ 8(SI),DX - IMUL3Q $38,DX,AX - MULQ 32(SI) - ADDQ AX,CX - ADCQ DX,R8 - MOVQ 16(SI),AX - MULQ 16(SI) - ADDQ AX,R15 - ADCQ DX,BX - MOVQ 16(SI),DX - IMUL3Q $38,DX,AX - MULQ 24(SI) - ADDQ AX,CX - ADCQ DX,R8 - MOVQ 16(SI),DX - IMUL3Q $38,DX,AX - MULQ 32(SI) - ADDQ AX,R9 - ADCQ DX,R10 - MOVQ 24(SI),DX - IMUL3Q $19,DX,AX - MULQ 24(SI) - ADDQ AX,R9 - ADCQ DX,R10 - MOVQ 24(SI),DX - IMUL3Q $38,DX,AX - MULQ 32(SI) - ADDQ AX,R11 - ADCQ DX,R12 - MOVQ 32(SI),DX - IMUL3Q $19,DX,AX - MULQ 32(SI) - ADDQ AX,R13 - ADCQ DX,R14 - MOVQ $REDMASK51,SI - SHLQ $13,CX,R8 - ANDQ SI,CX - SHLQ $13,R9,R10 - ANDQ SI,R9 - ADDQ R8,R9 - SHLQ $13,R11,R12 - ANDQ SI,R11 - ADDQ R10,R11 - SHLQ $13,R13,R14 - ANDQ SI,R13 - ADDQ R12,R13 - SHLQ $13,R15,BX - ANDQ SI,R15 - ADDQ R14,R15 - IMUL3Q $19,BX,DX - ADDQ DX,CX - MOVQ CX,DX - SHRQ $51,DX - ADDQ R9,DX - ANDQ SI,CX - MOVQ DX,R8 - SHRQ $51,DX - ADDQ R11,DX - ANDQ SI,R8 - MOVQ DX,R9 - SHRQ $51,DX - ADDQ R13,DX - ANDQ SI,R9 - MOVQ DX,AX - SHRQ $51,DX - ADDQ R15,DX - ANDQ SI,AX - MOVQ DX,R10 - SHRQ $51,DX - IMUL3Q $19,DX,DX - ADDQ DX,CX - ANDQ SI,R10 - MOVQ CX,0(DI) - MOVQ R8,8(DI) - MOVQ R9,16(DI) - MOVQ AX,24(DI) - MOVQ R10,32(DI) - RET diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519_generic.go b/vendor/golang.org/x/crypto/curve25519/curve25519_generic.go deleted file mode 100644 index c43b13f..0000000 --- a/vendor/golang.org/x/crypto/curve25519/curve25519_generic.go +++ /dev/null @@ -1,828 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package curve25519 - -import "encoding/binary" - -// This code is a port of the public domain, "ref10" implementation of -// curve25519 from SUPERCOP 20130419 by D. J. Bernstein. - -// fieldElement represents an element of the field GF(2^255 - 19). An element -// t, entries t[0]...t[9], represents the integer t[0]+2^26 t[1]+2^51 t[2]+2^77 -// t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on -// context. 
-type fieldElement [10]int32 - -func feZero(fe *fieldElement) { - for i := range fe { - fe[i] = 0 - } -} - -func feOne(fe *fieldElement) { - feZero(fe) - fe[0] = 1 -} - -func feAdd(dst, a, b *fieldElement) { - for i := range dst { - dst[i] = a[i] + b[i] - } -} - -func feSub(dst, a, b *fieldElement) { - for i := range dst { - dst[i] = a[i] - b[i] - } -} - -func feCopy(dst, src *fieldElement) { - for i := range dst { - dst[i] = src[i] - } -} - -// feCSwap replaces (f,g) with (g,f) if b == 1; replaces (f,g) with (f,g) if b == 0. -// -// Preconditions: b in {0,1}. -func feCSwap(f, g *fieldElement, b int32) { - b = -b - for i := range f { - t := b & (f[i] ^ g[i]) - f[i] ^= t - g[i] ^= t - } -} - -// load3 reads a 24-bit, little-endian value from in. -func load3(in []byte) int64 { - var r int64 - r = int64(in[0]) - r |= int64(in[1]) << 8 - r |= int64(in[2]) << 16 - return r -} - -// load4 reads a 32-bit, little-endian value from in. -func load4(in []byte) int64 { - return int64(binary.LittleEndian.Uint32(in)) -} - -func feFromBytes(dst *fieldElement, src *[32]byte) { - h0 := load4(src[:]) - h1 := load3(src[4:]) << 6 - h2 := load3(src[7:]) << 5 - h3 := load3(src[10:]) << 3 - h4 := load3(src[13:]) << 2 - h5 := load4(src[16:]) - h6 := load3(src[20:]) << 7 - h7 := load3(src[23:]) << 5 - h8 := load3(src[26:]) << 4 - h9 := (load3(src[29:]) & 0x7fffff) << 2 - - var carry [10]int64 - carry[9] = (h9 + 1<<24) >> 25 - h0 += carry[9] * 19 - h9 -= carry[9] << 25 - carry[1] = (h1 + 1<<24) >> 25 - h2 += carry[1] - h1 -= carry[1] << 25 - carry[3] = (h3 + 1<<24) >> 25 - h4 += carry[3] - h3 -= carry[3] << 25 - carry[5] = (h5 + 1<<24) >> 25 - h6 += carry[5] - h5 -= carry[5] << 25 - carry[7] = (h7 + 1<<24) >> 25 - h8 += carry[7] - h7 -= carry[7] << 25 - - carry[0] = (h0 + 1<<25) >> 26 - h1 += carry[0] - h0 -= carry[0] << 26 - carry[2] = (h2 + 1<<25) >> 26 - h3 += carry[2] - h2 -= carry[2] << 26 - carry[4] = (h4 + 1<<25) >> 26 - h5 += carry[4] - h4 -= carry[4] << 26 - carry[6] = (h6 + 1<<25) >> 26 - h7 += carry[6] - h6 -= carry[6] << 26 - carry[8] = (h8 + 1<<25) >> 26 - h9 += carry[8] - h8 -= carry[8] << 26 - - dst[0] = int32(h0) - dst[1] = int32(h1) - dst[2] = int32(h2) - dst[3] = int32(h3) - dst[4] = int32(h4) - dst[5] = int32(h5) - dst[6] = int32(h6) - dst[7] = int32(h7) - dst[8] = int32(h8) - dst[9] = int32(h9) -} - -// feToBytes marshals h to s. -// Preconditions: -// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. -// -// Write p=2^255-19; q=floor(h/p). -// Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))). -// -// Proof: -// Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4. -// Also have |h-2^230 h9|<2^230 so |19 2^(-255)(h-2^230 h9)|<1/4. -// -// Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9). -// Then 0> 25 - q = (h[0] + q) >> 26 - q = (h[1] + q) >> 25 - q = (h[2] + q) >> 26 - q = (h[3] + q) >> 25 - q = (h[4] + q) >> 26 - q = (h[5] + q) >> 25 - q = (h[6] + q) >> 26 - q = (h[7] + q) >> 25 - q = (h[8] + q) >> 26 - q = (h[9] + q) >> 25 - - // Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20. - h[0] += 19 * q - // Goal: Output h-2^255 q, which is between 0 and 2^255-20. 
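The deleted generic implementation stores a field element in ten limbs with alternating 26-bit and 25-bit weights, as the comment above spells out. A rough standalone sketch of what that representation means, reconstructing the integer with math/big (the limb values here are arbitrary demo inputs, not taken from the patch):

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// Limb weights of the ten-limb fieldElement: 2^0, 2^26, 2^51, 2^77, ...
	// i.e. alternating 26-bit and 25-bit limbs (radix 2^25.5).
	shifts := []uint{0, 26, 51, 77, 102, 128, 153, 179, 204, 230}

	// Arbitrary demo limb vector.
	limbs := []int64{5, 3, 0, 0, 0, 0, 0, 0, 0, 7}

	// Reconstruct t[0] + 2^26 t[1] + ... + 2^230 t[9].
	t := new(big.Int)
	for i, l := range limbs {
		t.Add(t, new(big.Int).Lsh(big.NewInt(l), shifts[i]))
	}

	// The value the element denotes lives modulo p = 2^255 - 19.
	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))
	fmt.Println(new(big.Int).Mod(t, p))
}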
- - carry[0] = h[0] >> 26 - h[1] += carry[0] - h[0] -= carry[0] << 26 - carry[1] = h[1] >> 25 - h[2] += carry[1] - h[1] -= carry[1] << 25 - carry[2] = h[2] >> 26 - h[3] += carry[2] - h[2] -= carry[2] << 26 - carry[3] = h[3] >> 25 - h[4] += carry[3] - h[3] -= carry[3] << 25 - carry[4] = h[4] >> 26 - h[5] += carry[4] - h[4] -= carry[4] << 26 - carry[5] = h[5] >> 25 - h[6] += carry[5] - h[5] -= carry[5] << 25 - carry[6] = h[6] >> 26 - h[7] += carry[6] - h[6] -= carry[6] << 26 - carry[7] = h[7] >> 25 - h[8] += carry[7] - h[7] -= carry[7] << 25 - carry[8] = h[8] >> 26 - h[9] += carry[8] - h[8] -= carry[8] << 26 - carry[9] = h[9] >> 25 - h[9] -= carry[9] << 25 - // h10 = carry9 - - // Goal: Output h[0]+...+2^255 h10-2^255 q, which is between 0 and 2^255-20. - // Have h[0]+...+2^230 h[9] between 0 and 2^255-1; - // evidently 2^255 h10-2^255 q = 0. - // Goal: Output h[0]+...+2^230 h[9]. - - s[0] = byte(h[0] >> 0) - s[1] = byte(h[0] >> 8) - s[2] = byte(h[0] >> 16) - s[3] = byte((h[0] >> 24) | (h[1] << 2)) - s[4] = byte(h[1] >> 6) - s[5] = byte(h[1] >> 14) - s[6] = byte((h[1] >> 22) | (h[2] << 3)) - s[7] = byte(h[2] >> 5) - s[8] = byte(h[2] >> 13) - s[9] = byte((h[2] >> 21) | (h[3] << 5)) - s[10] = byte(h[3] >> 3) - s[11] = byte(h[3] >> 11) - s[12] = byte((h[3] >> 19) | (h[4] << 6)) - s[13] = byte(h[4] >> 2) - s[14] = byte(h[4] >> 10) - s[15] = byte(h[4] >> 18) - s[16] = byte(h[5] >> 0) - s[17] = byte(h[5] >> 8) - s[18] = byte(h[5] >> 16) - s[19] = byte((h[5] >> 24) | (h[6] << 1)) - s[20] = byte(h[6] >> 7) - s[21] = byte(h[6] >> 15) - s[22] = byte((h[6] >> 23) | (h[7] << 3)) - s[23] = byte(h[7] >> 5) - s[24] = byte(h[7] >> 13) - s[25] = byte((h[7] >> 21) | (h[8] << 4)) - s[26] = byte(h[8] >> 4) - s[27] = byte(h[8] >> 12) - s[28] = byte((h[8] >> 20) | (h[9] << 6)) - s[29] = byte(h[9] >> 2) - s[30] = byte(h[9] >> 10) - s[31] = byte(h[9] >> 18) -} - -// feMul calculates h = f * g -// Can overlap h with f or g. -// -// Preconditions: -// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. -// |g| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. -// -// Postconditions: -// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. -// -// Notes on implementation strategy: -// -// Using schoolbook multiplication. -// Karatsuba would save a little in some cost models. -// -// Most multiplications by 2 and 19 are 32-bit precomputations; -// cheaper than 64-bit postcomputations. -// -// There is one remaining multiplication by 19 in the carry chain; -// one *19 precomputation can be merged into this, -// but the resulting data flow is considerably less clean. -// -// There are 12 carries below. -// 10 of them are 2-way parallelizable and vectorizable. -// Can get away with 11 carries, but then data flow is much deeper. -// -// With tighter constraints on inputs can squeeze carries into int32. 
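The feMul notes above mention precomputed multiplications by 19 (and 38) that fold high limb products back into the low limbs; this works because the field is GF(2^255 - 19), so 2^255 is congruent to 19 modulo p. A minimal math/big check of that reduction identity:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))

	// 2^255 ≡ 19 and 2^256 ≡ 38 (mod p): limb products that overflow the top
	// weight are therefore scaled by 19 or 38 and added back at the bottom.
	fmt.Println(new(big.Int).Mod(new(big.Int).Lsh(big.NewInt(1), 255), p)) // 19
	fmt.Println(new(big.Int).Mod(new(big.Int).Lsh(big.NewInt(1), 256), p)) // 38
}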
-func feMul(h, f, g *fieldElement) { - f0 := f[0] - f1 := f[1] - f2 := f[2] - f3 := f[3] - f4 := f[4] - f5 := f[5] - f6 := f[6] - f7 := f[7] - f8 := f[8] - f9 := f[9] - g0 := g[0] - g1 := g[1] - g2 := g[2] - g3 := g[3] - g4 := g[4] - g5 := g[5] - g6 := g[6] - g7 := g[7] - g8 := g[8] - g9 := g[9] - g1_19 := 19 * g1 // 1.4*2^29 - g2_19 := 19 * g2 // 1.4*2^30; still ok - g3_19 := 19 * g3 - g4_19 := 19 * g4 - g5_19 := 19 * g5 - g6_19 := 19 * g6 - g7_19 := 19 * g7 - g8_19 := 19 * g8 - g9_19 := 19 * g9 - f1_2 := 2 * f1 - f3_2 := 2 * f3 - f5_2 := 2 * f5 - f7_2 := 2 * f7 - f9_2 := 2 * f9 - f0g0 := int64(f0) * int64(g0) - f0g1 := int64(f0) * int64(g1) - f0g2 := int64(f0) * int64(g2) - f0g3 := int64(f0) * int64(g3) - f0g4 := int64(f0) * int64(g4) - f0g5 := int64(f0) * int64(g5) - f0g6 := int64(f0) * int64(g6) - f0g7 := int64(f0) * int64(g7) - f0g8 := int64(f0) * int64(g8) - f0g9 := int64(f0) * int64(g9) - f1g0 := int64(f1) * int64(g0) - f1g1_2 := int64(f1_2) * int64(g1) - f1g2 := int64(f1) * int64(g2) - f1g3_2 := int64(f1_2) * int64(g3) - f1g4 := int64(f1) * int64(g4) - f1g5_2 := int64(f1_2) * int64(g5) - f1g6 := int64(f1) * int64(g6) - f1g7_2 := int64(f1_2) * int64(g7) - f1g8 := int64(f1) * int64(g8) - f1g9_38 := int64(f1_2) * int64(g9_19) - f2g0 := int64(f2) * int64(g0) - f2g1 := int64(f2) * int64(g1) - f2g2 := int64(f2) * int64(g2) - f2g3 := int64(f2) * int64(g3) - f2g4 := int64(f2) * int64(g4) - f2g5 := int64(f2) * int64(g5) - f2g6 := int64(f2) * int64(g6) - f2g7 := int64(f2) * int64(g7) - f2g8_19 := int64(f2) * int64(g8_19) - f2g9_19 := int64(f2) * int64(g9_19) - f3g0 := int64(f3) * int64(g0) - f3g1_2 := int64(f3_2) * int64(g1) - f3g2 := int64(f3) * int64(g2) - f3g3_2 := int64(f3_2) * int64(g3) - f3g4 := int64(f3) * int64(g4) - f3g5_2 := int64(f3_2) * int64(g5) - f3g6 := int64(f3) * int64(g6) - f3g7_38 := int64(f3_2) * int64(g7_19) - f3g8_19 := int64(f3) * int64(g8_19) - f3g9_38 := int64(f3_2) * int64(g9_19) - f4g0 := int64(f4) * int64(g0) - f4g1 := int64(f4) * int64(g1) - f4g2 := int64(f4) * int64(g2) - f4g3 := int64(f4) * int64(g3) - f4g4 := int64(f4) * int64(g4) - f4g5 := int64(f4) * int64(g5) - f4g6_19 := int64(f4) * int64(g6_19) - f4g7_19 := int64(f4) * int64(g7_19) - f4g8_19 := int64(f4) * int64(g8_19) - f4g9_19 := int64(f4) * int64(g9_19) - f5g0 := int64(f5) * int64(g0) - f5g1_2 := int64(f5_2) * int64(g1) - f5g2 := int64(f5) * int64(g2) - f5g3_2 := int64(f5_2) * int64(g3) - f5g4 := int64(f5) * int64(g4) - f5g5_38 := int64(f5_2) * int64(g5_19) - f5g6_19 := int64(f5) * int64(g6_19) - f5g7_38 := int64(f5_2) * int64(g7_19) - f5g8_19 := int64(f5) * int64(g8_19) - f5g9_38 := int64(f5_2) * int64(g9_19) - f6g0 := int64(f6) * int64(g0) - f6g1 := int64(f6) * int64(g1) - f6g2 := int64(f6) * int64(g2) - f6g3 := int64(f6) * int64(g3) - f6g4_19 := int64(f6) * int64(g4_19) - f6g5_19 := int64(f6) * int64(g5_19) - f6g6_19 := int64(f6) * int64(g6_19) - f6g7_19 := int64(f6) * int64(g7_19) - f6g8_19 := int64(f6) * int64(g8_19) - f6g9_19 := int64(f6) * int64(g9_19) - f7g0 := int64(f7) * int64(g0) - f7g1_2 := int64(f7_2) * int64(g1) - f7g2 := int64(f7) * int64(g2) - f7g3_38 := int64(f7_2) * int64(g3_19) - f7g4_19 := int64(f7) * int64(g4_19) - f7g5_38 := int64(f7_2) * int64(g5_19) - f7g6_19 := int64(f7) * int64(g6_19) - f7g7_38 := int64(f7_2) * int64(g7_19) - f7g8_19 := int64(f7) * int64(g8_19) - f7g9_38 := int64(f7_2) * int64(g9_19) - f8g0 := int64(f8) * int64(g0) - f8g1 := int64(f8) * int64(g1) - f8g2_19 := int64(f8) * int64(g2_19) - f8g3_19 := int64(f8) * int64(g3_19) - f8g4_19 := int64(f8) * int64(g4_19) - 
f8g5_19 := int64(f8) * int64(g5_19) - f8g6_19 := int64(f8) * int64(g6_19) - f8g7_19 := int64(f8) * int64(g7_19) - f8g8_19 := int64(f8) * int64(g8_19) - f8g9_19 := int64(f8) * int64(g9_19) - f9g0 := int64(f9) * int64(g0) - f9g1_38 := int64(f9_2) * int64(g1_19) - f9g2_19 := int64(f9) * int64(g2_19) - f9g3_38 := int64(f9_2) * int64(g3_19) - f9g4_19 := int64(f9) * int64(g4_19) - f9g5_38 := int64(f9_2) * int64(g5_19) - f9g6_19 := int64(f9) * int64(g6_19) - f9g7_38 := int64(f9_2) * int64(g7_19) - f9g8_19 := int64(f9) * int64(g8_19) - f9g9_38 := int64(f9_2) * int64(g9_19) - h0 := f0g0 + f1g9_38 + f2g8_19 + f3g7_38 + f4g6_19 + f5g5_38 + f6g4_19 + f7g3_38 + f8g2_19 + f9g1_38 - h1 := f0g1 + f1g0 + f2g9_19 + f3g8_19 + f4g7_19 + f5g6_19 + f6g5_19 + f7g4_19 + f8g3_19 + f9g2_19 - h2 := f0g2 + f1g1_2 + f2g0 + f3g9_38 + f4g8_19 + f5g7_38 + f6g6_19 + f7g5_38 + f8g4_19 + f9g3_38 - h3 := f0g3 + f1g2 + f2g1 + f3g0 + f4g9_19 + f5g8_19 + f6g7_19 + f7g6_19 + f8g5_19 + f9g4_19 - h4 := f0g4 + f1g3_2 + f2g2 + f3g1_2 + f4g0 + f5g9_38 + f6g8_19 + f7g7_38 + f8g6_19 + f9g5_38 - h5 := f0g5 + f1g4 + f2g3 + f3g2 + f4g1 + f5g0 + f6g9_19 + f7g8_19 + f8g7_19 + f9g6_19 - h6 := f0g6 + f1g5_2 + f2g4 + f3g3_2 + f4g2 + f5g1_2 + f6g0 + f7g9_38 + f8g8_19 + f9g7_38 - h7 := f0g7 + f1g6 + f2g5 + f3g4 + f4g3 + f5g2 + f6g1 + f7g0 + f8g9_19 + f9g8_19 - h8 := f0g8 + f1g7_2 + f2g6 + f3g5_2 + f4g4 + f5g3_2 + f6g2 + f7g1_2 + f8g0 + f9g9_38 - h9 := f0g9 + f1g8 + f2g7 + f3g6 + f4g5 + f5g4 + f6g3 + f7g2 + f8g1 + f9g0 - var carry [10]int64 - - // |h0| <= (1.1*1.1*2^52*(1+19+19+19+19)+1.1*1.1*2^50*(38+38+38+38+38)) - // i.e. |h0| <= 1.2*2^59; narrower ranges for h2, h4, h6, h8 - // |h1| <= (1.1*1.1*2^51*(1+1+19+19+19+19+19+19+19+19)) - // i.e. |h1| <= 1.5*2^58; narrower ranges for h3, h5, h7, h9 - - carry[0] = (h0 + (1 << 25)) >> 26 - h1 += carry[0] - h0 -= carry[0] << 26 - carry[4] = (h4 + (1 << 25)) >> 26 - h5 += carry[4] - h4 -= carry[4] << 26 - // |h0| <= 2^25 - // |h4| <= 2^25 - // |h1| <= 1.51*2^58 - // |h5| <= 1.51*2^58 - - carry[1] = (h1 + (1 << 24)) >> 25 - h2 += carry[1] - h1 -= carry[1] << 25 - carry[5] = (h5 + (1 << 24)) >> 25 - h6 += carry[5] - h5 -= carry[5] << 25 - // |h1| <= 2^24; from now on fits into int32 - // |h5| <= 2^24; from now on fits into int32 - // |h2| <= 1.21*2^59 - // |h6| <= 1.21*2^59 - - carry[2] = (h2 + (1 << 25)) >> 26 - h3 += carry[2] - h2 -= carry[2] << 26 - carry[6] = (h6 + (1 << 25)) >> 26 - h7 += carry[6] - h6 -= carry[6] << 26 - // |h2| <= 2^25; from now on fits into int32 unchanged - // |h6| <= 2^25; from now on fits into int32 unchanged - // |h3| <= 1.51*2^58 - // |h7| <= 1.51*2^58 - - carry[3] = (h3 + (1 << 24)) >> 25 - h4 += carry[3] - h3 -= carry[3] << 25 - carry[7] = (h7 + (1 << 24)) >> 25 - h8 += carry[7] - h7 -= carry[7] << 25 - // |h3| <= 2^24; from now on fits into int32 unchanged - // |h7| <= 2^24; from now on fits into int32 unchanged - // |h4| <= 1.52*2^33 - // |h8| <= 1.52*2^33 - - carry[4] = (h4 + (1 << 25)) >> 26 - h5 += carry[4] - h4 -= carry[4] << 26 - carry[8] = (h8 + (1 << 25)) >> 26 - h9 += carry[8] - h8 -= carry[8] << 26 - // |h4| <= 2^25; from now on fits into int32 unchanged - // |h8| <= 2^25; from now on fits into int32 unchanged - // |h5| <= 1.01*2^24 - // |h9| <= 1.51*2^58 - - carry[9] = (h9 + (1 << 24)) >> 25 - h0 += carry[9] * 19 - h9 -= carry[9] << 25 - // |h9| <= 2^24; from now on fits into int32 unchanged - // |h0| <= 1.8*2^37 - - carry[0] = (h0 + (1 << 25)) >> 26 - h1 += carry[0] - h0 -= carry[0] << 26 - // |h0| <= 2^25; from now on fits into int32 unchanged - // |h1| <= 
1.01*2^24 - - h[0] = int32(h0) - h[1] = int32(h1) - h[2] = int32(h2) - h[3] = int32(h3) - h[4] = int32(h4) - h[5] = int32(h5) - h[6] = int32(h6) - h[7] = int32(h7) - h[8] = int32(h8) - h[9] = int32(h9) -} - -// feSquare calculates h = f*f. Can overlap h with f. -// -// Preconditions: -// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. -// -// Postconditions: -// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. -func feSquare(h, f *fieldElement) { - f0 := f[0] - f1 := f[1] - f2 := f[2] - f3 := f[3] - f4 := f[4] - f5 := f[5] - f6 := f[6] - f7 := f[7] - f8 := f[8] - f9 := f[9] - f0_2 := 2 * f0 - f1_2 := 2 * f1 - f2_2 := 2 * f2 - f3_2 := 2 * f3 - f4_2 := 2 * f4 - f5_2 := 2 * f5 - f6_2 := 2 * f6 - f7_2 := 2 * f7 - f5_38 := 38 * f5 // 1.31*2^30 - f6_19 := 19 * f6 // 1.31*2^30 - f7_38 := 38 * f7 // 1.31*2^30 - f8_19 := 19 * f8 // 1.31*2^30 - f9_38 := 38 * f9 // 1.31*2^30 - f0f0 := int64(f0) * int64(f0) - f0f1_2 := int64(f0_2) * int64(f1) - f0f2_2 := int64(f0_2) * int64(f2) - f0f3_2 := int64(f0_2) * int64(f3) - f0f4_2 := int64(f0_2) * int64(f4) - f0f5_2 := int64(f0_2) * int64(f5) - f0f6_2 := int64(f0_2) * int64(f6) - f0f7_2 := int64(f0_2) * int64(f7) - f0f8_2 := int64(f0_2) * int64(f8) - f0f9_2 := int64(f0_2) * int64(f9) - f1f1_2 := int64(f1_2) * int64(f1) - f1f2_2 := int64(f1_2) * int64(f2) - f1f3_4 := int64(f1_2) * int64(f3_2) - f1f4_2 := int64(f1_2) * int64(f4) - f1f5_4 := int64(f1_2) * int64(f5_2) - f1f6_2 := int64(f1_2) * int64(f6) - f1f7_4 := int64(f1_2) * int64(f7_2) - f1f8_2 := int64(f1_2) * int64(f8) - f1f9_76 := int64(f1_2) * int64(f9_38) - f2f2 := int64(f2) * int64(f2) - f2f3_2 := int64(f2_2) * int64(f3) - f2f4_2 := int64(f2_2) * int64(f4) - f2f5_2 := int64(f2_2) * int64(f5) - f2f6_2 := int64(f2_2) * int64(f6) - f2f7_2 := int64(f2_2) * int64(f7) - f2f8_38 := int64(f2_2) * int64(f8_19) - f2f9_38 := int64(f2) * int64(f9_38) - f3f3_2 := int64(f3_2) * int64(f3) - f3f4_2 := int64(f3_2) * int64(f4) - f3f5_4 := int64(f3_2) * int64(f5_2) - f3f6_2 := int64(f3_2) * int64(f6) - f3f7_76 := int64(f3_2) * int64(f7_38) - f3f8_38 := int64(f3_2) * int64(f8_19) - f3f9_76 := int64(f3_2) * int64(f9_38) - f4f4 := int64(f4) * int64(f4) - f4f5_2 := int64(f4_2) * int64(f5) - f4f6_38 := int64(f4_2) * int64(f6_19) - f4f7_38 := int64(f4) * int64(f7_38) - f4f8_38 := int64(f4_2) * int64(f8_19) - f4f9_38 := int64(f4) * int64(f9_38) - f5f5_38 := int64(f5) * int64(f5_38) - f5f6_38 := int64(f5_2) * int64(f6_19) - f5f7_76 := int64(f5_2) * int64(f7_38) - f5f8_38 := int64(f5_2) * int64(f8_19) - f5f9_76 := int64(f5_2) * int64(f9_38) - f6f6_19 := int64(f6) * int64(f6_19) - f6f7_38 := int64(f6) * int64(f7_38) - f6f8_38 := int64(f6_2) * int64(f8_19) - f6f9_38 := int64(f6) * int64(f9_38) - f7f7_38 := int64(f7) * int64(f7_38) - f7f8_38 := int64(f7_2) * int64(f8_19) - f7f9_76 := int64(f7_2) * int64(f9_38) - f8f8_19 := int64(f8) * int64(f8_19) - f8f9_38 := int64(f8) * int64(f9_38) - f9f9_38 := int64(f9) * int64(f9_38) - h0 := f0f0 + f1f9_76 + f2f8_38 + f3f7_76 + f4f6_38 + f5f5_38 - h1 := f0f1_2 + f2f9_38 + f3f8_38 + f4f7_38 + f5f6_38 - h2 := f0f2_2 + f1f1_2 + f3f9_76 + f4f8_38 + f5f7_76 + f6f6_19 - h3 := f0f3_2 + f1f2_2 + f4f9_38 + f5f8_38 + f6f7_38 - h4 := f0f4_2 + f1f3_4 + f2f2 + f5f9_76 + f6f8_38 + f7f7_38 - h5 := f0f5_2 + f1f4_2 + f2f3_2 + f6f9_38 + f7f8_38 - h6 := f0f6_2 + f1f5_4 + f2f4_2 + f3f3_2 + f7f9_76 + f8f8_19 - h7 := f0f7_2 + f1f6_2 + f2f5_2 + f3f4_2 + f8f9_38 - h8 := f0f8_2 + f1f7_4 + f2f6_2 + f3f5_4 + f4f4 + f9f9_38 - h9 := f0f9_2 + f1f8_2 + f2f7_2 + f3f6_2 + f4f5_2 - var carry [10]int64 - - carry[0] 
= (h0 + (1 << 25)) >> 26 - h1 += carry[0] - h0 -= carry[0] << 26 - carry[4] = (h4 + (1 << 25)) >> 26 - h5 += carry[4] - h4 -= carry[4] << 26 - - carry[1] = (h1 + (1 << 24)) >> 25 - h2 += carry[1] - h1 -= carry[1] << 25 - carry[5] = (h5 + (1 << 24)) >> 25 - h6 += carry[5] - h5 -= carry[5] << 25 - - carry[2] = (h2 + (1 << 25)) >> 26 - h3 += carry[2] - h2 -= carry[2] << 26 - carry[6] = (h6 + (1 << 25)) >> 26 - h7 += carry[6] - h6 -= carry[6] << 26 - - carry[3] = (h3 + (1 << 24)) >> 25 - h4 += carry[3] - h3 -= carry[3] << 25 - carry[7] = (h7 + (1 << 24)) >> 25 - h8 += carry[7] - h7 -= carry[7] << 25 - - carry[4] = (h4 + (1 << 25)) >> 26 - h5 += carry[4] - h4 -= carry[4] << 26 - carry[8] = (h8 + (1 << 25)) >> 26 - h9 += carry[8] - h8 -= carry[8] << 26 - - carry[9] = (h9 + (1 << 24)) >> 25 - h0 += carry[9] * 19 - h9 -= carry[9] << 25 - - carry[0] = (h0 + (1 << 25)) >> 26 - h1 += carry[0] - h0 -= carry[0] << 26 - - h[0] = int32(h0) - h[1] = int32(h1) - h[2] = int32(h2) - h[3] = int32(h3) - h[4] = int32(h4) - h[5] = int32(h5) - h[6] = int32(h6) - h[7] = int32(h7) - h[8] = int32(h8) - h[9] = int32(h9) -} - -// feMul121666 calculates h = f * 121666. Can overlap h with f. -// -// Preconditions: -// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. -// -// Postconditions: -// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. -func feMul121666(h, f *fieldElement) { - h0 := int64(f[0]) * 121666 - h1 := int64(f[1]) * 121666 - h2 := int64(f[2]) * 121666 - h3 := int64(f[3]) * 121666 - h4 := int64(f[4]) * 121666 - h5 := int64(f[5]) * 121666 - h6 := int64(f[6]) * 121666 - h7 := int64(f[7]) * 121666 - h8 := int64(f[8]) * 121666 - h9 := int64(f[9]) * 121666 - var carry [10]int64 - - carry[9] = (h9 + (1 << 24)) >> 25 - h0 += carry[9] * 19 - h9 -= carry[9] << 25 - carry[1] = (h1 + (1 << 24)) >> 25 - h2 += carry[1] - h1 -= carry[1] << 25 - carry[3] = (h3 + (1 << 24)) >> 25 - h4 += carry[3] - h3 -= carry[3] << 25 - carry[5] = (h5 + (1 << 24)) >> 25 - h6 += carry[5] - h5 -= carry[5] << 25 - carry[7] = (h7 + (1 << 24)) >> 25 - h8 += carry[7] - h7 -= carry[7] << 25 - - carry[0] = (h0 + (1 << 25)) >> 26 - h1 += carry[0] - h0 -= carry[0] << 26 - carry[2] = (h2 + (1 << 25)) >> 26 - h3 += carry[2] - h2 -= carry[2] << 26 - carry[4] = (h4 + (1 << 25)) >> 26 - h5 += carry[4] - h4 -= carry[4] << 26 - carry[6] = (h6 + (1 << 25)) >> 26 - h7 += carry[6] - h6 -= carry[6] << 26 - carry[8] = (h8 + (1 << 25)) >> 26 - h9 += carry[8] - h8 -= carry[8] << 26 - - h[0] = int32(h0) - h[1] = int32(h1) - h[2] = int32(h2) - h[3] = int32(h3) - h[4] = int32(h4) - h[5] = int32(h5) - h[6] = int32(h6) - h[7] = int32(h7) - h[8] = int32(h8) - h[9] = int32(h9) -} - -// feInvert sets out = z^-1. 
-func feInvert(out, z *fieldElement) { - var t0, t1, t2, t3 fieldElement - var i int - - feSquare(&t0, z) - for i = 1; i < 1; i++ { - feSquare(&t0, &t0) - } - feSquare(&t1, &t0) - for i = 1; i < 2; i++ { - feSquare(&t1, &t1) - } - feMul(&t1, z, &t1) - feMul(&t0, &t0, &t1) - feSquare(&t2, &t0) - for i = 1; i < 1; i++ { - feSquare(&t2, &t2) - } - feMul(&t1, &t1, &t2) - feSquare(&t2, &t1) - for i = 1; i < 5; i++ { - feSquare(&t2, &t2) - } - feMul(&t1, &t2, &t1) - feSquare(&t2, &t1) - for i = 1; i < 10; i++ { - feSquare(&t2, &t2) - } - feMul(&t2, &t2, &t1) - feSquare(&t3, &t2) - for i = 1; i < 20; i++ { - feSquare(&t3, &t3) - } - feMul(&t2, &t3, &t2) - feSquare(&t2, &t2) - for i = 1; i < 10; i++ { - feSquare(&t2, &t2) - } - feMul(&t1, &t2, &t1) - feSquare(&t2, &t1) - for i = 1; i < 50; i++ { - feSquare(&t2, &t2) - } - feMul(&t2, &t2, &t1) - feSquare(&t3, &t2) - for i = 1; i < 100; i++ { - feSquare(&t3, &t3) - } - feMul(&t2, &t3, &t2) - feSquare(&t2, &t2) - for i = 1; i < 50; i++ { - feSquare(&t2, &t2) - } - feMul(&t1, &t2, &t1) - feSquare(&t1, &t1) - for i = 1; i < 5; i++ { - feSquare(&t1, &t1) - } - feMul(out, &t1, &t0) -} - -func scalarMultGeneric(out, in, base *[32]byte) { - var e [32]byte - - copy(e[:], in[:]) - e[0] &= 248 - e[31] &= 127 - e[31] |= 64 - - var x1, x2, z2, x3, z3, tmp0, tmp1 fieldElement - feFromBytes(&x1, base) - feOne(&x2) - feCopy(&x3, &x1) - feOne(&z3) - - swap := int32(0) - for pos := 254; pos >= 0; pos-- { - b := e[pos/8] >> uint(pos&7) - b &= 1 - swap ^= int32(b) - feCSwap(&x2, &x3, swap) - feCSwap(&z2, &z3, swap) - swap = int32(b) - - feSub(&tmp0, &x3, &z3) - feSub(&tmp1, &x2, &z2) - feAdd(&x2, &x2, &z2) - feAdd(&z2, &x3, &z3) - feMul(&z3, &tmp0, &x2) - feMul(&z2, &z2, &tmp1) - feSquare(&tmp0, &tmp1) - feSquare(&tmp1, &x2) - feAdd(&x3, &z3, &z2) - feSub(&z2, &z3, &z2) - feMul(&x2, &tmp1, &tmp0) - feSub(&tmp1, &tmp1, &tmp0) - feSquare(&z2, &z2) - feMul121666(&z3, &tmp1) - feSquare(&x3, &x3) - feAdd(&tmp0, &tmp0, &z3) - feMul(&z3, &x1, &z2) - feMul(&z2, &tmp1, &tmp0) - } - - feCSwap(&x2, &x3, swap) - feCSwap(&z2, &z3, swap) - - feInvert(&z2, &z2) - feMul(&x2, &x2, &z2) - feToBytes(out, &x2) -} diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519_noasm.go b/vendor/golang.org/x/crypto/curve25519/curve25519_noasm.go deleted file mode 100644 index 047d49a..0000000 --- a/vendor/golang.org/x/crypto/curve25519/curve25519_noasm.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !amd64 gccgo appengine purego - -package curve25519 - -func scalarMult(out, in, base *[32]byte) { - scalarMultGeneric(out, in, base) -} diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/README b/vendor/golang.org/x/crypto/curve25519/internal/field/README new file mode 100644 index 0000000..e25bca7 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/README @@ -0,0 +1,7 @@ +This package is kept in sync with crypto/ed25519/internal/edwards25519/field in +the standard library. + +If there are any changes in the standard library that need to be synced to this +package, run sync.sh. It will not overwrite any local changes made since the +previous sync, so it's ok to land changes in this package first, and then sync +to the standard library later. 
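scalarMultGeneric above clamps the scalar before running the Montgomery ladder (e[0] &= 248, e[31] &= 127, e[31] |= 64). This is the standard X25519 clamping; a small standalone sketch with an arbitrary stand-in scalar:

package main

import "fmt"

func main() {
	var e [32]byte
	for i := range e {
		e[i] = 0xff // stand-in for a raw 32-byte scalar
	}

	e[0] &= 248  // clear the low 3 bits: the scalar becomes a multiple of the cofactor 8
	e[31] &= 127 // clear bit 255
	e[31] |= 64  // set bit 254, fixing the scalar's top bit

	fmt.Printf("%x\n", e)
}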
diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe.go b/vendor/golang.org/x/crypto/curve25519/internal/field/fe.go new file mode 100644 index 0000000..ca841ad --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe.go @@ -0,0 +1,416 @@ +// Copyright (c) 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package field implements fast arithmetic modulo 2^255-19. +package field + +import ( + "crypto/subtle" + "encoding/binary" + "math/bits" +) + +// Element represents an element of the field GF(2^255-19). Note that this +// is not a cryptographically secure group, and should only be used to interact +// with edwards25519.Point coordinates. +// +// This type works similarly to math/big.Int, and all arguments and receivers +// are allowed to alias. +// +// The zero value is a valid zero element. +type Element struct { + // An element t represents the integer + // t.l0 + t.l1*2^51 + t.l2*2^102 + t.l3*2^153 + t.l4*2^204 + // + // Between operations, all limbs are expected to be lower than 2^52. + l0 uint64 + l1 uint64 + l2 uint64 + l3 uint64 + l4 uint64 +} + +const maskLow51Bits uint64 = (1 << 51) - 1 + +var feZero = &Element{0, 0, 0, 0, 0} + +// Zero sets v = 0, and returns v. +func (v *Element) Zero() *Element { + *v = *feZero + return v +} + +var feOne = &Element{1, 0, 0, 0, 0} + +// One sets v = 1, and returns v. +func (v *Element) One() *Element { + *v = *feOne + return v +} + +// reduce reduces v modulo 2^255 - 19 and returns it. +func (v *Element) reduce() *Element { + v.carryPropagate() + + // After the light reduction we now have a field element representation + // v < 2^255 + 2^13 * 19, but need v < 2^255 - 19. + + // If v >= 2^255 - 19, then v + 19 >= 2^255, which would overflow 2^255 - 1, + // generating a carry. That is, c will be 0 if v < 2^255 - 19, and 1 otherwise. + c := (v.l0 + 19) >> 51 + c = (v.l1 + c) >> 51 + c = (v.l2 + c) >> 51 + c = (v.l3 + c) >> 51 + c = (v.l4 + c) >> 51 + + // If v < 2^255 - 19 and c = 0, this will be a no-op. Otherwise, it's + // effectively applying the reduction identity to the carry. + v.l0 += 19 * c + + v.l1 += v.l0 >> 51 + v.l0 = v.l0 & maskLow51Bits + v.l2 += v.l1 >> 51 + v.l1 = v.l1 & maskLow51Bits + v.l3 += v.l2 >> 51 + v.l2 = v.l2 & maskLow51Bits + v.l4 += v.l3 >> 51 + v.l3 = v.l3 & maskLow51Bits + // no additional carry + v.l4 = v.l4 & maskLow51Bits + + return v +} + +// Add sets v = a + b, and returns v. +func (v *Element) Add(a, b *Element) *Element { + v.l0 = a.l0 + b.l0 + v.l1 = a.l1 + b.l1 + v.l2 = a.l2 + b.l2 + v.l3 = a.l3 + b.l3 + v.l4 = a.l4 + b.l4 + // Using the generic implementation here is actually faster than the + // assembly. Probably because the body of this function is so simple that + // the compiler can figure out better optimizations by inlining the carry + // propagation. TODO + return v.carryPropagateGeneric() +} + +// Subtract sets v = a - b, and returns v. +func (v *Element) Subtract(a, b *Element) *Element { + // We first add 2 * p, to guarantee the subtraction won't underflow, and + // then subtract b (which can be up to 2^255 + 2^13 * 19). + v.l0 = (a.l0 + 0xFFFFFFFFFFFDA) - b.l0 + v.l1 = (a.l1 + 0xFFFFFFFFFFFFE) - b.l1 + v.l2 = (a.l2 + 0xFFFFFFFFFFFFE) - b.l2 + v.l3 = (a.l3 + 0xFFFFFFFFFFFFE) - b.l3 + v.l4 = (a.l4 + 0xFFFFFFFFFFFFE) - b.l4 + return v.carryPropagate() +} + +// Negate sets v = -a, and returns v. 
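Subtract above adds a constant bias before subtracting so the limb arithmetic cannot underflow; per its comment the bias is 2*p expressed in five 51-bit limbs. A quick math/big check that the hex constants from the source really sum to 2*(2^255 - 19):

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// The five constants Subtract adds, at limb weights 2^0, 2^51, 2^102, 2^153, 2^204.
	limbs := []uint64{0xFFFFFFFFFFFDA, 0xFFFFFFFFFFFFE, 0xFFFFFFFFFFFFE, 0xFFFFFFFFFFFFE, 0xFFFFFFFFFFFFE}

	sum := new(big.Int)
	for i, l := range limbs {
		sum.Add(sum, new(big.Int).Lsh(new(big.Int).SetUint64(l), uint(51*i)))
	}

	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))
	twoP := new(big.Int).Lsh(p, 1)
	fmt.Println(sum.Cmp(twoP) == 0) // true: the bias is exactly 2*p
}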
+func (v *Element) Negate(a *Element) *Element { + return v.Subtract(feZero, a) +} + +// Invert sets v = 1/z mod p, and returns v. +// +// If z == 0, Invert returns v = 0. +func (v *Element) Invert(z *Element) *Element { + // Inversion is implemented as exponentiation with exponent p − 2. It uses the + // same sequence of 255 squarings and 11 multiplications as [Curve25519]. + var z2, z9, z11, z2_5_0, z2_10_0, z2_20_0, z2_50_0, z2_100_0, t Element + + z2.Square(z) // 2 + t.Square(&z2) // 4 + t.Square(&t) // 8 + z9.Multiply(&t, z) // 9 + z11.Multiply(&z9, &z2) // 11 + t.Square(&z11) // 22 + z2_5_0.Multiply(&t, &z9) // 31 = 2^5 - 2^0 + + t.Square(&z2_5_0) // 2^6 - 2^1 + for i := 0; i < 4; i++ { + t.Square(&t) // 2^10 - 2^5 + } + z2_10_0.Multiply(&t, &z2_5_0) // 2^10 - 2^0 + + t.Square(&z2_10_0) // 2^11 - 2^1 + for i := 0; i < 9; i++ { + t.Square(&t) // 2^20 - 2^10 + } + z2_20_0.Multiply(&t, &z2_10_0) // 2^20 - 2^0 + + t.Square(&z2_20_0) // 2^21 - 2^1 + for i := 0; i < 19; i++ { + t.Square(&t) // 2^40 - 2^20 + } + t.Multiply(&t, &z2_20_0) // 2^40 - 2^0 + + t.Square(&t) // 2^41 - 2^1 + for i := 0; i < 9; i++ { + t.Square(&t) // 2^50 - 2^10 + } + z2_50_0.Multiply(&t, &z2_10_0) // 2^50 - 2^0 + + t.Square(&z2_50_0) // 2^51 - 2^1 + for i := 0; i < 49; i++ { + t.Square(&t) // 2^100 - 2^50 + } + z2_100_0.Multiply(&t, &z2_50_0) // 2^100 - 2^0 + + t.Square(&z2_100_0) // 2^101 - 2^1 + for i := 0; i < 99; i++ { + t.Square(&t) // 2^200 - 2^100 + } + t.Multiply(&t, &z2_100_0) // 2^200 - 2^0 + + t.Square(&t) // 2^201 - 2^1 + for i := 0; i < 49; i++ { + t.Square(&t) // 2^250 - 2^50 + } + t.Multiply(&t, &z2_50_0) // 2^250 - 2^0 + + t.Square(&t) // 2^251 - 2^1 + t.Square(&t) // 2^252 - 2^2 + t.Square(&t) // 2^253 - 2^3 + t.Square(&t) // 2^254 - 2^4 + t.Square(&t) // 2^255 - 2^5 + + return v.Multiply(&t, &z11) // 2^255 - 21 +} + +// Set sets v = a, and returns v. +func (v *Element) Set(a *Element) *Element { + *v = *a + return v +} + +// SetBytes sets v to x, which must be a 32-byte little-endian encoding. +// +// Consistent with RFC 7748, the most significant bit (the high bit of the +// last byte) is ignored, and non-canonical values (2^255-19 through 2^255-1) +// are accepted. Note that this is laxer than specified by RFC 8032. +func (v *Element) SetBytes(x []byte) *Element { + if len(x) != 32 { + panic("edwards25519: invalid field element input size") + } + + // Bits 0:51 (bytes 0:8, bits 0:64, shift 0, mask 51). + v.l0 = binary.LittleEndian.Uint64(x[0:8]) + v.l0 &= maskLow51Bits + // Bits 51:102 (bytes 6:14, bits 48:112, shift 3, mask 51). + v.l1 = binary.LittleEndian.Uint64(x[6:14]) >> 3 + v.l1 &= maskLow51Bits + // Bits 102:153 (bytes 12:20, bits 96:160, shift 6, mask 51). + v.l2 = binary.LittleEndian.Uint64(x[12:20]) >> 6 + v.l2 &= maskLow51Bits + // Bits 153:204 (bytes 19:27, bits 152:216, shift 1, mask 51). + v.l3 = binary.LittleEndian.Uint64(x[19:27]) >> 1 + v.l3 &= maskLow51Bits + // Bits 204:251 (bytes 24:32, bits 192:256, shift 12, mask 51). + // Note: not bytes 25:33, shift 4, to avoid overread. + v.l4 = binary.LittleEndian.Uint64(x[24:32]) >> 12 + v.l4 &= maskLow51Bits + + return v +} + +// Bytes returns the canonical 32-byte little-endian encoding of v. +func (v *Element) Bytes() []byte { + // This function is outlined to make the allocations inline in the caller + // rather than happen on the heap. 
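Invert above computes the inverse as an exponentiation with exponent p - 2 (Fermat's little theorem), realized as a fixed chain of squarings and multiplications. A minimal math/big sanity check of the underlying identity, using an arbitrary nonzero demo value:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))
	x := big.NewInt(123456789) // any nonzero element works for the demo

	// Fermat: x^(p-2) ≡ x^-1 (mod p) for x != 0.
	exp := new(big.Int).Sub(p, big.NewInt(2))
	byFermat := new(big.Int).Exp(x, exp, p)
	byEuclid := new(big.Int).ModInverse(x, p)

	fmt.Println(byFermat.Cmp(byEuclid) == 0) // true
}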
+ var out [32]byte + return v.bytes(&out) +} + +func (v *Element) bytes(out *[32]byte) []byte { + t := *v + t.reduce() + + var buf [8]byte + for i, l := range [5]uint64{t.l0, t.l1, t.l2, t.l3, t.l4} { + bitsOffset := i * 51 + binary.LittleEndian.PutUint64(buf[:], l<= len(out) { + break + } + out[off] |= bb + } + } + + return out[:] +} + +// Equal returns 1 if v and u are equal, and 0 otherwise. +func (v *Element) Equal(u *Element) int { + sa, sv := u.Bytes(), v.Bytes() + return subtle.ConstantTimeCompare(sa, sv) +} + +// mask64Bits returns 0xffffffff if cond is 1, and 0 otherwise. +func mask64Bits(cond int) uint64 { return ^(uint64(cond) - 1) } + +// Select sets v to a if cond == 1, and to b if cond == 0. +func (v *Element) Select(a, b *Element, cond int) *Element { + m := mask64Bits(cond) + v.l0 = (m & a.l0) | (^m & b.l0) + v.l1 = (m & a.l1) | (^m & b.l1) + v.l2 = (m & a.l2) | (^m & b.l2) + v.l3 = (m & a.l3) | (^m & b.l3) + v.l4 = (m & a.l4) | (^m & b.l4) + return v +} + +// Swap swaps v and u if cond == 1 or leaves them unchanged if cond == 0, and returns v. +func (v *Element) Swap(u *Element, cond int) { + m := mask64Bits(cond) + t := m & (v.l0 ^ u.l0) + v.l0 ^= t + u.l0 ^= t + t = m & (v.l1 ^ u.l1) + v.l1 ^= t + u.l1 ^= t + t = m & (v.l2 ^ u.l2) + v.l2 ^= t + u.l2 ^= t + t = m & (v.l3 ^ u.l3) + v.l3 ^= t + u.l3 ^= t + t = m & (v.l4 ^ u.l4) + v.l4 ^= t + u.l4 ^= t +} + +// IsNegative returns 1 if v is negative, and 0 otherwise. +func (v *Element) IsNegative() int { + return int(v.Bytes()[0] & 1) +} + +// Absolute sets v to |u|, and returns v. +func (v *Element) Absolute(u *Element) *Element { + return v.Select(new(Element).Negate(u), u, u.IsNegative()) +} + +// Multiply sets v = x * y, and returns v. +func (v *Element) Multiply(x, y *Element) *Element { + feMul(v, x, y) + return v +} + +// Square sets v = x * x, and returns v. +func (v *Element) Square(x *Element) *Element { + feSquare(v, x) + return v +} + +// Mult32 sets v = x * y, and returns v. +func (v *Element) Mult32(x *Element, y uint32) *Element { + x0lo, x0hi := mul51(x.l0, y) + x1lo, x1hi := mul51(x.l1, y) + x2lo, x2hi := mul51(x.l2, y) + x3lo, x3hi := mul51(x.l3, y) + x4lo, x4hi := mul51(x.l4, y) + v.l0 = x0lo + 19*x4hi // carried over per the reduction identity + v.l1 = x1lo + x0hi + v.l2 = x2lo + x1hi + v.l3 = x3lo + x2hi + v.l4 = x4lo + x3hi + // The hi portions are going to be only 32 bits, plus any previous excess, + // so we can skip the carry propagation. + return v +} + +// mul51 returns lo + hi * 2⁵¹ = a * b. +func mul51(a uint64, b uint32) (lo uint64, hi uint64) { + mh, ml := bits.Mul64(a, uint64(b)) + lo = ml & maskLow51Bits + hi = (mh << 13) | (ml >> 51) + return +} + +// Pow22523 set v = x^((p-5)/8), and returns v. (p-5)/8 is 2^252-3. 
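Mult32 relies on the mul51 helper above, which splits the product a*b into a low 51-bit limb and a high part such that lo + hi*2^51 == a*b. A standalone copy of that helper plus a math/big check of the split (the demo inputs are arbitrary):

package main

import (
	"fmt"
	"math/big"
	"math/bits"
)

const maskLow51Bits = (1 << 51) - 1

// mul51 mirrors the helper in fe.go: it returns the 115-bit product a*b
// split as lo + hi*2^51.
func mul51(a uint64, b uint32) (lo uint64, hi uint64) {
	mh, ml := bits.Mul64(a, uint64(b))
	lo = ml & maskLow51Bits
	hi = (mh << 13) | (ml >> 51)
	return
}

func main() {
	a, b := uint64(0x7FFFFFFFFFFFF), uint32(121666) // arbitrary demo inputs
	lo, hi := mul51(a, b)

	got := new(big.Int).Add(
		new(big.Int).SetUint64(lo),
		new(big.Int).Lsh(new(big.Int).SetUint64(hi), 51),
	)
	want := new(big.Int).Mul(new(big.Int).SetUint64(a), new(big.Int).SetUint64(uint64(b)))
	fmt.Println(got.Cmp(want) == 0) // true: lo + hi*2^51 == a*b
}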
+func (v *Element) Pow22523(x *Element) *Element { + var t0, t1, t2 Element + + t0.Square(x) // x^2 + t1.Square(&t0) // x^4 + t1.Square(&t1) // x^8 + t1.Multiply(x, &t1) // x^9 + t0.Multiply(&t0, &t1) // x^11 + t0.Square(&t0) // x^22 + t0.Multiply(&t1, &t0) // x^31 + t1.Square(&t0) // x^62 + for i := 1; i < 5; i++ { // x^992 + t1.Square(&t1) + } + t0.Multiply(&t1, &t0) // x^1023 -> 1023 = 2^10 - 1 + t1.Square(&t0) // 2^11 - 2 + for i := 1; i < 10; i++ { // 2^20 - 2^10 + t1.Square(&t1) + } + t1.Multiply(&t1, &t0) // 2^20 - 1 + t2.Square(&t1) // 2^21 - 2 + for i := 1; i < 20; i++ { // 2^40 - 2^20 + t2.Square(&t2) + } + t1.Multiply(&t2, &t1) // 2^40 - 1 + t1.Square(&t1) // 2^41 - 2 + for i := 1; i < 10; i++ { // 2^50 - 2^10 + t1.Square(&t1) + } + t0.Multiply(&t1, &t0) // 2^50 - 1 + t1.Square(&t0) // 2^51 - 2 + for i := 1; i < 50; i++ { // 2^100 - 2^50 + t1.Square(&t1) + } + t1.Multiply(&t1, &t0) // 2^100 - 1 + t2.Square(&t1) // 2^101 - 2 + for i := 1; i < 100; i++ { // 2^200 - 2^100 + t2.Square(&t2) + } + t1.Multiply(&t2, &t1) // 2^200 - 1 + t1.Square(&t1) // 2^201 - 2 + for i := 1; i < 50; i++ { // 2^250 - 2^50 + t1.Square(&t1) + } + t0.Multiply(&t1, &t0) // 2^250 - 1 + t0.Square(&t0) // 2^251 - 2 + t0.Square(&t0) // 2^252 - 4 + return v.Multiply(&t0, x) // 2^252 - 3 -> x^(2^252-3) +} + +// sqrtM1 is 2^((p-1)/4), which squared is equal to -1 by Euler's Criterion. +var sqrtM1 = &Element{1718705420411056, 234908883556509, + 2233514472574048, 2117202627021982, 765476049583133} + +// SqrtRatio sets r to the non-negative square root of the ratio of u and v. +// +// If u/v is square, SqrtRatio returns r and 1. If u/v is not square, SqrtRatio +// sets r according to Section 4.3 of draft-irtf-cfrg-ristretto255-decaf448-00, +// and returns r and 0. +func (r *Element) SqrtRatio(u, v *Element) (rr *Element, wasSquare int) { + var a, b Element + + // r = (u * v3) * (u * v7)^((p-5)/8) + v2 := a.Square(v) + uv3 := b.Multiply(u, b.Multiply(v2, v)) + uv7 := a.Multiply(uv3, a.Square(v2)) + r.Multiply(uv3, r.Pow22523(uv7)) + + check := a.Multiply(v, a.Square(r)) // check = v * r^2 + + uNeg := b.Negate(u) + correctSignSqrt := check.Equal(u) + flippedSignSqrt := check.Equal(uNeg) + flippedSignSqrtI := check.Equal(uNeg.Multiply(uNeg, sqrtM1)) + + rPrime := b.Multiply(r, sqrtM1) // r_prime = SQRT_M1 * r + // r = CT_SELECT(r_prime IF flipped_sign_sqrt | flipped_sign_sqrt_i ELSE r) + r.Select(rPrime, r, flippedSignSqrt|flippedSignSqrtI) + + r.Absolute(r) // Choose the nonnegative square root. + return r, correctSignSqrt | flippedSignSqrt +} diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.go b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.go new file mode 100644 index 0000000..44dc8e8 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.go @@ -0,0 +1,13 @@ +// Code generated by command: go run fe_amd64_asm.go -out ../fe_amd64.s -stubs ../fe_amd64.go -pkg field. DO NOT EDIT. + +// +build amd64,gc,!purego + +package field + +// feMul sets out = a * b. It works like feMulGeneric. +//go:noescape +func feMul(out *Element, a *Element, b *Element) + +// feSquare sets out = a * a. It works like feSquareGeneric. 
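Pow22523's comment above states that the exponent (p-5)/8 equals 2^252 - 3. A one-line math/big verification of that arithmetic:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))

	// (p-5)/8, the exponent used by Pow22523 ((p-5) is divisible by 8).
	e := new(big.Int).Rsh(new(big.Int).Sub(p, big.NewInt(5)), 3)

	// 2^252 - 3, the form quoted in the doc comment.
	want := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 252), big.NewInt(3))

	fmt.Println(e.Cmp(want) == 0) // true
}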
+//go:noescape +func feSquare(out *Element, a *Element) diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.s b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.s new file mode 100644 index 0000000..293f013 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64.s @@ -0,0 +1,379 @@ +// Code generated by command: go run fe_amd64_asm.go -out ../fe_amd64.s -stubs ../fe_amd64.go -pkg field. DO NOT EDIT. + +//go:build amd64 && gc && !purego +// +build amd64,gc,!purego + +#include "textflag.h" + +// func feMul(out *Element, a *Element, b *Element) +TEXT ·feMul(SB), NOSPLIT, $0-24 + MOVQ a+8(FP), CX + MOVQ b+16(FP), BX + + // r0 = a0×b0 + MOVQ (CX), AX + MULQ (BX) + MOVQ AX, DI + MOVQ DX, SI + + // r0 += 19×a1×b4 + MOVQ 8(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 32(BX) + ADDQ AX, DI + ADCQ DX, SI + + // r0 += 19×a2×b3 + MOVQ 16(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 24(BX) + ADDQ AX, DI + ADCQ DX, SI + + // r0 += 19×a3×b2 + MOVQ 24(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 16(BX) + ADDQ AX, DI + ADCQ DX, SI + + // r0 += 19×a4×b1 + MOVQ 32(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 8(BX) + ADDQ AX, DI + ADCQ DX, SI + + // r1 = a0×b1 + MOVQ (CX), AX + MULQ 8(BX) + MOVQ AX, R9 + MOVQ DX, R8 + + // r1 += a1×b0 + MOVQ 8(CX), AX + MULQ (BX) + ADDQ AX, R9 + ADCQ DX, R8 + + // r1 += 19×a2×b4 + MOVQ 16(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 32(BX) + ADDQ AX, R9 + ADCQ DX, R8 + + // r1 += 19×a3×b3 + MOVQ 24(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 24(BX) + ADDQ AX, R9 + ADCQ DX, R8 + + // r1 += 19×a4×b2 + MOVQ 32(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 16(BX) + ADDQ AX, R9 + ADCQ DX, R8 + + // r2 = a0×b2 + MOVQ (CX), AX + MULQ 16(BX) + MOVQ AX, R11 + MOVQ DX, R10 + + // r2 += a1×b1 + MOVQ 8(CX), AX + MULQ 8(BX) + ADDQ AX, R11 + ADCQ DX, R10 + + // r2 += a2×b0 + MOVQ 16(CX), AX + MULQ (BX) + ADDQ AX, R11 + ADCQ DX, R10 + + // r2 += 19×a3×b4 + MOVQ 24(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 32(BX) + ADDQ AX, R11 + ADCQ DX, R10 + + // r2 += 19×a4×b3 + MOVQ 32(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 24(BX) + ADDQ AX, R11 + ADCQ DX, R10 + + // r3 = a0×b3 + MOVQ (CX), AX + MULQ 24(BX) + MOVQ AX, R13 + MOVQ DX, R12 + + // r3 += a1×b2 + MOVQ 8(CX), AX + MULQ 16(BX) + ADDQ AX, R13 + ADCQ DX, R12 + + // r3 += a2×b1 + MOVQ 16(CX), AX + MULQ 8(BX) + ADDQ AX, R13 + ADCQ DX, R12 + + // r3 += a3×b0 + MOVQ 24(CX), AX + MULQ (BX) + ADDQ AX, R13 + ADCQ DX, R12 + + // r3 += 19×a4×b4 + MOVQ 32(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 32(BX) + ADDQ AX, R13 + ADCQ DX, R12 + + // r4 = a0×b4 + MOVQ (CX), AX + MULQ 32(BX) + MOVQ AX, R15 + MOVQ DX, R14 + + // r4 += a1×b3 + MOVQ 8(CX), AX + MULQ 24(BX) + ADDQ AX, R15 + ADCQ DX, R14 + + // r4 += a2×b2 + MOVQ 16(CX), AX + MULQ 16(BX) + ADDQ AX, R15 + ADCQ DX, R14 + + // r4 += a3×b1 + MOVQ 24(CX), AX + MULQ 8(BX) + ADDQ AX, R15 + ADCQ DX, R14 + + // r4 += a4×b0 + MOVQ 32(CX), AX + MULQ (BX) + ADDQ AX, R15 + ADCQ DX, R14 + + // First reduction chain + MOVQ $0x0007ffffffffffff, AX + SHLQ $0x0d, DI, SI + SHLQ $0x0d, R9, R8 + SHLQ $0x0d, R11, R10 + SHLQ $0x0d, R13, R12 + SHLQ $0x0d, R15, R14 + ANDQ AX, DI + IMUL3Q $0x13, R14, R14 + ADDQ R14, DI + ANDQ AX, R9 + ADDQ SI, R9 + ANDQ AX, R11 + ADDQ R8, R11 + ANDQ AX, R13 + ADDQ R10, R13 + ANDQ AX, R15 + ADDQ R12, R15 + + // Second reduction chain (carryPropagate) + MOVQ DI, SI + SHRQ $0x33, SI + MOVQ R9, R8 + SHRQ $0x33, R8 + MOVQ R11, R10 + SHRQ $0x33, R10 + MOVQ R13, R12 + SHRQ $0x33, R12 + MOVQ R15, R14 + SHRQ $0x33, R14 + ANDQ AX, DI + IMUL3Q $0x13, R14, R14 + ADDQ R14, DI + ANDQ AX, R9 + ADDQ SI, R9 + 
ANDQ AX, R11 + ADDQ R8, R11 + ANDQ AX, R13 + ADDQ R10, R13 + ANDQ AX, R15 + ADDQ R12, R15 + + // Store output + MOVQ out+0(FP), AX + MOVQ DI, (AX) + MOVQ R9, 8(AX) + MOVQ R11, 16(AX) + MOVQ R13, 24(AX) + MOVQ R15, 32(AX) + RET + +// func feSquare(out *Element, a *Element) +TEXT ·feSquare(SB), NOSPLIT, $0-16 + MOVQ a+8(FP), CX + + // r0 = l0×l0 + MOVQ (CX), AX + MULQ (CX) + MOVQ AX, SI + MOVQ DX, BX + + // r0 += 38×l1×l4 + MOVQ 8(CX), AX + IMUL3Q $0x26, AX, AX + MULQ 32(CX) + ADDQ AX, SI + ADCQ DX, BX + + // r0 += 38×l2×l3 + MOVQ 16(CX), AX + IMUL3Q $0x26, AX, AX + MULQ 24(CX) + ADDQ AX, SI + ADCQ DX, BX + + // r1 = 2×l0×l1 + MOVQ (CX), AX + SHLQ $0x01, AX + MULQ 8(CX) + MOVQ AX, R8 + MOVQ DX, DI + + // r1 += 38×l2×l4 + MOVQ 16(CX), AX + IMUL3Q $0x26, AX, AX + MULQ 32(CX) + ADDQ AX, R8 + ADCQ DX, DI + + // r1 += 19×l3×l3 + MOVQ 24(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 24(CX) + ADDQ AX, R8 + ADCQ DX, DI + + // r2 = 2×l0×l2 + MOVQ (CX), AX + SHLQ $0x01, AX + MULQ 16(CX) + MOVQ AX, R10 + MOVQ DX, R9 + + // r2 += l1×l1 + MOVQ 8(CX), AX + MULQ 8(CX) + ADDQ AX, R10 + ADCQ DX, R9 + + // r2 += 38×l3×l4 + MOVQ 24(CX), AX + IMUL3Q $0x26, AX, AX + MULQ 32(CX) + ADDQ AX, R10 + ADCQ DX, R9 + + // r3 = 2×l0×l3 + MOVQ (CX), AX + SHLQ $0x01, AX + MULQ 24(CX) + MOVQ AX, R12 + MOVQ DX, R11 + + // r3 += 2×l1×l2 + MOVQ 8(CX), AX + IMUL3Q $0x02, AX, AX + MULQ 16(CX) + ADDQ AX, R12 + ADCQ DX, R11 + + // r3 += 19×l4×l4 + MOVQ 32(CX), AX + IMUL3Q $0x13, AX, AX + MULQ 32(CX) + ADDQ AX, R12 + ADCQ DX, R11 + + // r4 = 2×l0×l4 + MOVQ (CX), AX + SHLQ $0x01, AX + MULQ 32(CX) + MOVQ AX, R14 + MOVQ DX, R13 + + // r4 += 2×l1×l3 + MOVQ 8(CX), AX + IMUL3Q $0x02, AX, AX + MULQ 24(CX) + ADDQ AX, R14 + ADCQ DX, R13 + + // r4 += l2×l2 + MOVQ 16(CX), AX + MULQ 16(CX) + ADDQ AX, R14 + ADCQ DX, R13 + + // First reduction chain + MOVQ $0x0007ffffffffffff, AX + SHLQ $0x0d, SI, BX + SHLQ $0x0d, R8, DI + SHLQ $0x0d, R10, R9 + SHLQ $0x0d, R12, R11 + SHLQ $0x0d, R14, R13 + ANDQ AX, SI + IMUL3Q $0x13, R13, R13 + ADDQ R13, SI + ANDQ AX, R8 + ADDQ BX, R8 + ANDQ AX, R10 + ADDQ DI, R10 + ANDQ AX, R12 + ADDQ R9, R12 + ANDQ AX, R14 + ADDQ R11, R14 + + // Second reduction chain (carryPropagate) + MOVQ SI, BX + SHRQ $0x33, BX + MOVQ R8, DI + SHRQ $0x33, DI + MOVQ R10, R9 + SHRQ $0x33, R9 + MOVQ R12, R11 + SHRQ $0x33, R11 + MOVQ R14, R13 + SHRQ $0x33, R13 + ANDQ AX, SI + IMUL3Q $0x13, R13, R13 + ADDQ R13, SI + ANDQ AX, R8 + ADDQ BX, R8 + ANDQ AX, R10 + ADDQ DI, R10 + ANDQ AX, R12 + ADDQ R9, R12 + ANDQ AX, R14 + ADDQ R11, R14 + + // Store output + MOVQ out+0(FP), AX + MOVQ SI, (AX) + MOVQ R8, 8(AX) + MOVQ R10, 16(AX) + MOVQ R12, 24(AX) + MOVQ R14, 32(AX) + RET diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64_noasm.go b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64_noasm.go new file mode 100644 index 0000000..ddb6c9b --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_amd64_noasm.go @@ -0,0 +1,12 @@ +// Copyright (c) 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build !amd64 || !gc || purego +// +build !amd64 !gc purego + +package field + +func feMul(v, x, y *Element) { feMulGeneric(v, x, y) } + +func feSquare(v, x *Element) { feSquareGeneric(v, x) } diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.go b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.go new file mode 100644 index 0000000..af459ef --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.go @@ -0,0 +1,16 @@ +// Copyright (c) 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build arm64 && gc && !purego +// +build arm64,gc,!purego + +package field + +//go:noescape +func carryPropagate(v *Element) + +func (v *Element) carryPropagate() *Element { + carryPropagate(v) + return v +} diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.s b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.s new file mode 100644 index 0000000..5c91e45 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64.s @@ -0,0 +1,43 @@ +// Copyright (c) 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build arm64 && gc && !purego +// +build arm64,gc,!purego + +#include "textflag.h" + +// carryPropagate works exactly like carryPropagateGeneric and uses the +// same AND, ADD, and LSR+MADD instructions emitted by the compiler, but +// avoids loading R0-R4 twice and uses LDP and STP. +// +// See https://golang.org/issues/43145 for the main compiler issue. +// +// func carryPropagate(v *Element) +TEXT ·carryPropagate(SB),NOFRAME|NOSPLIT,$0-8 + MOVD v+0(FP), R20 + + LDP 0(R20), (R0, R1) + LDP 16(R20), (R2, R3) + MOVD 32(R20), R4 + + AND $0x7ffffffffffff, R0, R10 + AND $0x7ffffffffffff, R1, R11 + AND $0x7ffffffffffff, R2, R12 + AND $0x7ffffffffffff, R3, R13 + AND $0x7ffffffffffff, R4, R14 + + ADD R0>>51, R11, R11 + ADD R1>>51, R12, R12 + ADD R2>>51, R13, R13 + ADD R3>>51, R14, R14 + // R4>>51 * 19 + R10 -> R10 + LSR $51, R4, R21 + MOVD $19, R22 + MADD R22, R10, R21, R10 + + STP (R10, R11), 0(R20) + STP (R12, R13), 16(R20) + MOVD R14, 32(R20) + + RET diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64_noasm.go b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64_noasm.go new file mode 100644 index 0000000..234a5b2 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_arm64_noasm.go @@ -0,0 +1,12 @@ +// Copyright (c) 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !arm64 || !gc || purego +// +build !arm64 !gc purego + +package field + +func (v *Element) carryPropagate() *Element { + return v.carryPropagateGeneric() +} diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/fe_generic.go b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_generic.go new file mode 100644 index 0000000..7b5b78c --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/fe_generic.go @@ -0,0 +1,264 @@ +// Copyright (c) 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package field + +import "math/bits" + +// uint128 holds a 128-bit number as two 64-bit limbs, for use with the +// bits.Mul64 and bits.Add64 intrinsics. +type uint128 struct { + lo, hi uint64 +} + +// mul64 returns a * b. +func mul64(a, b uint64) uint128 { + hi, lo := bits.Mul64(a, b) + return uint128{lo, hi} +} + +// addMul64 returns v + a * b. +func addMul64(v uint128, a, b uint64) uint128 { + hi, lo := bits.Mul64(a, b) + lo, c := bits.Add64(lo, v.lo, 0) + hi, _ = bits.Add64(hi, v.hi, c) + return uint128{lo, hi} +} + +// shiftRightBy51 returns a >> 51. a is assumed to be at most 115 bits. +func shiftRightBy51(a uint128) uint64 { + return (a.hi << (64 - 51)) | (a.lo >> 51) +} + +func feMulGeneric(v, a, b *Element) { + a0 := a.l0 + a1 := a.l1 + a2 := a.l2 + a3 := a.l3 + a4 := a.l4 + + b0 := b.l0 + b1 := b.l1 + b2 := b.l2 + b3 := b.l3 + b4 := b.l4 + + // Limb multiplication works like pen-and-paper columnar multiplication, but + // with 51-bit limbs instead of digits. + // + // a4 a3 a2 a1 a0 x + // b4 b3 b2 b1 b0 = + // ------------------------ + // a4b0 a3b0 a2b0 a1b0 a0b0 + + // a4b1 a3b1 a2b1 a1b1 a0b1 + + // a4b2 a3b2 a2b2 a1b2 a0b2 + + // a4b3 a3b3 a2b3 a1b3 a0b3 + + // a4b4 a3b4 a2b4 a1b4 a0b4 = + // ---------------------------------------------- + // r8 r7 r6 r5 r4 r3 r2 r1 r0 + // + // We can then use the reduction identity (a * 2²⁵⁵ + b = a * 19 + b) to + // reduce the limbs that would overflow 255 bits. r5 * 2²⁵⁵ becomes 19 * r5, + // r6 * 2³⁰⁶ becomes 19 * r6 * 2⁵¹, etc. + // + // Reduction can be carried out simultaneously to multiplication. For + // example, we do not compute r5: whenever the result of a multiplication + // belongs to r5, like a1b4, we multiply it by 19 and add the result to r0. + // + // a4b0 a3b0 a2b0 a1b0 a0b0 + + // a3b1 a2b1 a1b1 a0b1 19×a4b1 + + // a2b2 a1b2 a0b2 19×a4b2 19×a3b2 + + // a1b3 a0b3 19×a4b3 19×a3b3 19×a2b3 + + // a0b4 19×a4b4 19×a3b4 19×a2b4 19×a1b4 = + // -------------------------------------- + // r4 r3 r2 r1 r0 + // + // Finally we add up the columns into wide, overlapping limbs. + + a1_19 := a1 * 19 + a2_19 := a2 * 19 + a3_19 := a3 * 19 + a4_19 := a4 * 19 + + // r0 = a0×b0 + 19×(a1×b4 + a2×b3 + a3×b2 + a4×b1) + r0 := mul64(a0, b0) + r0 = addMul64(r0, a1_19, b4) + r0 = addMul64(r0, a2_19, b3) + r0 = addMul64(r0, a3_19, b2) + r0 = addMul64(r0, a4_19, b1) + + // r1 = a0×b1 + a1×b0 + 19×(a2×b4 + a3×b3 + a4×b2) + r1 := mul64(a0, b1) + r1 = addMul64(r1, a1, b0) + r1 = addMul64(r1, a2_19, b4) + r1 = addMul64(r1, a3_19, b3) + r1 = addMul64(r1, a4_19, b2) + + // r2 = a0×b2 + a1×b1 + a2×b0 + 19×(a3×b4 + a4×b3) + r2 := mul64(a0, b2) + r2 = addMul64(r2, a1, b1) + r2 = addMul64(r2, a2, b0) + r2 = addMul64(r2, a3_19, b4) + r2 = addMul64(r2, a4_19, b3) + + // r3 = a0×b3 + a1×b2 + a2×b1 + a3×b0 + 19×a4×b4 + r3 := mul64(a0, b3) + r3 = addMul64(r3, a1, b2) + r3 = addMul64(r3, a2, b1) + r3 = addMul64(r3, a3, b0) + r3 = addMul64(r3, a4_19, b4) + + // r4 = a0×b4 + a1×b3 + a2×b2 + a3×b1 + a4×b0 + r4 := mul64(a0, b4) + r4 = addMul64(r4, a1, b3) + r4 = addMul64(r4, a2, b2) + r4 = addMul64(r4, a3, b1) + r4 = addMul64(r4, a4, b0) + + // After the multiplication, we need to reduce (carry) the five coefficients + // to obtain a result with limbs that are at most slightly larger than 2⁵¹, + // to respect the Element invariant. + // + // Overall, the reduction works the same as carryPropagate, except with + // wider inputs: we take the carry for each coefficient by shifting it right + // by 51, and add it to the limb above it. 
The top carry is multiplied by 19 + // according to the reduction identity and added to the lowest limb. + // + // The largest coefficient (r0) will be at most 111 bits, which guarantees + // that all carries are at most 111 - 51 = 60 bits, which fits in a uint64. + // + // r0 = a0×b0 + 19×(a1×b4 + a2×b3 + a3×b2 + a4×b1) + // r0 < 2⁵²×2⁵² + 19×(2⁵²×2⁵² + 2⁵²×2⁵² + 2⁵²×2⁵² + 2⁵²×2⁵²) + // r0 < (1 + 19 × 4) × 2⁵² × 2⁵² + // r0 < 2⁷ × 2⁵² × 2⁵² + // r0 < 2¹¹¹ + // + // Moreover, the top coefficient (r4) is at most 107 bits, so c4 is at most + // 56 bits, and c4 * 19 is at most 61 bits, which again fits in a uint64 and + // allows us to easily apply the reduction identity. + // + // r4 = a0×b4 + a1×b3 + a2×b2 + a3×b1 + a4×b0 + // r4 < 5 × 2⁵² × 2⁵² + // r4 < 2¹⁰⁷ + // + + c0 := shiftRightBy51(r0) + c1 := shiftRightBy51(r1) + c2 := shiftRightBy51(r2) + c3 := shiftRightBy51(r3) + c4 := shiftRightBy51(r4) + + rr0 := r0.lo&maskLow51Bits + c4*19 + rr1 := r1.lo&maskLow51Bits + c0 + rr2 := r2.lo&maskLow51Bits + c1 + rr3 := r3.lo&maskLow51Bits + c2 + rr4 := r4.lo&maskLow51Bits + c3 + + // Now all coefficients fit into 64-bit registers but are still too large to + // be passed around as a Element. We therefore do one last carry chain, + // where the carries will be small enough to fit in the wiggle room above 2⁵¹. + *v = Element{rr0, rr1, rr2, rr3, rr4} + v.carryPropagate() +} + +func feSquareGeneric(v, a *Element) { + l0 := a.l0 + l1 := a.l1 + l2 := a.l2 + l3 := a.l3 + l4 := a.l4 + + // Squaring works precisely like multiplication above, but thanks to its + // symmetry we get to group a few terms together. + // + // l4 l3 l2 l1 l0 x + // l4 l3 l2 l1 l0 = + // ------------------------ + // l4l0 l3l0 l2l0 l1l0 l0l0 + + // l4l1 l3l1 l2l1 l1l1 l0l1 + + // l4l2 l3l2 l2l2 l1l2 l0l2 + + // l4l3 l3l3 l2l3 l1l3 l0l3 + + // l4l4 l3l4 l2l4 l1l4 l0l4 = + // ---------------------------------------------- + // r8 r7 r6 r5 r4 r3 r2 r1 r0 + // + // l4l0 l3l0 l2l0 l1l0 l0l0 + + // l3l1 l2l1 l1l1 l0l1 19×l4l1 + + // l2l2 l1l2 l0l2 19×l4l2 19×l3l2 + + // l1l3 l0l3 19×l4l3 19×l3l3 19×l2l3 + + // l0l4 19×l4l4 19×l3l4 19×l2l4 19×l1l4 = + // -------------------------------------- + // r4 r3 r2 r1 r0 + // + // With precomputed 2×, 19×, and 2×19× terms, we can compute each limb with + // only three Mul64 and four Add64, instead of five and eight. 
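Note: the 19× and 38× (= 2×19) precomputed terms in the comments above come from the reduction identity a·2²⁵⁵ + b ≡ 19·a + b (mod p), since 2²⁵⁵ ≡ 19 for p = 2²⁵⁵ - 19. A minimal math/big check of that identity, with arbitrary small values (illustrative sketch only, not part of the patch):

package main

import (
    "fmt"
    "math/big"
)

func main() {
    // p = 2^255 - 19.
    p := new(big.Int).Lsh(big.NewInt(1), 255)
    p.Sub(p, big.NewInt(19))

    a := big.NewInt(123456789)
    b := big.NewInt(987654321)

    // a*2^255 + b mod p
    lhs := new(big.Int).Lsh(a, 255)
    lhs.Add(lhs, b)
    lhs.Mod(lhs, p)

    // 19*a + b mod p
    rhs := new(big.Int).Mul(a, big.NewInt(19))
    rhs.Add(rhs, b)
    rhs.Mod(rhs, p)

    fmt.Println(lhs.Cmp(rhs) == 0) // true: a·2²⁵⁵ + b ≡ 19·a + b (mod p)
}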
+ + l0_2 := l0 * 2 + l1_2 := l1 * 2 + + l1_38 := l1 * 38 + l2_38 := l2 * 38 + l3_38 := l3 * 38 + + l3_19 := l3 * 19 + l4_19 := l4 * 19 + + // r0 = l0×l0 + 19×(l1×l4 + l2×l3 + l3×l2 + l4×l1) = l0×l0 + 19×2×(l1×l4 + l2×l3) + r0 := mul64(l0, l0) + r0 = addMul64(r0, l1_38, l4) + r0 = addMul64(r0, l2_38, l3) + + // r1 = l0×l1 + l1×l0 + 19×(l2×l4 + l3×l3 + l4×l2) = 2×l0×l1 + 19×2×l2×l4 + 19×l3×l3 + r1 := mul64(l0_2, l1) + r1 = addMul64(r1, l2_38, l4) + r1 = addMul64(r1, l3_19, l3) + + // r2 = l0×l2 + l1×l1 + l2×l0 + 19×(l3×l4 + l4×l3) = 2×l0×l2 + l1×l1 + 19×2×l3×l4 + r2 := mul64(l0_2, l2) + r2 = addMul64(r2, l1, l1) + r2 = addMul64(r2, l3_38, l4) + + // r3 = l0×l3 + l1×l2 + l2×l1 + l3×l0 + 19×l4×l4 = 2×l0×l3 + 2×l1×l2 + 19×l4×l4 + r3 := mul64(l0_2, l3) + r3 = addMul64(r3, l1_2, l2) + r3 = addMul64(r3, l4_19, l4) + + // r4 = l0×l4 + l1×l3 + l2×l2 + l3×l1 + l4×l0 = 2×l0×l4 + 2×l1×l3 + l2×l2 + r4 := mul64(l0_2, l4) + r4 = addMul64(r4, l1_2, l3) + r4 = addMul64(r4, l2, l2) + + c0 := shiftRightBy51(r0) + c1 := shiftRightBy51(r1) + c2 := shiftRightBy51(r2) + c3 := shiftRightBy51(r3) + c4 := shiftRightBy51(r4) + + rr0 := r0.lo&maskLow51Bits + c4*19 + rr1 := r1.lo&maskLow51Bits + c0 + rr2 := r2.lo&maskLow51Bits + c1 + rr3 := r3.lo&maskLow51Bits + c2 + rr4 := r4.lo&maskLow51Bits + c3 + + *v = Element{rr0, rr1, rr2, rr3, rr4} + v.carryPropagate() +} + +// carryPropagate brings the limbs below 52 bits by applying the reduction +// identity (a * 2²⁵⁵ + b = a * 19 + b) to the l4 carry. TODO inline +func (v *Element) carryPropagateGeneric() *Element { + c0 := v.l0 >> 51 + c1 := v.l1 >> 51 + c2 := v.l2 >> 51 + c3 := v.l3 >> 51 + c4 := v.l4 >> 51 + + v.l0 = v.l0&maskLow51Bits + c4*19 + v.l1 = v.l1&maskLow51Bits + c0 + v.l2 = v.l2&maskLow51Bits + c1 + v.l3 = v.l3&maskLow51Bits + c2 + v.l4 = v.l4&maskLow51Bits + c3 + + return v +} diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/sync.checkpoint b/vendor/golang.org/x/crypto/curve25519/internal/field/sync.checkpoint new file mode 100644 index 0000000..e3685f9 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/sync.checkpoint @@ -0,0 +1 @@ +b0c49ae9f59d233526f8934262c5bbbe14d4358d diff --git a/vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh b/vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh new file mode 100644 index 0000000..1ba22a8 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/internal/field/sync.sh @@ -0,0 +1,19 @@ +#! /bin/bash +set -euo pipefail + +cd "$(git rev-parse --show-toplevel)" + +STD_PATH=src/crypto/ed25519/internal/edwards25519/field +LOCAL_PATH=curve25519/internal/field +LAST_SYNC_REF=$(cat $LOCAL_PATH/sync.checkpoint) + +git fetch https://go.googlesource.com/go master + +if git diff --quiet $LAST_SYNC_REF:$STD_PATH FETCH_HEAD:$STD_PATH; then + echo "No changes." +else + NEW_REF=$(git rev-parse FETCH_HEAD | tee $LOCAL_PATH/sync.checkpoint) + echo "Applying changes from $LAST_SYNC_REF to $NEW_REF..." + git diff $LAST_SYNC_REF:$STD_PATH FETCH_HEAD:$STD_PATH | \ + git apply -3 --directory=$LOCAL_PATH +fi diff --git a/vendor/golang.org/x/crypto/ed25519/ed25519.go b/vendor/golang.org/x/crypto/ed25519/ed25519.go index c7f8c7e..71ad917 100644 --- a/vendor/golang.org/x/crypto/ed25519/ed25519.go +++ b/vendor/golang.org/x/crypto/ed25519/ed25519.go @@ -5,6 +5,7 @@ // In Go 1.13, the ed25519 package was promoted to the standard library as // crypto/ed25519, and this package became a wrapper for the standard library one. 
// +//go:build !go1.13 // +build !go1.13 // Package ed25519 implements the Ed25519 signature algorithm. See diff --git a/vendor/golang.org/x/crypto/ed25519/ed25519_go113.go b/vendor/golang.org/x/crypto/ed25519/ed25519_go113.go index d1448d8..b5974dc 100644 --- a/vendor/golang.org/x/crypto/ed25519/ed25519_go113.go +++ b/vendor/golang.org/x/crypto/ed25519/ed25519_go113.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build go1.13 // +build go1.13 // Package ed25519 implements the Ed25519 signature algorithm. See diff --git a/vendor/golang.org/x/crypto/poly1305/bits_compat.go b/vendor/golang.org/x/crypto/internal/poly1305/bits_compat.go similarity index 98% rename from vendor/golang.org/x/crypto/poly1305/bits_compat.go rename to vendor/golang.org/x/crypto/internal/poly1305/bits_compat.go index 157a69f..45b5c96 100644 --- a/vendor/golang.org/x/crypto/poly1305/bits_compat.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/bits_compat.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build !go1.13 // +build !go1.13 package poly1305 diff --git a/vendor/golang.org/x/crypto/poly1305/bits_go1.13.go b/vendor/golang.org/x/crypto/internal/poly1305/bits_go1.13.go similarity index 96% rename from vendor/golang.org/x/crypto/poly1305/bits_go1.13.go rename to vendor/golang.org/x/crypto/internal/poly1305/bits_go1.13.go index a0a185f..ed52b34 100644 --- a/vendor/golang.org/x/crypto/poly1305/bits_go1.13.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/bits_go1.13.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build go1.13 // +build go1.13 package poly1305 diff --git a/vendor/golang.org/x/crypto/poly1305/mac_noasm.go b/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go similarity index 66% rename from vendor/golang.org/x/crypto/poly1305/mac_noasm.go rename to vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go index d118f30..f184b67 100644 --- a/vendor/golang.org/x/crypto/poly1305/mac_noasm.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !amd64,!ppc64le,!s390x gccgo purego +//go:build (!amd64 && !ppc64le && !s390x) || !gc || purego +// +build !amd64,!ppc64le,!s390x !gc purego package poly1305 diff --git a/vendor/golang.org/x/crypto/poly1305/poly1305.go b/vendor/golang.org/x/crypto/internal/poly1305/poly1305.go similarity index 98% rename from vendor/golang.org/x/crypto/poly1305/poly1305.go rename to vendor/golang.org/x/crypto/internal/poly1305/poly1305.go index 9d7a6af..4aaea81 100644 --- a/vendor/golang.org/x/crypto/poly1305/poly1305.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/poly1305.go @@ -15,7 +15,7 @@ // used with a fixed key in order to generate one-time keys from an nonce. // However, in this package AES isn't used and the one-time key is specified // directly. 
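Note: the build-constraint changes repeated throughout these hunks pair the newer //go:build expression (understood by Go 1.17+) with the legacy // +build line, so older toolchains keep selecting the same files. A minimal illustrative file header using the same amd64/gc/!purego combination as the fe_amd64 files above (the package name is a placeholder, not from the patch):

//go:build amd64 && gc && !purego
// +build amd64,gc,!purego

// Package example is a placeholder showing only the dual-constraint header.
package example

In the new syntax the operators &&, || and ! are explicit, whereas in the legacy // +build line a comma means AND and a space means OR.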
-package poly1305 // import "golang.org/x/crypto/poly1305" +package poly1305 import "crypto/subtle" diff --git a/vendor/golang.org/x/crypto/poly1305/sum_amd64.go b/vendor/golang.org/x/crypto/internal/poly1305/sum_amd64.go similarity index 95% rename from vendor/golang.org/x/crypto/poly1305/sum_amd64.go rename to vendor/golang.org/x/crypto/internal/poly1305/sum_amd64.go index 99e5a1d..6d52233 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_amd64.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_amd64.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego package poly1305 diff --git a/vendor/golang.org/x/crypto/poly1305/sum_amd64.s b/vendor/golang.org/x/crypto/internal/poly1305/sum_amd64.s similarity index 98% rename from vendor/golang.org/x/crypto/poly1305/sum_amd64.s rename to vendor/golang.org/x/crypto/internal/poly1305/sum_amd64.s index 8d394a2..1d74f0f 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_amd64.s +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_amd64.s @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego #include "textflag.h" diff --git a/vendor/golang.org/x/crypto/poly1305/sum_generic.go b/vendor/golang.org/x/crypto/internal/poly1305/sum_generic.go similarity index 100% rename from vendor/golang.org/x/crypto/poly1305/sum_generic.go rename to vendor/golang.org/x/crypto/internal/poly1305/sum_generic.go diff --git a/vendor/golang.org/x/crypto/poly1305/sum_ppc64le.go b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go similarity index 95% rename from vendor/golang.org/x/crypto/poly1305/sum_ppc64le.go rename to vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go index 2e7a120..4a06994 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_ppc64le.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego package poly1305 diff --git a/vendor/golang.org/x/crypto/poly1305/sum_ppc64le.s b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s similarity index 94% rename from vendor/golang.org/x/crypto/poly1305/sum_ppc64le.s rename to vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s index 4e02813..58422aa 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_ppc64le.s +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego #include "textflag.h" @@ -82,7 +83,7 @@ multiply: BGE loop bytes_between_0_and_15: - CMP $0, R5 + CMP R5, $0 BEQ done MOVD $0, R16 // h0 MOVD $0, R17 // h1 @@ -122,7 +123,7 @@ just1: // Exactly 8 MOVD (R4), R16 - CMP $0, R17 + CMP R17, $0 // Check if we've already set R17; if not // set 1 to indicate end of msg. 
@@ -151,7 +152,7 @@ less4: ADD $2, R4 less2: - CMP $0, R5 + CMP R5, $0 BEQ insert1 MOVBZ (R4), R21 SLD R22, R21, R21 @@ -166,12 +167,12 @@ insert1: carry: // Add new values to h0, h1, h2 - ADDC R16, R8 - ADDE R17, R9 - ADDE $0, R10 - MOVD $16, R5 - ADD R5, R4 - BR multiply + ADDC R16, R8 + ADDE R17, R9 + ADDZE R10, R10 + MOVD $16, R5 + ADD R5, R4 + BR multiply done: // Save h0, h1, h2 in state diff --git a/vendor/golang.org/x/crypto/poly1305/sum_s390x.go b/vendor/golang.org/x/crypto/internal/poly1305/sum_s390x.go similarity index 97% rename from vendor/golang.org/x/crypto/poly1305/sum_s390x.go rename to vendor/golang.org/x/crypto/internal/poly1305/sum_s390x.go index 958fedc..62cc9f8 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_s390x.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_s390x.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego package poly1305 diff --git a/vendor/golang.org/x/crypto/poly1305/sum_s390x.s b/vendor/golang.org/x/crypto/internal/poly1305/sum_s390x.s similarity index 99% rename from vendor/golang.org/x/crypto/poly1305/sum_s390x.s rename to vendor/golang.org/x/crypto/internal/poly1305/sum_s390x.s index 0fa9ee6..aa9e049 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_s390x.s +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_s390x.s @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !gccgo,!purego +//go:build gc && !purego +// +build gc,!purego #include "textflag.h" @@ -17,7 +18,7 @@ // value. These limbs are, for the most part, zero extended and // placed into 64-bit vector register elements. Each vector // register is 128-bits wide and so holds 2 of these elements. -// Using 26-bit limbs allows us plenty of headroom to accomodate +// Using 26-bit limbs allows us plenty of headroom to accommodate // accumulations before and after multiplication without // overflowing either 32-bits (before multiplication) or 64-bits // (after multiplication). diff --git a/vendor/golang.org/x/crypto/internal/subtle/aliasing.go b/vendor/golang.org/x/crypto/internal/subtle/aliasing.go index f38797b..4fad24f 100644 --- a/vendor/golang.org/x/crypto/internal/subtle/aliasing.go +++ b/vendor/golang.org/x/crypto/internal/subtle/aliasing.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !appengine +//go:build !purego +// +build !purego // Package subtle implements functions that are often useful in cryptographic // code but require careful thought to use correctly. diff --git a/vendor/golang.org/x/crypto/internal/subtle/aliasing_appengine.go b/vendor/golang.org/x/crypto/internal/subtle/aliasing_purego.go similarity index 97% rename from vendor/golang.org/x/crypto/internal/subtle/aliasing_appengine.go rename to vendor/golang.org/x/crypto/internal/subtle/aliasing_purego.go index 0cc4a8a..80ccbed 100644 --- a/vendor/golang.org/x/crypto/internal/subtle/aliasing_appengine.go +++ b/vendor/golang.org/x/crypto/internal/subtle/aliasing_purego.go @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build appengine +//go:build purego +// +build purego // Package subtle implements functions that are often useful in cryptographic // code but require careful thought to use correctly. 
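Note: Element.Equal in the field code above compares encodings via crypto/subtle rather than bytes.Equal, so the comparison time does not depend on where two inputs differ, and Element.Select uses the same mask-based selection idea that crypto/subtle exposes for integers. A small standalone illustration of those standard-library primitives (this is the stdlib crypto/subtle package, not the x/crypto internal/subtle package being renamed above):

package main

import (
    "crypto/subtle"
    "fmt"
)

func main() {
    a := []byte{1, 2, 3, 4}
    b := []byte{1, 2, 3, 4}
    c := []byte{1, 2, 3, 5}

    // ConstantTimeCompare returns 1 for equal slices of equal length,
    // 0 otherwise, without short-circuiting on the first mismatch.
    fmt.Println(subtle.ConstantTimeCompare(a, b)) // 1
    fmt.Println(subtle.ConstantTimeCompare(a, c)) // 0

    // ConstantTimeSelect(v, x, y) returns x if v == 1 and y if v == 0,
    // the same select-by-mask idea used by Element.Select above.
    fmt.Println(subtle.ConstantTimeSelect(1, 10, 20)) // 10
    fmt.Println(subtle.ConstantTimeSelect(0, 10, 20)) // 20
}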
diff --git a/vendor/golang.org/x/crypto/openpgp/armor/armor.go b/vendor/golang.org/x/crypto/openpgp/armor/armor.go index 36a6804..ebc8787 100644 --- a/vendor/golang.org/x/crypto/openpgp/armor/armor.go +++ b/vendor/golang.org/x/crypto/openpgp/armor/armor.go @@ -4,6 +4,12 @@ // Package armor implements OpenPGP ASCII Armor, see RFC 4880. OpenPGP Armor is // very similar to PEM except that it has an additional CRC checksum. +// +// Deprecated: this package is unmaintained except for security fixes. New +// applications should consider a more focused, modern alternative to OpenPGP +// for their specific task. If you are required to interoperate with OpenPGP +// systems and need a maintained package, consider a community fork. +// See https://golang.org/issue/44226. package armor // import "golang.org/x/crypto/openpgp/armor" import ( diff --git a/vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go b/vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go index 72a6a73..84396a0 100644 --- a/vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go +++ b/vendor/golang.org/x/crypto/openpgp/elgamal/elgamal.go @@ -10,6 +10,12 @@ // This form of ElGamal embeds PKCS#1 v1.5 padding, which may make it // unsuitable for other protocols. RSA should be used in preference in any // case. +// +// Deprecated: this package was only provided to support ElGamal encryption in +// OpenPGP. The golang.org/x/crypto/openpgp package is now deprecated (see +// https://golang.org/issue/44226), and ElGamal in the OpenPGP ecosystem has +// compatibility and security issues (see https://eprint.iacr.org/2021/923). +// Moreover, this package doesn't protect against side-channel attacks. package elgamal // import "golang.org/x/crypto/openpgp/elgamal" import ( diff --git a/vendor/golang.org/x/crypto/openpgp/errors/errors.go b/vendor/golang.org/x/crypto/openpgp/errors/errors.go index eb0550b..1d7a0ea 100644 --- a/vendor/golang.org/x/crypto/openpgp/errors/errors.go +++ b/vendor/golang.org/x/crypto/openpgp/errors/errors.go @@ -3,6 +3,12 @@ // license that can be found in the LICENSE file. // Package errors contains common error types for the OpenPGP packages. +// +// Deprecated: this package is unmaintained except for security fixes. New +// applications should consider a more focused, modern alternative to OpenPGP +// for their specific task. If you are required to interoperate with OpenPGP +// systems and need a maintained package, consider a community fork. +// See https://golang.org/issue/44226. package errors // import "golang.org/x/crypto/openpgp/errors" import ( diff --git a/vendor/golang.org/x/crypto/openpgp/packet/packet.go b/vendor/golang.org/x/crypto/openpgp/packet/packet.go index 9728d61..0a19794 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/packet.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/packet.go @@ -4,6 +4,12 @@ // Package packet implements parsing and serialization of OpenPGP packets, as // specified in RFC 4880. +// +// Deprecated: this package is unmaintained except for security fixes. New +// applications should consider a more focused, modern alternative to OpenPGP +// for their specific task. If you are required to interoperate with OpenPGP +// systems and need a maintained package, consider a community fork. +// See https://golang.org/issue/44226. 
package packet // import "golang.org/x/crypto/openpgp/packet" import ( diff --git a/vendor/golang.org/x/crypto/openpgp/read.go b/vendor/golang.org/x/crypto/openpgp/read.go index 6ec664f..48a8931 100644 --- a/vendor/golang.org/x/crypto/openpgp/read.go +++ b/vendor/golang.org/x/crypto/openpgp/read.go @@ -3,6 +3,12 @@ // license that can be found in the LICENSE file. // Package openpgp implements high level operations on OpenPGP messages. +// +// Deprecated: this package is unmaintained except for security fixes. New +// applications should consider a more focused, modern alternative to OpenPGP +// for their specific task. If you are required to interoperate with OpenPGP +// systems and need a maintained package, consider a community fork. +// See https://golang.org/issue/44226. package openpgp // import "golang.org/x/crypto/openpgp" import ( diff --git a/vendor/golang.org/x/crypto/openpgp/s2k/s2k.go b/vendor/golang.org/x/crypto/openpgp/s2k/s2k.go index 4b9a44c..9de0495 100644 --- a/vendor/golang.org/x/crypto/openpgp/s2k/s2k.go +++ b/vendor/golang.org/x/crypto/openpgp/s2k/s2k.go @@ -4,6 +4,12 @@ // Package s2k implements the various OpenPGP string-to-key transforms as // specified in RFC 4800 section 3.7.1. +// +// Deprecated: this package is unmaintained except for security fixes. New +// applications should consider a more focused, modern alternative to OpenPGP +// for their specific task. If you are required to interoperate with OpenPGP +// systems and need a maintained package, consider a community fork. +// See https://golang.org/issue/44226. package s2k // import "golang.org/x/crypto/openpgp/s2k" import ( diff --git a/vendor/golang.org/x/crypto/scrypt/scrypt.go b/vendor/golang.org/x/crypto/scrypt/scrypt.go index 2f81fe4..bbe4494 100644 --- a/vendor/golang.org/x/crypto/scrypt/scrypt.go +++ b/vendor/golang.org/x/crypto/scrypt/scrypt.go @@ -9,6 +9,7 @@ package scrypt // import "golang.org/x/crypto/scrypt" import ( "crypto/sha256" + "encoding/binary" "errors" "math/bits" @@ -143,36 +144,34 @@ func integer(b []uint32, r int) uint64 { func smix(b []byte, r, N int, v, xy []uint32) { var tmp [16]uint32 + R := 32 * r x := xy - y := xy[32*r:] + y := xy[R:] j := 0 - for i := 0; i < 32*r; i++ { - x[i] = uint32(b[j]) | uint32(b[j+1])<<8 | uint32(b[j+2])<<16 | uint32(b[j+3])<<24 + for i := 0; i < R; i++ { + x[i] = binary.LittleEndian.Uint32(b[j:]) j += 4 } for i := 0; i < N; i += 2 { - blockCopy(v[i*(32*r):], x, 32*r) + blockCopy(v[i*R:], x, R) blockMix(&tmp, x, y, r) - blockCopy(v[(i+1)*(32*r):], y, 32*r) + blockCopy(v[(i+1)*R:], y, R) blockMix(&tmp, y, x, r) } for i := 0; i < N; i += 2 { j := int(integer(x, r) & uint64(N-1)) - blockXOR(x, v[j*(32*r):], 32*r) + blockXOR(x, v[j*R:], R) blockMix(&tmp, x, y, r) j = int(integer(y, r) & uint64(N-1)) - blockXOR(y, v[j*(32*r):], 32*r) + blockXOR(y, v[j*R:], R) blockMix(&tmp, y, x, r) } j = 0 - for _, v := range x[:32*r] { - b[j+0] = byte(v >> 0) - b[j+1] = byte(v >> 8) - b[j+2] = byte(v >> 16) - b[j+3] = byte(v >> 24) + for _, v := range x[:R] { + binary.LittleEndian.PutUint32(b[j:], v) j += 4 } } diff --git a/vendor/golang.org/x/crypto/ssh/cipher.go b/vendor/golang.org/x/crypto/ssh/cipher.go index 8bd6b3d..bddbde5 100644 --- a/vendor/golang.org/x/crypto/ssh/cipher.go +++ b/vendor/golang.org/x/crypto/ssh/cipher.go @@ -18,7 +18,7 @@ import ( "io/ioutil" "golang.org/x/crypto/chacha20" - "golang.org/x/crypto/poly1305" + "golang.org/x/crypto/internal/poly1305" ) const ( diff --git a/vendor/golang.org/x/crypto/ssh/client.go 
b/vendor/golang.org/x/crypto/ssh/client.go index 7b00bff..99f68bd 100644 --- a/vendor/golang.org/x/crypto/ssh/client.go +++ b/vendor/golang.org/x/crypto/ssh/client.go @@ -77,7 +77,7 @@ func NewClientConn(c net.Conn, addr string, config *ClientConfig) (Conn, <-chan } conn := &connection{ - sshConn: sshConn{conn: c}, + sshConn: sshConn{conn: c, user: fullConf.User}, } if err := conn.clientHandshake(addr, &fullConf); err != nil { diff --git a/vendor/golang.org/x/crypto/ssh/client_auth.go b/vendor/golang.org/x/crypto/ssh/client_auth.go index f326565..c611aeb 100644 --- a/vendor/golang.org/x/crypto/ssh/client_auth.go +++ b/vendor/golang.org/x/crypto/ssh/client_auth.go @@ -471,7 +471,7 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe } if len(answers) != len(prompts) { - return authFailure, nil, errors.New("ssh: not enough answers from keyboard-interactive callback") + return authFailure, nil, fmt.Errorf("ssh: incorrect number of answers from keyboard-interactive callback %d (expected %d)", len(answers), len(prompts)) } responseLength := 1 + 4 for _, a := range answers { diff --git a/vendor/golang.org/x/crypto/ssh/kex.go b/vendor/golang.org/x/crypto/ssh/kex.go index 7eedb20..766e929 100644 --- a/vendor/golang.org/x/crypto/ssh/kex.go +++ b/vendor/golang.org/x/crypto/ssh/kex.go @@ -557,8 +557,6 @@ type dhGEXSHA struct { hashFunc crypto.Hash } -const numMRTests = 64 - const ( dhGroupExchangeMinimumBits = 2048 dhGroupExchangePreferredBits = 2048 @@ -602,15 +600,8 @@ func (gex dhGEXSHA) Client(c packetConn, randSource io.Reader, magics *handshake gex.p = kexDHGexGroup.P gex.g = kexDHGexGroup.G - // Check if p is safe by verifing that p and (p-1)/2 are primes - one := big.NewInt(1) - var pHalf = &big.Int{} - pHalf.Rsh(gex.p, 1) - if !gex.p.ProbablyPrime(numMRTests) || !pHalf.ProbablyPrime(numMRTests) { - return nil, fmt.Errorf("ssh: server provided gex p is not safe") - } - // Check if g is safe by verifing that g > 1 and g < p - 1 + one := big.NewInt(1) var pMinusOne = &big.Int{} pMinusOne.Sub(gex.p, one) if gex.g.Cmp(one) != 1 && gex.g.Cmp(pMinusOne) != -1 { @@ -618,6 +609,8 @@ func (gex dhGEXSHA) Client(c packetConn, randSource io.Reader, magics *handshake } // Send GexInit + var pHalf = &big.Int{} + pHalf.Rsh(gex.p, 1) x, err := rand.Int(randSource, pHalf) if err != nil { return nil, err diff --git a/vendor/golang.org/x/crypto/ssh/server.go b/vendor/golang.org/x/crypto/ssh/server.go index 7d42a8c..b6911e8 100644 --- a/vendor/golang.org/x/crypto/ssh/server.go +++ b/vendor/golang.org/x/crypto/ssh/server.go @@ -572,6 +572,10 @@ userAuthLoop: perms = candidate.perms } case "gssapi-with-mic": + if config.GSSAPIWithMICConfig == nil { + authErr = errors.New("ssh: gssapi-with-mic auth not configured") + break + } gssapiConfig := config.GSSAPIWithMICConfig userAuthRequestGSSAPI, err := parseGSSAPIPayload(userAuthReq.Payload) if err != nil { diff --git a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go index 2ffb97b..a4d1919 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go @@ -2,986 +2,75 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// Package terminal provides support functions for dealing with terminals, as +// commonly found on UNIX systems. +// +// Deprecated: this package moved to golang.org/x/term. 
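Note: the terminal changes in this part of the patch reduce golang.org/x/crypto/ssh/terminal to a thin wrapper over golang.org/x/term (see the type aliases and delegating functions that follow), so new code can call x/term directly. A small standalone example of the equivalent direct usage (an illustrative program, not part of the patch):

package main

import (
    "fmt"
    "os"

    "golang.org/x/term"
)

func main() {
    fd := int(os.Stdin.Fd())
    if !term.IsTerminal(fd) {
        fmt.Fprintln(os.Stderr, "stdin is not a terminal")
        os.Exit(1)
    }

    // Same behaviour the deprecated terminal.ReadPassword wrapper delegates to:
    // read a line of input without local echo.
    fmt.Print("password: ")
    pw, err := term.ReadPassword(fd)
    fmt.Println()
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    fmt.Printf("read %d bytes\n", len(pw))
}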
package terminal import ( - "bytes" "io" - "runtime" - "strconv" - "sync" - "unicode/utf8" + + "golang.org/x/term" ) // EscapeCodes contains escape sequences that can be written to the terminal in // order to achieve different styles of text. -type EscapeCodes struct { - // Foreground colors - Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte - - // Reset all attributes - Reset []byte -} - -var vt100EscapeCodes = EscapeCodes{ - Black: []byte{keyEscape, '[', '3', '0', 'm'}, - Red: []byte{keyEscape, '[', '3', '1', 'm'}, - Green: []byte{keyEscape, '[', '3', '2', 'm'}, - Yellow: []byte{keyEscape, '[', '3', '3', 'm'}, - Blue: []byte{keyEscape, '[', '3', '4', 'm'}, - Magenta: []byte{keyEscape, '[', '3', '5', 'm'}, - Cyan: []byte{keyEscape, '[', '3', '6', 'm'}, - White: []byte{keyEscape, '[', '3', '7', 'm'}, - - Reset: []byte{keyEscape, '[', '0', 'm'}, -} +type EscapeCodes = term.EscapeCodes // Terminal contains the state for running a VT100 terminal that is capable of // reading lines of input. -type Terminal struct { - // AutoCompleteCallback, if non-null, is called for each keypress with - // the full input line and the current position of the cursor (in - // bytes, as an index into |line|). If it returns ok=false, the key - // press is processed normally. Otherwise it returns a replacement line - // and the new cursor position. - AutoCompleteCallback func(line string, pos int, key rune) (newLine string, newPos int, ok bool) - - // Escape contains a pointer to the escape codes for this terminal. - // It's always a valid pointer, although the escape codes themselves - // may be empty if the terminal doesn't support them. - Escape *EscapeCodes - - // lock protects the terminal and the state in this object from - // concurrent processing of a key press and a Write() call. - lock sync.Mutex - - c io.ReadWriter - prompt []rune - - // line is the current line being entered. - line []rune - // pos is the logical position of the cursor in line - pos int - // echo is true if local echo is enabled - echo bool - // pasteActive is true iff there is a bracketed paste operation in - // progress. - pasteActive bool - - // cursorX contains the current X value of the cursor where the left - // edge is 0. cursorY contains the row number where the first row of - // the current line is 0. - cursorX, cursorY int - // maxLine is the greatest value of cursorY so far. - maxLine int - - termWidth, termHeight int - - // outBuf contains the terminal data to be sent. - outBuf []byte - // remainder contains the remainder of any partial key sequences after - // a read. It aliases into inBuf. - remainder []byte - inBuf [256]byte - - // history contains previously entered commands so that they can be - // accessed with the up and down keys. - history stRingBuffer - // historyIndex stores the currently accessed history entry, where zero - // means the immediately previous entry. - historyIndex int - // When navigating up and down the history it's possible to return to - // the incomplete, initial line. That value is stored in - // historyPending. - historyPending string -} +type Terminal = term.Terminal // NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is // a local terminal, that terminal must first have been put into raw mode. // prompt is a string that is written at the start of each input line (i.e. // "> "). 
func NewTerminal(c io.ReadWriter, prompt string) *Terminal { - return &Terminal{ - Escape: &vt100EscapeCodes, - c: c, - prompt: []rune(prompt), - termWidth: 80, - termHeight: 24, - echo: true, - historyIndex: -1, - } -} - -const ( - keyCtrlC = 3 - keyCtrlD = 4 - keyCtrlU = 21 - keyEnter = '\r' - keyEscape = 27 - keyBackspace = 127 - keyUnknown = 0xd800 /* UTF-16 surrogate area */ + iota - keyUp - keyDown - keyLeft - keyRight - keyAltLeft - keyAltRight - keyHome - keyEnd - keyDeleteWord - keyDeleteLine - keyClearScreen - keyPasteStart - keyPasteEnd -) - -var ( - crlf = []byte{'\r', '\n'} - pasteStart = []byte{keyEscape, '[', '2', '0', '0', '~'} - pasteEnd = []byte{keyEscape, '[', '2', '0', '1', '~'} -) - -// bytesToKey tries to parse a key sequence from b. If successful, it returns -// the key and the remainder of the input. Otherwise it returns utf8.RuneError. -func bytesToKey(b []byte, pasteActive bool) (rune, []byte) { - if len(b) == 0 { - return utf8.RuneError, nil - } - - if !pasteActive { - switch b[0] { - case 1: // ^A - return keyHome, b[1:] - case 2: // ^B - return keyLeft, b[1:] - case 5: // ^E - return keyEnd, b[1:] - case 6: // ^F - return keyRight, b[1:] - case 8: // ^H - return keyBackspace, b[1:] - case 11: // ^K - return keyDeleteLine, b[1:] - case 12: // ^L - return keyClearScreen, b[1:] - case 23: // ^W - return keyDeleteWord, b[1:] - case 14: // ^N - return keyDown, b[1:] - case 16: // ^P - return keyUp, b[1:] - } - } - - if b[0] != keyEscape { - if !utf8.FullRune(b) { - return utf8.RuneError, b - } - r, l := utf8.DecodeRune(b) - return r, b[l:] - } - - if !pasteActive && len(b) >= 3 && b[0] == keyEscape && b[1] == '[' { - switch b[2] { - case 'A': - return keyUp, b[3:] - case 'B': - return keyDown, b[3:] - case 'C': - return keyRight, b[3:] - case 'D': - return keyLeft, b[3:] - case 'H': - return keyHome, b[3:] - case 'F': - return keyEnd, b[3:] - } - } - - if !pasteActive && len(b) >= 6 && b[0] == keyEscape && b[1] == '[' && b[2] == '1' && b[3] == ';' && b[4] == '3' { - switch b[5] { - case 'C': - return keyAltRight, b[6:] - case 'D': - return keyAltLeft, b[6:] - } - } - - if !pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteStart) { - return keyPasteStart, b[6:] - } - - if pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteEnd) { - return keyPasteEnd, b[6:] - } - - // If we get here then we have a key that we don't recognise, or a - // partial sequence. It's not clear how one should find the end of a - // sequence without knowing them all, but it seems that [a-zA-Z~] only - // appears at the end of a sequence. - for i, c := range b[0:] { - if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '~' { - return keyUnknown, b[i+1:] - } - } - - return utf8.RuneError, b -} - -// queue appends data to the end of t.outBuf -func (t *Terminal) queue(data []rune) { - t.outBuf = append(t.outBuf, []byte(string(data))...) -} - -var eraseUnderCursor = []rune{' ', keyEscape, '[', 'D'} -var space = []rune{' '} - -func isPrintable(key rune) bool { - isInSurrogateArea := key >= 0xd800 && key <= 0xdbff - return key >= 32 && !isInSurrogateArea -} - -// moveCursorToPos appends data to t.outBuf which will move the cursor to the -// given, logical position in the text. 
-func (t *Terminal) moveCursorToPos(pos int) { - if !t.echo { - return - } - - x := visualLength(t.prompt) + pos - y := x / t.termWidth - x = x % t.termWidth - - up := 0 - if y < t.cursorY { - up = t.cursorY - y - } - - down := 0 - if y > t.cursorY { - down = y - t.cursorY - } - - left := 0 - if x < t.cursorX { - left = t.cursorX - x - } - - right := 0 - if x > t.cursorX { - right = x - t.cursorX - } - - t.cursorX = x - t.cursorY = y - t.move(up, down, left, right) -} - -func (t *Terminal) move(up, down, left, right int) { - m := []rune{} - - // 1 unit up can be expressed as ^[[A or ^[A - // 5 units up can be expressed as ^[[5A - - if up == 1 { - m = append(m, keyEscape, '[', 'A') - } else if up > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(up))...) - m = append(m, 'A') - } - - if down == 1 { - m = append(m, keyEscape, '[', 'B') - } else if down > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(down))...) - m = append(m, 'B') - } - - if right == 1 { - m = append(m, keyEscape, '[', 'C') - } else if right > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(right))...) - m = append(m, 'C') - } - - if left == 1 { - m = append(m, keyEscape, '[', 'D') - } else if left > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(left))...) - m = append(m, 'D') - } - - t.queue(m) -} - -func (t *Terminal) clearLineToRight() { - op := []rune{keyEscape, '[', 'K'} - t.queue(op) -} - -const maxLineLength = 4096 - -func (t *Terminal) setLine(newLine []rune, newPos int) { - if t.echo { - t.moveCursorToPos(0) - t.writeLine(newLine) - for i := len(newLine); i < len(t.line); i++ { - t.writeLine(space) - } - t.moveCursorToPos(newPos) - } - t.line = newLine - t.pos = newPos -} - -func (t *Terminal) advanceCursor(places int) { - t.cursorX += places - t.cursorY += t.cursorX / t.termWidth - if t.cursorY > t.maxLine { - t.maxLine = t.cursorY - } - t.cursorX = t.cursorX % t.termWidth - - if places > 0 && t.cursorX == 0 { - // Normally terminals will advance the current position - // when writing a character. But that doesn't happen - // for the last character in a line. However, when - // writing a character (except a new line) that causes - // a line wrap, the position will be advanced two - // places. - // - // So, if we are stopping at the end of a line, we - // need to write a newline so that our cursor can be - // advanced to the next line. - t.outBuf = append(t.outBuf, '\r', '\n') - } -} - -func (t *Terminal) eraseNPreviousChars(n int) { - if n == 0 { - return - } - - if t.pos < n { - n = t.pos - } - t.pos -= n - t.moveCursorToPos(t.pos) - - copy(t.line[t.pos:], t.line[n+t.pos:]) - t.line = t.line[:len(t.line)-n] - if t.echo { - t.writeLine(t.line[t.pos:]) - for i := 0; i < n; i++ { - t.queue(space) - } - t.advanceCursor(n) - t.moveCursorToPos(t.pos) - } -} - -// countToLeftWord returns then number of characters from the cursor to the -// start of the previous word. -func (t *Terminal) countToLeftWord() int { - if t.pos == 0 { - return 0 - } - - pos := t.pos - 1 - for pos > 0 { - if t.line[pos] != ' ' { - break - } - pos-- - } - for pos > 0 { - if t.line[pos] == ' ' { - pos++ - break - } - pos-- - } - - return t.pos - pos -} - -// countToRightWord returns then number of characters from the cursor to the -// start of the next word. 
-func (t *Terminal) countToRightWord() int { - pos := t.pos - for pos < len(t.line) { - if t.line[pos] == ' ' { - break - } - pos++ - } - for pos < len(t.line) { - if t.line[pos] != ' ' { - break - } - pos++ - } - return pos - t.pos -} - -// visualLength returns the number of visible glyphs in s. -func visualLength(runes []rune) int { - inEscapeSeq := false - length := 0 - - for _, r := range runes { - switch { - case inEscapeSeq: - if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') { - inEscapeSeq = false - } - case r == '\x1b': - inEscapeSeq = true - default: - length++ - } - } - - return length -} - -// handleKey processes the given key and, optionally, returns a line of text -// that the user has entered. -func (t *Terminal) handleKey(key rune) (line string, ok bool) { - if t.pasteActive && key != keyEnter { - t.addKeyToLine(key) - return - } - - switch key { - case keyBackspace: - if t.pos == 0 { - return - } - t.eraseNPreviousChars(1) - case keyAltLeft: - // move left by a word. - t.pos -= t.countToLeftWord() - t.moveCursorToPos(t.pos) - case keyAltRight: - // move right by a word. - t.pos += t.countToRightWord() - t.moveCursorToPos(t.pos) - case keyLeft: - if t.pos == 0 { - return - } - t.pos-- - t.moveCursorToPos(t.pos) - case keyRight: - if t.pos == len(t.line) { - return - } - t.pos++ - t.moveCursorToPos(t.pos) - case keyHome: - if t.pos == 0 { - return - } - t.pos = 0 - t.moveCursorToPos(t.pos) - case keyEnd: - if t.pos == len(t.line) { - return - } - t.pos = len(t.line) - t.moveCursorToPos(t.pos) - case keyUp: - entry, ok := t.history.NthPreviousEntry(t.historyIndex + 1) - if !ok { - return "", false - } - if t.historyIndex == -1 { - t.historyPending = string(t.line) - } - t.historyIndex++ - runes := []rune(entry) - t.setLine(runes, len(runes)) - case keyDown: - switch t.historyIndex { - case -1: - return - case 0: - runes := []rune(t.historyPending) - t.setLine(runes, len(runes)) - t.historyIndex-- - default: - entry, ok := t.history.NthPreviousEntry(t.historyIndex - 1) - if ok { - t.historyIndex-- - runes := []rune(entry) - t.setLine(runes, len(runes)) - } - } - case keyEnter: - t.moveCursorToPos(len(t.line)) - t.queue([]rune("\r\n")) - line = string(t.line) - ok = true - t.line = t.line[:0] - t.pos = 0 - t.cursorX = 0 - t.cursorY = 0 - t.maxLine = 0 - case keyDeleteWord: - // Delete zero or more spaces and then one or more characters. - t.eraseNPreviousChars(t.countToLeftWord()) - case keyDeleteLine: - // Delete everything from the current cursor position to the - // end of line. - for i := t.pos; i < len(t.line); i++ { - t.queue(space) - t.advanceCursor(1) - } - t.line = t.line[:t.pos] - t.moveCursorToPos(t.pos) - case keyCtrlD: - // Erase the character under the current position. - // The EOF case when the line is empty is handled in - // readLine(). - if t.pos < len(t.line) { - t.pos++ - t.eraseNPreviousChars(1) - } - case keyCtrlU: - t.eraseNPreviousChars(t.pos) - case keyClearScreen: - // Erases the screen and moves the cursor to the home position. 
- t.queue([]rune("\x1b[2J\x1b[H")) - t.queue(t.prompt) - t.cursorX, t.cursorY = 0, 0 - t.advanceCursor(visualLength(t.prompt)) - t.setLine(t.line, t.pos) - default: - if t.AutoCompleteCallback != nil { - prefix := string(t.line[:t.pos]) - suffix := string(t.line[t.pos:]) - - t.lock.Unlock() - newLine, newPos, completeOk := t.AutoCompleteCallback(prefix+suffix, len(prefix), key) - t.lock.Lock() - - if completeOk { - t.setLine([]rune(newLine), utf8.RuneCount([]byte(newLine)[:newPos])) - return - } - } - if !isPrintable(key) { - return - } - if len(t.line) == maxLineLength { - return - } - t.addKeyToLine(key) - } - return -} - -// addKeyToLine inserts the given key at the current position in the current -// line. -func (t *Terminal) addKeyToLine(key rune) { - if len(t.line) == cap(t.line) { - newLine := make([]rune, len(t.line), 2*(1+len(t.line))) - copy(newLine, t.line) - t.line = newLine - } - t.line = t.line[:len(t.line)+1] - copy(t.line[t.pos+1:], t.line[t.pos:]) - t.line[t.pos] = key - if t.echo { - t.writeLine(t.line[t.pos:]) - } - t.pos++ - t.moveCursorToPos(t.pos) -} - -func (t *Terminal) writeLine(line []rune) { - for len(line) != 0 { - remainingOnLine := t.termWidth - t.cursorX - todo := len(line) - if todo > remainingOnLine { - todo = remainingOnLine - } - t.queue(line[:todo]) - t.advanceCursor(visualLength(line[:todo])) - line = line[todo:] - } -} - -// writeWithCRLF writes buf to w but replaces all occurrences of \n with \r\n. -func writeWithCRLF(w io.Writer, buf []byte) (n int, err error) { - for len(buf) > 0 { - i := bytes.IndexByte(buf, '\n') - todo := len(buf) - if i >= 0 { - todo = i - } - - var nn int - nn, err = w.Write(buf[:todo]) - n += nn - if err != nil { - return n, err - } - buf = buf[todo:] - - if i >= 0 { - if _, err = w.Write(crlf); err != nil { - return n, err - } - n++ - buf = buf[1:] - } - } - - return n, nil -} - -func (t *Terminal) Write(buf []byte) (n int, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - if t.cursorX == 0 && t.cursorY == 0 { - // This is the easy case: there's nothing on the screen that we - // have to move out of the way. - return writeWithCRLF(t.c, buf) - } - - // We have a prompt and possibly user input on the screen. We - // have to clear it first. - t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */) - t.cursorX = 0 - t.clearLineToRight() - - for t.cursorY > 0 { - t.move(1 /* up */, 0, 0, 0) - t.cursorY-- - t.clearLineToRight() - } - - if _, err = t.c.Write(t.outBuf); err != nil { - return - } - t.outBuf = t.outBuf[:0] - - if n, err = writeWithCRLF(t.c, buf); err != nil { - return - } - - t.writeLine(t.prompt) - if t.echo { - t.writeLine(t.line) - } - - t.moveCursorToPos(t.pos) - - if _, err = t.c.Write(t.outBuf); err != nil { - return - } - t.outBuf = t.outBuf[:0] - return -} - -// ReadPassword temporarily changes the prompt and reads a password, without -// echo, from the terminal. -func (t *Terminal) ReadPassword(prompt string) (line string, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - oldPrompt := t.prompt - t.prompt = []rune(prompt) - t.echo = false - - line, err = t.readLine() - - t.prompt = oldPrompt - t.echo = true - - return -} - -// ReadLine returns a line of input from the terminal. 
-func (t *Terminal) ReadLine() (line string, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - return t.readLine() -} - -func (t *Terminal) readLine() (line string, err error) { - // t.lock must be held at this point - - if t.cursorX == 0 && t.cursorY == 0 { - t.writeLine(t.prompt) - t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - } - - lineIsPasted := t.pasteActive - - for { - rest := t.remainder - lineOk := false - for !lineOk { - var key rune - key, rest = bytesToKey(rest, t.pasteActive) - if key == utf8.RuneError { - break - } - if !t.pasteActive { - if key == keyCtrlD { - if len(t.line) == 0 { - return "", io.EOF - } - } - if key == keyCtrlC { - return "", io.EOF - } - if key == keyPasteStart { - t.pasteActive = true - if len(t.line) == 0 { - lineIsPasted = true - } - continue - } - } else if key == keyPasteEnd { - t.pasteActive = false - continue - } - if !t.pasteActive { - lineIsPasted = false - } - line, lineOk = t.handleKey(key) - } - if len(rest) > 0 { - n := copy(t.inBuf[:], rest) - t.remainder = t.inBuf[:n] - } else { - t.remainder = nil - } - t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - if lineOk { - if t.echo { - t.historyIndex = -1 - t.history.Add(line) - } - if lineIsPasted { - err = ErrPasteIndicator - } - return - } - - // t.remainder is a slice at the beginning of t.inBuf - // containing a partial key sequence - readBuf := t.inBuf[len(t.remainder):] - var n int - - t.lock.Unlock() - n, err = t.c.Read(readBuf) - t.lock.Lock() - - if err != nil { - return - } - - t.remainder = t.inBuf[:n+len(t.remainder)] - } -} - -// SetPrompt sets the prompt to be used when reading subsequent lines. -func (t *Terminal) SetPrompt(prompt string) { - t.lock.Lock() - defer t.lock.Unlock() - - t.prompt = []rune(prompt) -} - -func (t *Terminal) clearAndRepaintLinePlusNPrevious(numPrevLines int) { - // Move cursor to column zero at the start of the line. - t.move(t.cursorY, 0, t.cursorX, 0) - t.cursorX, t.cursorY = 0, 0 - t.clearLineToRight() - for t.cursorY < numPrevLines { - // Move down a line - t.move(0, 1, 0, 0) - t.cursorY++ - t.clearLineToRight() - } - // Move back to beginning. - t.move(t.cursorY, 0, 0, 0) - t.cursorX, t.cursorY = 0, 0 - - t.queue(t.prompt) - t.advanceCursor(visualLength(t.prompt)) - t.writeLine(t.line) - t.moveCursorToPos(t.pos) -} - -func (t *Terminal) SetSize(width, height int) error { - t.lock.Lock() - defer t.lock.Unlock() - - if width == 0 { - width = 1 - } - - oldWidth := t.termWidth - t.termWidth, t.termHeight = width, height - - switch { - case width == oldWidth: - // If the width didn't change then nothing else needs to be - // done. - return nil - case len(t.line) == 0 && t.cursorX == 0 && t.cursorY == 0: - // If there is nothing on current line and no prompt printed, - // just do nothing - return nil - case width < oldWidth: - // Some terminals (e.g. xterm) will truncate lines that were - // too long when shinking. Others, (e.g. gnome-terminal) will - // attempt to wrap them. For the former, repainting t.maxLine - // works great, but that behaviour goes badly wrong in the case - // of the latter because they have doubled every full line. - - // We assume that we are working on a terminal that wraps lines - // and adjust the cursor position based on every previous line - // wrapping and turning into two. This causes the prompt on - // xterms to move upwards, which isn't great, but it avoids a - // huge mess with gnome-terminal. 
-		if t.cursorX >= t.termWidth {
-			t.cursorX = t.termWidth - 1
-		}
-		t.cursorY *= 2
-		t.clearAndRepaintLinePlusNPrevious(t.maxLine * 2)
-	case width > oldWidth:
-		// If the terminal expands then our position calculations will
-		// be wrong in the future because we think the cursor is
-		// |t.pos| chars into the string, but there will be a gap at
-		// the end of any wrapped line.
-		//
-		// But the position will actually be correct until we move, so
-		// we can move back to the beginning and repaint everything.
-		t.clearAndRepaintLinePlusNPrevious(t.maxLine)
-	}
-
-	_, err := t.c.Write(t.outBuf)
-	t.outBuf = t.outBuf[:0]
-	return err
-}
-
-type pasteIndicatorError struct{}
-
-func (pasteIndicatorError) Error() string {
-	return "terminal: ErrPasteIndicator not correctly handled"
+	return term.NewTerminal(c, prompt)
 }
 
 // ErrPasteIndicator may be returned from ReadLine as the error, in addition
 // to valid line data. It indicates that bracketed paste mode is enabled and
 // that the returned line consists only of pasted data. Programs may wish to
 // interpret pasted data more literally than typed data.
-var ErrPasteIndicator = pasteIndicatorError{}
+var ErrPasteIndicator = term.ErrPasteIndicator
 
-// SetBracketedPasteMode requests that the terminal bracket paste operations
-// with markers. Not all terminals support this but, if it is supported, then
-// enabling this mode will stop any autocomplete callback from running due to
-// pastes. Additionally, any lines that are completely pasted will be returned
-// from ReadLine with the error set to ErrPasteIndicator.
-func (t *Terminal) SetBracketedPasteMode(on bool) {
-	if on {
-		io.WriteString(t.c, "\x1b[?2004h")
-	} else {
-		io.WriteString(t.c, "\x1b[?2004l")
-	}
-}
+// State contains the state of a terminal.
+type State = term.State
 
-// stRingBuffer is a ring buffer of strings.
-type stRingBuffer struct {
-	// entries contains max elements.
-	entries []string
-	max     int
-	// head contains the index of the element most recently added to the ring.
-	head int
-	// size contains the number of elements in the ring.
-	size int
+// IsTerminal returns whether the given file descriptor is a terminal.
+func IsTerminal(fd int) bool {
+	return term.IsTerminal(fd)
 }
 
-func (s *stRingBuffer) Add(a string) {
-	if s.entries == nil {
-		const defaultNumEntries = 100
-		s.entries = make([]string, defaultNumEntries)
-		s.max = defaultNumEntries
-	}
+// ReadPassword reads a line of input from a terminal without local echo. This
+// is commonly used for inputting passwords and other sensitive data. The slice
+// returned does not include the \n.
+func ReadPassword(fd int) ([]byte, error) {
+	return term.ReadPassword(fd)
+}
 
-	s.head = (s.head + 1) % s.max
-	s.entries[s.head] = a
-	if s.size < s.max {
-		s.size++
-	}
+// MakeRaw puts the terminal connected to the given file descriptor into raw
+// mode and returns the previous state of the terminal so that it can be
+// restored.
+func MakeRaw(fd int) (*State, error) {
+	return term.MakeRaw(fd)
 }
 
-// NthPreviousEntry returns the value passed to the nth previous call to Add.
-// If n is zero then the immediately prior value is returned, if one, then the
-// next most recent, and so on. If such an element doesn't exist then ok is
-// false.
-func (s *stRingBuffer) NthPreviousEntry(n int) (value string, ok bool) {
-	if n >= s.size {
-		return "", false
-	}
-	index := s.head - n
-	if index < 0 {
-		index += s.max
-	}
-	return s.entries[index], true
+// Restore restores the terminal connected to the given file descriptor to a
+// previous state.
+func Restore(fd int, oldState *State) error {
+	return term.Restore(fd, oldState)
 }
 
-// readPasswordLine reads from reader until it finds \n or io.EOF.
-// The slice returned does not include the \n.
-// readPasswordLine also ignores any \r it finds.
-// Windows uses \r as end of line. So, on Windows, readPasswordLine
-// reads until it finds \r and ignores any \n it finds during processing.
-func readPasswordLine(reader io.Reader) ([]byte, error) {
-	var buf [1]byte
-	var ret []byte
+// GetState returns the current state of a terminal which may be useful to
+// restore the terminal after a signal.
+func GetState(fd int) (*State, error) {
+	return term.GetState(fd)
+}
 
-	for {
-		n, err := reader.Read(buf[:])
-		if n > 0 {
-			switch buf[0] {
-			case '\b':
-				if len(ret) > 0 {
-					ret = ret[:len(ret)-1]
-				}
-			case '\n':
-				if runtime.GOOS != "windows" {
-					return ret, nil
-				}
-				// otherwise ignore \n
-			case '\r':
-				if runtime.GOOS == "windows" {
-					return ret, nil
-				}
-				// otherwise ignore \r
-			default:
-				ret = append(ret, buf[0])
-			}
-			continue
-		}
-		if err != nil {
-			if err == io.EOF && len(ret) > 0 {
-				return ret, nil
-			}
-			return ret, err
-		}
-	}
+// GetSize returns the dimensions of the given terminal.
+func GetSize(fd int) (width, height int, err error) {
+	return term.GetSize(fd)
 }
diff --git a/vendor/golang.org/x/net/context/go17.go b/vendor/golang.org/x/net/context/go17.go
index d20f52b..344bd14 100644
--- a/vendor/golang.org/x/net/context/go17.go
+++ b/vendor/golang.org/x/net/context/go17.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.7
 // +build go1.7
 
 package context
diff --git a/vendor/golang.org/x/net/context/go19.go b/vendor/golang.org/x/net/context/go19.go
index d88bd1d..64d31ec 100644
--- a/vendor/golang.org/x/net/context/go19.go
+++ b/vendor/golang.org/x/net/context/go19.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.9
 // +build go1.9
 
 package context
diff --git a/vendor/golang.org/x/net/context/pre_go17.go b/vendor/golang.org/x/net/context/pre_go17.go
index 0f35592..5270db5 100644
--- a/vendor/golang.org/x/net/context/pre_go17.go
+++ b/vendor/golang.org/x/net/context/pre_go17.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build !go1.7
 // +build !go1.7
 
 package context
diff --git a/vendor/golang.org/x/net/context/pre_go19.go b/vendor/golang.org/x/net/context/pre_go19.go
index b105f80..1f97153 100644
--- a/vendor/golang.org/x/net/context/pre_go19.go
+++ b/vendor/golang.org/x/net/context/pre_go19.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build !go1.9
 // +build !go1.9
 
 package context
diff --git a/vendor/golang.org/x/net/html/foreign.go b/vendor/golang.org/x/net/html/foreign.go
index 74774c4..9da9e9d 100644
--- a/vendor/golang.org/x/net/html/foreign.go
+++ b/vendor/golang.org/x/net/html/foreign.go
@@ -161,65 +161,62 @@ var mathMLAttributeAdjustments = map[string]string{
 }
 
 var svgAttributeAdjustments = map[string]string{
-	"attributename":             "attributeName",
-	"attributetype":             "attributeType",
-	"basefrequency":             "baseFrequency",
-	"baseprofile":               "baseProfile",
-	"calcmode":                  "calcMode",
-	"clippathunits":             "clipPathUnits",
-	"contentscripttype":         "contentScriptType",
-	"contentstyletype":          "contentStyleType",
-	"diffuseconstant":           "diffuseConstant",
-	"edgemode":                  "edgeMode",
-	"externalresourcesrequired": "externalResourcesRequired",
-	"filterunits":               "filterUnits",
-	"glyphref":                  "glyphRef",
-	"gradienttransform":         "gradientTransform",
-	"gradientunits":             "gradientUnits",
-	"kernelmatrix":              "kernelMatrix",
-	"kernelunitlength":          "kernelUnitLength",
-	"keypoints":                 "keyPoints",
-	"keysplines":                "keySplines",
-	"keytimes":                  "keyTimes",
-	"lengthadjust":              "lengthAdjust",
-	"limitingconeangle":         "limitingConeAngle",
-	"markerheight":              "markerHeight",
-	"markerunits":               "markerUnits",
-	"markerwidth":               "markerWidth",
-	"maskcontentunits":          "maskContentUnits",
-	"maskunits":                 "maskUnits",
-	"numoctaves":                "numOctaves",
-	"pathlength":                "pathLength",
-	"patterncontentunits":       "patternContentUnits",
-	"patterntransform":          "patternTransform",
-	"patternunits":              "patternUnits",
-	"pointsatx":                 "pointsAtX",
-	"pointsaty":                 "pointsAtY",
-	"pointsatz":                 "pointsAtZ",
-	"preservealpha":             "preserveAlpha",
-	"preserveaspectratio":       "preserveAspectRatio",
-	"primitiveunits":            "primitiveUnits",
-	"refx":                      "refX",
-	"refy":                      "refY",
-	"repeatcount":               "repeatCount",
-	"repeatdur":                 "repeatDur",
-	"requiredextensions":        "requiredExtensions",
-	"requiredfeatures":          "requiredFeatures",
-	"specularconstant":          "specularConstant",
-	"specularexponent":          "specularExponent",
-	"spreadmethod":              "spreadMethod",
-	"startoffset":               "startOffset",
-	"stddeviation":              "stdDeviation",
-	"stitchtiles":               "stitchTiles",
-	"surfacescale":              "surfaceScale",
-	"systemlanguage":            "systemLanguage",
-	"tablevalues":               "tableValues",
-	"targetx":                   "targetX",
-	"targety":                   "targetY",
-	"textlength":                "textLength",
-	"viewbox":                   "viewBox",
-	"viewtarget":                "viewTarget",
-	"xchannelselector":          "xChannelSelector",
-	"ychannelselector":          "yChannelSelector",
-	"zoomandpan":                "zoomAndPan",
+	"attributename":       "attributeName",
+	"attributetype":       "attributeType",
+	"basefrequency":       "baseFrequency",
+	"baseprofile":         "baseProfile",
+	"calcmode":            "calcMode",
+	"clippathunits":       "clipPathUnits",
+	"diffuseconstant":     "diffuseConstant",
+	"edgemode":            "edgeMode",
+	"filterunits":         "filterUnits",
+	"glyphref":            "glyphRef",
+	"gradienttransform":   "gradientTransform",
+	"gradientunits":       "gradientUnits",
+	"kernelmatrix":        "kernelMatrix",
+	"kernelunitlength":    "kernelUnitLength",
+	"keypoints":           "keyPoints",
+	"keysplines":          "keySplines",
+	"keytimes":            "keyTimes",
+	"lengthadjust":        "lengthAdjust",
+	"limitingconeangle":   "limitingConeAngle",
+	"markerheight":        "markerHeight",
+	"markerunits":         "markerUnits",
+	"markerwidth":         "markerWidth",
+	"maskcontentunits":    "maskContentUnits",
+	"maskunits":           "maskUnits",
+	"numoctaves":          "numOctaves",
+	"pathlength":          "pathLength",
+	"patterncontentunits": "patternContentUnits",
+	"patterntransform":    "patternTransform",
+	"patternunits":        "patternUnits",
+	"pointsatx":           "pointsAtX",
+	"pointsaty":           "pointsAtY",
"pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": "refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan", } diff --git a/vendor/golang.org/x/net/html/parse.go b/vendor/golang.org/x/net/html/parse.go index 2cd12fc..038941d 100644 --- a/vendor/golang.org/x/net/html/parse.go +++ b/vendor/golang.org/x/net/html/parse.go @@ -663,6 +663,24 @@ func inHeadIM(p *parser) bool { // Ignore the token. return true case a.Template: + // TODO: remove this divergence from the HTML5 spec. + // + // We don't handle all of the corner cases when mixing foreign + // content (i.e. or ) with