From 56a854fe14ae2328d421103ccb362566ae368a7b Mon Sep 17 00:00:00 2001
From: 6543 <6543@obermui.de>
Date: Thu, 24 Feb 2022 17:33:24 +0100
Subject: [PATCH] Update deps (#789)

* update github.com/docker/cli
* update github.com/docker/distribution
* update github.com/docker/docker
* update github.com/gin-gonic/gin
* update github.com/golang-jwt/jwt/v4
* update github.com/golangci/golangci-lint
* update github.com/gorilla/securecookie
* update github.com/mattn/go-sqlite3
* update github.com/moby/moby
* update github.com/prometheus/client_golang
* update github.com/xanzy/go-gitlab
---
 go.mod | 32 +-
 go.sum | 348 +-
 vendor/github.com/BurntSushi/toml/README.md | 41 +-
 vendor/github.com/BurntSushi/toml/decode.go | 133 +-
 .../BurntSushi/toml/decode_go116.go | 1 +
 .../github.com/BurntSushi/toml/deprecated.go | 24 +-
 vendor/github.com/BurntSushi/toml/encode.go | 150 +-
 vendor/github.com/BurntSushi/toml/error.go | 229 +
 vendor/github.com/BurntSushi/toml/go.sum | 0
 vendor/github.com/BurntSushi/toml/lex.go | 354 +-
 .../toml/{decode_meta.go => meta.go} | 105 +-
 vendor/github.com/BurntSushi/toml/parse.go | 174 +-
 .../github.com/BurntSushi/toml/type_fields.go | 4 +-
 .../toml/{type_check.go => type_toml.go} | 2 +-
 .../github.com/OpenPeeDeeP/depguard/README.md | 46 +-
 .../OpenPeeDeeP/depguard/depguard.go | 82 +-
 vendor/github.com/OpenPeeDeeP/depguard/go.mod | 1 +
 vendor/github.com/OpenPeeDeeP/depguard/go.sum | 10 +
 .../forbidigo/forbidigo/forbidigo.go | 26 +-
 .../forbidigo/forbidigo/patterns.go | 43 +
 .../ashanbrown/makezero/makezero/makezero.go | 25 +-
 .../blizzy78/varnamelen/.editorconfig | 4 +
 .../blizzy78/varnamelen/.golangci.yml | 45 +-
 .../blizzy78/varnamelen/.travis.yml | 7 -
 .../github.com/blizzy78/varnamelen/README.md | 56 +-
 .../github.com/blizzy78/varnamelen/flags.go | 98 +
 vendor/github.com/blizzy78/varnamelen/go.mod | 7 +-
 vendor/github.com/blizzy78/varnamelen/go.sum | 20 +-
 .../blizzy78/varnamelen/varnamelen.go | 583 +-
 .../breml/bidichk/pkg/bidichk/bidichk.go | 153 +-
 .../breml/bidichk/pkg/bidichk/version.go | 19 +
 vendor/github.com/breml/errchkjson/.gitignore | 29 +
 .../breml/errchkjson/.goreleaser.yml | 33 +
 vendor/github.com/breml/errchkjson/LICENSE | 21 +
 vendor/github.com/breml/errchkjson/README.md | 125 +
 .../github.com/breml/errchkjson/errchkjson.go | 310 +
 vendor/github.com/breml/errchkjson/go.mod | 5 +
 vendor/github.com/breml/errchkjson/go.sum | 27 +
 .../breml/errchkjson/noexported_error.go | 23 +
 .../breml/errchkjson/unsupported_error.go | 23 +
 vendor/github.com/breml/errchkjson/version.go | 19 +
 .../daixiang0/gci/pkg/analyzer/analyzer.go | 131 +
 .../daixiang0/gci/pkg/analyzer/errors.go | 16 +
 .../gci/pkg/configuration/formatter.go | 7 +
 .../daixiang0/gci/pkg/constants/sequences.go | 16 +
 .../daixiang0/gci/pkg/gci/configuration.go | 58 +
 .../daixiang0/gci/pkg/gci/errors.go | 78 +
 .../daixiang0/gci/pkg/gci/format.go | 98 +
 .../github.com/daixiang0/gci/pkg/gci/gci.go | 454 +-
 .../daixiang0/gci/pkg/gci/imports/errors.go | 37 +
 .../daixiang0/gci/pkg/gci/imports/import.go | 86 +
 .../github.com/daixiang0/gci/pkg/gci/parse.go | 56 +
 .../gci/pkg/gci/sections/commentline.go | 51 +
 .../daixiang0/gci/pkg/gci/sections/default.go | 43 +
 .../daixiang0/gci/pkg/gci/sections/errors.go | 68 +
 .../daixiang0/gci/pkg/gci/sections/newline.go | 41 +
 .../daixiang0/gci/pkg/gci/sections/prefix.go | 51 +
 .../daixiang0/gci/pkg/gci/sections/section.go | 58 +
 .../gci/pkg/gci/sections/sectionparser.go | 139 +
 .../gci/pkg/gci/sections/sectiontype.go | 40 +
 .../gci/pkg/gci/sections/standardpackage.go | 51 +
.../standardpackage_list.go} | 10 +- .../gci/pkg/gci/specificity/default.go | 19 + .../gci/pkg/gci/specificity/match.go | 24 + .../gci/pkg/gci/specificity/mismatch.go | 21 + .../gci/pkg/gci/specificity/specificity.go | 28 + .../gci/pkg/gci/specificity/standard.go | 20 + .../github.com/daixiang0/gci/pkg/io/file.go | 59 + .../github.com/daixiang0/gci/pkg/io/search.go | 47 + .../github.com/daixiang0/gci/pkg/io/stdin.go | 28 + .../distribution/reference/normalize.go | 29 + .../distribution/reference/reference.go | 2 +- .../registry/api/errcode/errors.go | 6 +- .../esimonov/ifshort/pkg/analyzer/analyzer.go | 9 + .../ifshort/pkg/analyzer/occurrences.go | 13 +- vendor/github.com/fzipp/gocyclo/CHANGELOG.md | 9 +- vendor/github.com/fzipp/gocyclo/README.md | 3 +- vendor/github.com/fzipp/gocyclo/analyze.go | 21 +- vendor/github.com/fzipp/gocyclo/go.mod | 2 +- vendor/github.com/fzipp/gocyclo/recv.go | 26 + .../github.com/fzipp/gocyclo/recv_pre118.go | 24 + vendor/github.com/gin-gonic/gin/.travis.yml | 2 - vendor/github.com/gin-gonic/gin/CHANGELOG.md | 30 + vendor/github.com/gin-gonic/gin/README.md | 43 +- vendor/github.com/gin-gonic/gin/context.go | 66 +- .../gin-gonic/gin/context_appengine.go | 2 +- vendor/github.com/gin-gonic/gin/debug.go | 4 +- vendor/github.com/gin-gonic/gin/gin.go | 126 +- vendor/github.com/gin-gonic/gin/go.mod | 2 + vendor/github.com/gin-gonic/gin/tree.go | 136 +- vendor/github.com/gin-gonic/gin/version.go | 2 +- .../checkers/boolExprSimplify_checker.go | 7 +- .../go-critic/go-critic/checkers/checkers.go | 96 - .../checkers/commentFormatting_checker.go | 71 +- .../go-critic/checkers/deferInLoop_checker.go | 70 + .../checkers/deprecatedComment_checker.go | 25 +- .../go-critic/checkers/embedded_rules.go | 103 + .../checkers/internal/lintutil/astfind.go | 32 +- .../go-critic/checkers/ruleguard_checker.go | 56 +- .../go-critic/checkers/rulesdata/rulesdata.go | 1218 +- .../checkers/typeSwitchVar_checker.go | 2 +- .../go-critic/checkers/unlabelStmt_checker.go | 21 +- .../go-critic/checkers/whyNoLint_checker.go | 3 +- .../go-critic/framework/linter/go_version.go | 18 +- .../linter/{lintpack.go => linter.go} | 6 +- .../go-playground/validator/v10/README.md | 4 +- .../go-playground/validator/v10/baked_in.go | 29 + .../go-playground/validator/v10/doc.go | 19 + .../go-playground/validator/v10/regexes.go | 8 +- vendor/github.com/golang-jwt/jwt/v4/README.md | 13 +- vendor/github.com/golang-jwt/jwt/v4/claims.go | 22 +- vendor/github.com/golang-jwt/jwt/v4/errors.go | 53 + .../golang-jwt/jwt/v4/map_claims.go | 3 + vendor/github.com/golang-jwt/jwt/v4/parser.go | 32 +- .../golang-jwt/jwt/v4/parser_option.go | 29 + .../golang-jwt/jwt/v4/signing_method.go | 11 + vendor/github.com/golang-jwt/jwt/v4/token.go | 64 +- vendor/github.com/golang-jwt/jwt/v4/types.go | 10 +- .../golangci-lint/internal/cache/cache.go | 4 +- .../internal/pkgcache/pkgcache.go | 2 +- .../internal/robustio/robustio_flaky.go | 2 +- .../golangci-lint/pkg/commands/cache.go | 5 +- .../golangci-lint/pkg/commands/config.go | 2 +- .../golangci-lint/pkg/commands/help.go | 12 +- .../golangci-lint/pkg/commands/linters.go | 3 +- .../golangci-lint/pkg/commands/root.go | 3 +- .../golangci-lint/pkg/commands/run.go | 90 +- .../golangci-lint/pkg/config/config.go | 2 +- .../pkg/config/linters_settings.go | 226 +- .../pkg/config/linters_settings_gocritic.go | 3 +- .../golangci-lint/pkg/config/reader.go | 3 +- .../golangci-lint/pkg/exitcodes/exitcodes.go | 18 +- .../golangci-lint/pkg/golinters/bidichk.go | 48 +- .../pkg/golinters/containedctx.go | 
19 + .../golangci-lint/pkg/golinters/decorder.go | 38 + .../golangci-lint/pkg/golinters/depguard.go | 218 +- .../golangci-lint/pkg/golinters/errchkjson.go | 33 + .../golangci-lint/pkg/golinters/exhaustive.go | 3 +- .../golangci-lint/pkg/golinters/gci.go | 102 +- .../pkg/golinters/goanalysis/runner.go | 2 +- .../pkg/golinters/goanalysis/runner_action.go | 4 +- .../goanalysis/runner_loadingpackage.go | 10 +- .../pkg/golinters/gochecknoglobals.go | 4 +- .../golangci-lint/pkg/golinters/gocritic.go | 20 +- .../golangci-lint/pkg/golinters/godot.go | 7 +- .../pkg/golinters/gofmt_common.go | 11 - .../golangci-lint/pkg/golinters/gomnd.go | 34 +- .../golangci-lint/pkg/golinters/gosec.go | 6 +- .../golangci-lint/pkg/golinters/grouper.go | 32 + .../golangci-lint/pkg/golinters/importas.go | 7 +- .../golangci-lint/pkg/golinters/ireturn.go | 6 +- .../golangci-lint/pkg/golinters/lll.go | 4 +- .../golangci-lint/pkg/golinters/maintidx.go | 32 + .../pkg/golinters/nolintlint/nolintlint.go | 4 +- .../golangci-lint/pkg/golinters/revive.go | 88 +- .../{rowerrcheck.go => rowserrcheck.go} | 0 .../golangci-lint/pkg/golinters/varnamelen.go | 12 +- .../golangci-lint/pkg/golinters/wrapcheck.go | 3 + .../pkg/lint/lintersdb/manager.go | 807 +- .../pkg/lint/lintersdb/validator.go | 2 +- .../golangci/golangci-lint/pkg/lint/load.go | 2 +- .../golangci/golangci-lint/pkg/lint/runner.go | 13 +- .../golangci-lint/pkg/logutils/log.go | 4 +- .../golangci-lint/pkg/packages/errors.go | 1 - .../golangci-lint/pkg/packages/util.go | 4 +- .../golangci-lint/pkg/printers/checkstyle.go | 25 +- .../golangci-lint/pkg/printers/codeclimate.go | 12 +- .../golangci-lint/pkg/printers/github.go | 11 +- .../golangci-lint/pkg/printers/html.go | 16 +- .../golangci-lint/pkg/printers/json.go | 16 +- .../golangci-lint/pkg/printers/junitxml.go | 24 +- .../golangci-lint/pkg/printers/tab.go | 6 +- .../golangci-lint/pkg/printers/text.go | 11 +- .../pkg/result/processors/nolint.go | 5 +- .../pkg/result/processors/sort_results.go | 3 +- .../golangci-lint/pkg/timeutils/stopwatch.go | 2 +- .../github.com/google/go-cmp/cmp/compare.go | 17 - .../google/go-cmp/cmp/export_panic.go | 1 + .../google/go-cmp/cmp/export_unsafe.go | 1 + .../go-cmp/cmp/internal/diff/debug_disable.go | 1 + .../go-cmp/cmp/internal/diff/debug_enable.go | 1 + .../cmp/internal/flags/toolchain_legacy.go | 10 - .../cmp/internal/flags/toolchain_recent.go | 10 - .../google/go-cmp/cmp/internal/value/name.go | 7 + .../cmp/internal/value/pointer_purego.go | 1 + .../cmp/internal/value/pointer_unsafe.go | 1 + vendor/github.com/google/go-cmp/cmp/path.go | 2 +- .../google/go-cmp/cmp/report_reflect.go | 3 +- .../google/go-cmp/cmp/report_slices.go | 6 +- .../pkg/ineffassign/ineffassign.go | 13 +- .../forcetypeassert/README.md | 11 + .../forcetypeassert/forcetypeassert.go | 134 +- vendor/github.com/hexops/gotextdiff/LICENSE | 27 + vendor/github.com/hexops/gotextdiff/README.md | 54 + vendor/github.com/hexops/gotextdiff/diff.go | 159 + vendor/github.com/hexops/gotextdiff/go.mod | 3 + .../hexops/gotextdiff/myers/diff.go | 205 + .../hexops/gotextdiff/span/parse.go | 100 + .../github.com/hexops/gotextdiff/span/span.go | 285 + .../hexops/gotextdiff/span/token.go | 194 + .../hexops/gotextdiff/span/token111.go | 39 + .../hexops/gotextdiff/span/token112.go | 16 + .../github.com/hexops/gotextdiff/span/uri.go | 169 + .../hexops/gotextdiff/span/utf16.go | 91 + .../github.com/hexops/gotextdiff/unified.go | 210 + vendor/github.com/julz/importas/README.md | 12 + vendor/github.com/julz/importas/analyzer.go | 29 +- 
vendor/github.com/julz/importas/config.go | 7 +- vendor/github.com/julz/importas/flags.go | 1 + .../kulti/thelper/pkg/analyzer/analyzer.go | 228 +- .../github.com/ldez/tagliatelle/.golangci.yml | 6 +- vendor/github.com/ldez/tagliatelle/Makefile | 2 +- vendor/github.com/ldez/tagliatelle/readme.md | 63 +- .../ldez/tagliatelle/tagliatelle.go | 34 +- .../github.com/leonklingele/grouper/LICENSE | 662 + .../grouper/pkg/analyzer/analyzer.go | 89 + .../grouper/pkg/analyzer/config.go | 15 + .../grouper/pkg/analyzer/consts/analyzer.go | 19 + .../grouper/pkg/analyzer/consts/config.go | 6 + .../grouper/pkg/analyzer/flags.go | 37 + .../grouper/pkg/analyzer/globals/analyzer.go | 105 + .../grouper/pkg/analyzer/imports/analyzer.go | 103 + .../grouper/pkg/analyzer/imports/config.go | 6 + .../grouper/pkg/analyzer/types/analyzer.go | 19 + .../grouper/pkg/analyzer/types/config.go | 6 + .../grouper/pkg/analyzer/vars/analyzer.go | 19 + .../grouper/pkg/analyzer/vars/config.go | 6 + .../mattn/go-colorable/noncolorable.go | 2 - vendor/github.com/mattn/go-sqlite3/README.md | 111 +- vendor/github.com/mattn/go-sqlite3/backup.go | 2 +- .../github.com/mattn/go-sqlite3/callback.go | 2 +- vendor/github.com/mattn/go-sqlite3/error.go | 2 +- .../mattn/go-sqlite3/sqlite3-binding.c | 14670 +++++++++------- .../mattn/go-sqlite3/sqlite3-binding.h | 167 +- vendor/github.com/mattn/go-sqlite3/sqlite3.go | 99 +- .../mattn/go-sqlite3/sqlite3_context.go | 2 +- .../go-sqlite3/sqlite3_load_extension.go | 2 +- .../go-sqlite3/sqlite3_opt_preupdate_hook.go | 2 +- .../go-sqlite3/sqlite3_opt_unlock_notify.c | 2 +- .../go-sqlite3/sqlite3_opt_unlock_notify.go | 2 +- .../mattn/go-sqlite3/sqlite3_opt_userauth.go | 2 +- .../mattn/go-sqlite3/sqlite3_opt_vtable.go | 21 +- .../mattn/go-sqlite3/sqlite3_trace.go | 2 +- .../mattn/go-sqlite3/sqlite3_type.go | 94 +- .../github.com/mattn/go-sqlite3/sqlite3ext.h | 12 + .../mgechev/revive/config/config.go | 36 +- .../mgechev/revive/formatter/friendly.go | 4 +- vendor/github.com/mgechev/revive/lint/file.go | 6 +- .../github.com/mgechev/revive/lint/linter.go | 76 +- vendor/github.com/mgechev/revive/lint/rule.go | 2 +- .../mgechev/revive/rule/add-constant.go | 81 +- .../mgechev/revive/rule/argument-limit.go | 17 +- .../mgechev/revive/rule/banned-characters.go | 81 + .../mgechev/revive/rule/bare-return.go | 2 +- .../mgechev/revive/rule/blank-imports.go | 2 +- .../revive/rule/cognitive-complexity.go | 18 +- .../revive/rule/context-as-argument.go | 72 +- .../mgechev/revive/rule/cyclomatic.go | 23 +- .../github.com/mgechev/revive/rule/defer.go | 13 +- .../mgechev/revive/rule/error-strings.go | 92 +- .../mgechev/revive/rule/exported.go | 29 +- .../mgechev/revive/rule/file-header.go | 19 +- .../mgechev/revive/rule/function-length.go | 15 +- .../revive/rule/function-result-limit.go | 23 +- .../mgechev/revive/rule/imports-blacklist.go | 28 +- .../mgechev/revive/rule/line-length-limit.go | 20 +- .../mgechev/revive/rule/max-public-structs.go | 17 +- .../revive/rule/optimize-operands-order.go | 77 + .../revive/rule/range-val-in-closure.go | 15 +- .../mgechev/revive/rule/string-format.go | 2 +- .../mgechev/revive/rule/string-of-int.go | 2 +- .../mgechev/revive/rule/time-equal.go | 76 + .../revive/rule/unconditional-recursion.go | 4 +- .../mgechev/revive/rule/unhandled-error.go | 24 +- .../github.com/mgechev/revive/rule/utils.go | 11 +- .../mgechev/revive/rule/var-naming.go | 35 +- .../mitchellh/mapstructure/CHANGELOG.md | 4 + .../mitchellh/mapstructure/mapstructure.go | 8 +- .../nishanths/exhaustive/.gitignore | 3 + 
.../nishanths/exhaustive/.travis.yml | 12 - .../github.com/nishanths/exhaustive/Makefile | 28 + .../github.com/nishanths/exhaustive/README.md | 60 +- .../nishanths/exhaustive/comment.go | 65 + .../github.com/nishanths/exhaustive/enum.go | 261 +- .../nishanths/exhaustive/exhaustive.go | 387 +- .../github.com/nishanths/exhaustive/fact.go | 29 + .../nishanths/exhaustive/generated.go | 34 - vendor/github.com/nishanths/exhaustive/go.mod | 5 +- vendor/github.com/nishanths/exhaustive/go.sum | 17 +- .../nishanths/exhaustive/regexp_flag.go | 35 - .../github.com/nishanths/exhaustive/switch.go | 644 +- .../go-errorlint/errorlint/allowed.go | 143 +- .../go-errorlint/errorlint/analysis.go | 49 +- .../polyfloyd/go-errorlint/errorlint/lint.go | 23 +- .../client_golang/prometheus/collector.go | 8 + .../prometheus/go_collector_go117.go | 94 +- .../prometheus/internal/go_runtime_metrics.go | 65 + .../github.com/quasilyte/go-ruleguard/LICENSE | 2 +- .../go-ruleguard/ruleguard/ast_walker.go | 55 +- .../go-ruleguard/ruleguard/filters.go | 93 +- .../go-ruleguard/ruleguard/gorule.go | 20 +- .../ruleguard/ir/filter_op.gen.go | 196 +- .../ruleguard/ir/gen_filter_op.go | 3 + .../go-ruleguard/ruleguard/ir_loader.go | 72 +- .../go-ruleguard/ruleguard/irconv/irconv.go | 166 +- .../go-ruleguard/ruleguard/match_data.go | 2 +- .../ruleguard/profiling/no_labels.go | 16 + .../ruleguard/profiling/with_labels.go | 21 + .../go-ruleguard/ruleguard/quasigo/compile.go | 167 +- .../go-ruleguard/ruleguard/quasigo/disasm.go | 2 + .../go-ruleguard/ruleguard/quasigo/eval.go | 9 + .../ruleguard/quasigo/gen_opcodes.go | 7 +- .../ruleguard/quasigo/opcode_string.go | 67 +- .../ruleguard/quasigo/opcodes.gen.go | 137 +- .../go-ruleguard/ruleguard/quasigo/quasigo.go | 18 +- .../go-ruleguard/ruleguard/ruleguard.go | 26 +- .../go-ruleguard/ruleguard/runner.go | 87 +- vendor/github.com/quasilyte/gogrep/.gitignore | 4 + .../github.com/quasilyte/gogrep/.golangci.yml | 49 + vendor/github.com/quasilyte/gogrep/LICENSE | 33 + vendor/github.com/quasilyte/gogrep/Makefile | 19 + vendor/github.com/quasilyte/gogrep/README.md | 41 + .../internal => }/gogrep/compile.go | 69 +- .../quasilyte/gogrep/compile_import.go | 57 + .../internal => }/gogrep/gen_operations.go | 20 +- vendor/github.com/quasilyte/gogrep/go.mod | 8 + vendor/github.com/quasilyte/gogrep/go.sum | 8 + .../internal => }/gogrep/gogrep.go | 37 +- .../internal => }/gogrep/instructions.go | 0 .../gogrep/internal/stdinfo/stdinfo.go | 151 + .../internal => }/gogrep/match.go | 21 +- .../nodetag/nodetag.go | 4 +- .../internal => }/gogrep/operation_string.go | 10 +- .../internal => }/gogrep/operations.gen.go | 33 +- .../internal => }/gogrep/parse.go | 7 +- .../internal => }/gogrep/slices.go | 0 .../github.com/securego/gosec/v2/Dockerfile | 2 +- vendor/github.com/securego/gosec/v2/Makefile | 8 +- vendor/github.com/securego/gosec/v2/README.md | 40 +- .../github.com/securego/gosec/v2/analyzer.go | 153 +- vendor/github.com/securego/gosec/v2/config.go | 4 + vendor/github.com/securego/gosec/v2/errors.go | 2 +- vendor/github.com/securego/gosec/v2/go.mod | 12 +- vendor/github.com/securego/gosec/v2/go.sum | 43 +- vendor/github.com/securego/gosec/v2/issue.go | 29 +- vendor/github.com/securego/gosec/v2/rule.go | 27 +- .../securego/gosec/v2/rules/bad_defer.go | 45 +- .../securego/gosec/v2/rules/readfile.go | 1 + .../securego/gosec/v2/rules/rulelist.go | 28 +- .../github.com/securego/gosec/v2/rules/sql.go | 23 +- .../securego/gosec/v2/rules/subproc.go | 4 + .../securego/gosec/v2/rules/tempfiles.go | 45 +- 
.../github.com/securego/gosec/v2/rules/tls.go | 2 +- .../sivchari/containedctx/.golangci.yml | 38 + .../github.com/sivchari/containedctx/LICENCE | 21 + .../sivchari/containedctx/README.md | 62 + .../sivchari/containedctx/containedctx.go | 54 + .../github.com/sivchari/containedctx/go.mod | 19 + .../github.com/sivchari/containedctx/go.sum | 61 + vendor/github.com/spf13/cobra/Makefile | 2 +- vendor/github.com/spf13/cobra/README.md | 34 +- vendor/github.com/spf13/cobra/args.go | 12 + .../spf13/cobra/bash_completions.go | 48 +- .../spf13/cobra/bash_completionsV2.go | 31 +- .../github.com/spf13/cobra/command_notwin.go | 1 + vendor/github.com/spf13/cobra/command_win.go | 1 + vendor/github.com/spf13/cobra/completions.go | 208 +- vendor/github.com/spf13/cobra/go.mod | 6 +- vendor/github.com/spf13/cobra/go.sum | 291 +- .../spf13/cobra/powershell_completions.go | 6 +- .../spf13/cobra/projects_using_cobra.md | 12 + .../spf13/cobra/shell_completions.md | 20 +- vendor/github.com/spf13/cobra/user_guide.md | 5 +- .../github.com/spf13/cobra/zsh_completions.go | 4 +- vendor/github.com/spf13/viper/.golangci.yml | 2 +- vendor/github.com/spf13/viper/Makefile | 4 +- vendor/github.com/spf13/viper/fs.go | 65 + vendor/github.com/spf13/viper/go.mod | 57 +- vendor/github.com/spf13/viper/go.sum | 217 +- vendor/github.com/spf13/viper/logger.go | 77 + vendor/github.com/spf13/viper/util.go | 22 +- vendor/github.com/spf13/viper/viper.go | 97 +- vendor/github.com/spf13/viper/viper_go1_15.go | 57 + vendor/github.com/spf13/viper/viper_go1_16.go | 32 + vendor/github.com/spf13/viper/watch.go | 1 + .../github.com/tdakkota/asciicheck/.gitignore | 1 - .../github.com/tdakkota/asciicheck/ascii.go | 15 +- vendor/github.com/tdakkota/asciicheck/go.sum | 20 + .../bodyclose/passes/bodyclose/bodyclose.go | 5 - .../tommy-muehle/go-mnd/v2/.goreleaser.yml | 25 + .../tommy-muehle/go-mnd/v2/Dockerfile | 2 +- .../tommy-muehle/go-mnd/v2/Makefile | 2 +- .../tommy-muehle/go-mnd/v2/README.md | 2 +- .../tommy-muehle/go-mnd/v2/checks/argument.go | 4 + .../tommy-muehle/go-mnd/v2/config/config.go | 24 +- .../github.com/ultraware/whitespace/README.md | 2 +- .../github.com/ultraware/whitespace/main.go | 4 + vendor/github.com/xanzy/go-gitlab/README.md | 10 +- .../xanzy/go-gitlab/access_requests.go | 16 +- .../xanzy/go-gitlab/audit_events.go | 8 +- .../xanzy/go-gitlab/award_emojis.go | 16 +- vendor/github.com/xanzy/go-gitlab/boards.go | 22 +- vendor/github.com/xanzy/go-gitlab/branches.go | 14 +- .../xanzy/go-gitlab/ci_yml_templates.go | 2 +- .../xanzy/go-gitlab/client_options.go | 16 + vendor/github.com/xanzy/go-gitlab/commits.go | 26 +- .../xanzy/go-gitlab/container_registry.go | 76 +- .../github.com/xanzy/go-gitlab/deploy_keys.go | 12 +- .../xanzy/go-gitlab/deploy_tokens.go | 16 +- .../github.com/xanzy/go-gitlab/deployments.go | 8 +- .../github.com/xanzy/go-gitlab/discussions.go | 62 +- .../xanzy/go-gitlab/environments.go | 12 +- .../github.com/xanzy/go-gitlab/epic_issues.go | 8 +- vendor/github.com/xanzy/go-gitlab/epics.go | 19 +- vendor/github.com/xanzy/go-gitlab/events.go | 2 +- .../xanzy/go-gitlab/external_status_checks.go | 4 +- .../xanzy/go-gitlab/freeze_periods.go | 10 +- .../xanzy/go-gitlab/generic_packages.go | 24 +- .../github.com/xanzy/go-gitlab/geo_nodes.go | 56 +- vendor/github.com/xanzy/go-gitlab/gitlab.go | 102 +- vendor/github.com/xanzy/go-gitlab/go.mod | 17 +- vendor/github.com/xanzy/go-gitlab/go.sum | 12 +- .../xanzy/go-gitlab/group_access_tokens.go | 140 + .../xanzy/go-gitlab/group_badges.go | 12 +- 
.../xanzy/go-gitlab/group_boards.go | 22 +- .../xanzy/go-gitlab/group_clusters.go | 10 +- .../github.com/xanzy/go-gitlab/group_hooks.go | 14 +- .../xanzy/go-gitlab/group_import_export.go | 4 +- .../xanzy/go-gitlab/group_iterations.go | 90 + .../xanzy/go-gitlab/group_labels.go | 25 +- .../xanzy/go-gitlab/group_members.go | 20 +- .../xanzy/go-gitlab/group_milestones.go | 16 +- .../xanzy/go-gitlab/group_variables.go | 10 +- .../github.com/xanzy/go-gitlab/group_wikis.go | 10 +- vendor/github.com/xanzy/go-gitlab/groups.go | 274 +- vendor/github.com/xanzy/go-gitlab/invites.go | 8 +- .../github.com/xanzy/go-gitlab/issue_links.go | 6 +- vendor/github.com/xanzy/go-gitlab/issues.go | 76 +- .../xanzy/go-gitlab/issues_statistics.go | 22 +- vendor/github.com/xanzy/go-gitlab/jobs.go | 43 +- vendor/github.com/xanzy/go-gitlab/labels.go | 16 +- vendor/github.com/xanzy/go-gitlab/markdown.go | 47 + .../go-gitlab/merge_request_approvals.go | 29 +- .../xanzy/go-gitlab/merge_requests.go | 328 +- .../github.com/xanzy/go-gitlab/milestones.go | 23 +- .../github.com/xanzy/go-gitlab/namespaces.go | 2 +- vendor/github.com/xanzy/go-gitlab/notes.go | 40 +- .../xanzy/go-gitlab/notifications.go | 8 +- vendor/github.com/xanzy/go-gitlab/packages.go | 72 +- vendor/github.com/xanzy/go-gitlab/pages.go | 2 +- .../xanzy/go-gitlab/pages_domains.go | 10 +- .../xanzy/go-gitlab/pipeline_schedules.go | 20 +- .../xanzy/go-gitlab/pipeline_triggers.go | 14 +- .../github.com/xanzy/go-gitlab/pipelines.go | 27 +- .../xanzy/go-gitlab/project_access_tokens.go | 8 +- .../xanzy/go-gitlab/project_badges.go | 20 +- .../xanzy/go-gitlab/project_clusters.go | 10 +- .../xanzy/go-gitlab/project_import_export.go | 25 +- .../xanzy/go-gitlab/project_iterations.go | 90 + .../go-gitlab/project_managed_licenses.go | 10 +- .../xanzy/go-gitlab/project_members.go | 14 +- .../xanzy/go-gitlab/project_mirror.go | 6 +- .../xanzy/go-gitlab/project_snippets.go | 13 +- .../xanzy/go-gitlab/project_variables.go | 42 +- vendor/github.com/xanzy/go-gitlab/projects.go | 322 +- .../xanzy/go-gitlab/protected_branches.go | 28 +- .../xanzy/go-gitlab/protected_environments.go | 12 +- .../xanzy/go-gitlab/protected_tags.go | 8 +- .../xanzy/go-gitlab/releaselinks.go | 16 +- vendor/github.com/xanzy/go-gitlab/releases.go | 63 +- .../xanzy/go-gitlab/repositories.go | 18 +- .../xanzy/go-gitlab/repository_files.go | 28 +- .../xanzy/go-gitlab/repository_submodules.go | 93 + .../xanzy/go-gitlab/resource_label_events.go | 12 +- .../xanzy/go-gitlab/resource_state_events.go | 8 +- vendor/github.com/xanzy/go-gitlab/runners.go | 38 +- vendor/github.com/xanzy/go-gitlab/search.go | 4 +- vendor/github.com/xanzy/go-gitlab/services.go | 172 +- vendor/github.com/xanzy/go-gitlab/settings.go | 326 +- vendor/github.com/xanzy/go-gitlab/snippets.go | 16 +- vendor/github.com/xanzy/go-gitlab/tags.go | 12 +- .../github.com/xanzy/go-gitlab/time_stats.go | 10 +- vendor/github.com/xanzy/go-gitlab/topics.go | 152 + vendor/github.com/xanzy/go-gitlab/types.go | 136 +- vendor/github.com/xanzy/go-gitlab/users.go | 186 +- vendor/github.com/xanzy/go-gitlab/validate.go | 4 +- vendor/github.com/xanzy/go-gitlab/wikis.go | 10 +- vendor/github.com/yagipy/maintidx/.gitignore | 2 + vendor/github.com/yagipy/maintidx/LICENSE | 21 + vendor/github.com/yagipy/maintidx/Makefile | 2 + vendor/github.com/yagipy/maintidx/README.md | 45 + vendor/github.com/yagipy/maintidx/go.mod | 11 + vendor/github.com/yagipy/maintidx/go.sum | 28 + vendor/github.com/yagipy/maintidx/maintidx.go | 63 + .../github.com/yagipy/maintidx/pkg/cyc/cyc.go | 
36 + .../yagipy/maintidx/pkg/halstvol/halstvol.go | 71 + .../yagipy/maintidx/pkg/halstvol/handle.go | 151 + vendor/github.com/yagipy/maintidx/visitor.go | 77 + vendor/github.com/yeya24/promlinter/README.md | 36 +- .../yeya24/promlinter/promlinter.go | 4 + vendor/gitlab.com/bosi/decorder/.gitignore | 7 + .../bosi/decorder/.gitlab-ci.params.yml | 15 + .../gitlab.com/bosi/decorder/.gitlab-ci.yml | 61 + vendor/gitlab.com/bosi/decorder/LICENSE.md | 16 + vendor/gitlab.com/bosi/decorder/Makefile | 7 + vendor/gitlab.com/bosi/decorder/README.md | 40 + vendor/gitlab.com/bosi/decorder/analyzer.go | 196 + vendor/gitlab.com/bosi/decorder/go.mod | 11 + vendor/gitlab.com/bosi/decorder/go.sum | 30 + vendor/gitlab.com/bosi/decorder/renovate.json | 26 + vendor/golang.org/x/crypto/acme/acme.go | 441 +- .../x/crypto/acme/autocert/autocert.go | 112 +- .../x/crypto/acme/autocert/renewal.go | 35 +- vendor/golang.org/x/crypto/acme/jws.go | 13 +- vendor/golang.org/x/crypto/acme/rfc8555.go | 2 +- vendor/golang.org/x/crypto/acme/types.go | 8 - vendor/golang.org/x/crypto/sha3/sha3.go | 2 +- vendor/golang.org/x/mod/modfile/rule.go | 313 +- vendor/golang.org/x/oauth2/go.mod | 2 +- vendor/golang.org/x/oauth2/go.sum | 7 +- vendor/golang.org/x/sys/unix/ioctl_linux.go | 23 + vendor/golang.org/x/sys/unix/mkerrors.sh | 4 + vendor/golang.org/x/sys/unix/syscall_linux.go | 56 +- .../x/sys/unix/syscall_linux_386.go | 8 - .../x/sys/unix/syscall_linux_alarm.go | 14 + .../x/sys/unix/syscall_linux_amd64.go | 1 - .../x/sys/unix/syscall_linux_arm.go | 1 - .../x/sys/unix/syscall_linux_arm64.go | 1 - .../x/sys/unix/syscall_linux_mips64x.go | 1 - .../x/sys/unix/syscall_linux_mipsx.go | 1 - .../x/sys/unix/syscall_linux_ppc.go | 1 - .../x/sys/unix/syscall_linux_ppc64x.go | 1 - .../x/sys/unix/syscall_linux_riscv64.go | 1 - .../x/sys/unix/syscall_linux_s390x.go | 9 - .../x/sys/unix/syscall_linux_sparc64.go | 1 - vendor/golang.org/x/sys/unix/zerrors_linux.go | 23 +- .../golang.org/x/sys/unix/zsyscall_linux.go | 20 + .../x/sys/unix/zsyscall_linux_386.go | 13 +- .../x/sys/unix/zsyscall_linux_amd64.go | 24 +- .../x/sys/unix/zsyscall_linux_arm.go | 11 - .../x/sys/unix/zsyscall_linux_arm64.go | 11 - .../x/sys/unix/zsyscall_linux_mips.go | 24 +- .../x/sys/unix/zsyscall_linux_mips64.go | 24 +- .../x/sys/unix/zsyscall_linux_mips64le.go | 11 - .../x/sys/unix/zsyscall_linux_mipsle.go | 24 +- .../x/sys/unix/zsyscall_linux_ppc.go | 24 +- .../x/sys/unix/zsyscall_linux_ppc64.go | 24 +- .../x/sys/unix/zsyscall_linux_ppc64le.go | 24 +- .../x/sys/unix/zsyscall_linux_riscv64.go | 11 - .../x/sys/unix/zsyscall_linux_s390x.go | 13 +- .../x/sys/unix/zsyscall_linux_sparc64.go | 24 +- vendor/golang.org/x/sys/unix/ztypes_linux.go | 93 + .../x/sys/unix/ztypes_linux_s390x.go | 4 +- .../go/analysis/passes/composite/composite.go | 75 +- .../go/analysis/passes/copylock/copylock.go | 75 +- .../go/analysis/passes/ctrlflow/ctrlflow.go | 6 +- .../passes/ifaceassert/ifaceassert.go | 6 + .../passes/ifaceassert/parameterized.go | 112 + .../go/analysis/passes/nilfunc/nilfunc.go | 7 + .../tools/go/analysis/passes/printf/printf.go | 49 +- .../tools/go/analysis/passes/printf/types.go | 217 +- .../x/tools/go/analysis/passes/shift/shift.go | 36 +- .../go/analysis/passes/sortslice/analyzer.go | 5 +- .../analysis/passes/stdmethods/stdmethods.go | 6 +- .../analysis/passes/stringintconv/string.go | 153 +- .../testinggoroutine/testinggoroutine.go | 20 +- .../x/tools/go/analysis/passes/tests/tests.go | 10 + .../go/analysis/passes/unmarshal/unmarshal.go | 3 +- 
.../passes/unusedresult/unusedresult.go | 6 + .../x/tools/go/ast/astutil/enclosing.go | 20 +- .../x/tools/go/ast/astutil/rewrite.go | 12 +- .../x/tools/go/ast/inspector/typeof.go | 9 +- .../x/tools/go/gcexportdata/gcexportdata.go | 23 +- .../x/tools/go/internal/gcimporter/bexport.go | 3 - .../x/tools/go/internal/gcimporter/bimport.go | 51 +- .../go/internal/gcimporter/exportdata.go | 16 +- .../go/internal/gcimporter/gcimporter.go | 12 +- .../x/tools/go/internal/gcimporter/iexport.go | 172 +- .../x/tools/go/internal/gcimporter/iimport.go | 166 +- .../go/internal/gcimporter/support_go117.go | 4 +- .../go/internal/gcimporter/support_go118.go | 7 +- .../x/tools/go/packages/packages.go | 5 + vendor/golang.org/x/tools/go/ssa/doc.go | 4 +- vendor/golang.org/x/tools/go/ssa/print.go | 11 +- .../golang.org/x/tools/go/ssa/ssautil/load.go | 2 +- vendor/golang.org/x/tools/go/ssa/testmain.go | 274 - .../x/tools/go/types/objectpath/objectpath.go | 151 +- .../x/tools/go/types/typeutil/callee.go | 29 +- .../x/tools/go/types/typeutil/map.go | 138 +- .../x/tools/internal/imports/sortimports.go | 15 +- .../x/tools/internal/lsp/fuzzy/symbol.go | 26 +- .../x/tools/internal/typeparams/common.go | 78 +- .../internal/typeparams/enabled_go117.go | 4 +- .../internal/typeparams/enabled_go118.go | 4 +- .../x/tools/internal/typeparams/normalize.go | 216 + .../x/tools/internal/typeparams/termlist.go | 172 + .../internal/typeparams/typeparams_go117.go | 105 +- .../internal/typeparams/typeparams_go118.go | 83 +- .../x/tools/internal/typeparams/typeterm.go | 170 + vendor/gopkg.in/ini.v1/README.md | 2 +- vendor/gopkg.in/ini.v1/file.go | 11 +- vendor/gopkg.in/ini.v1/parser.go | 26 +- .../tools/analysis/facts/nilness/nilness.go | 7 + vendor/honnef.co/go/tools/go/ir/UPSTREAM | 2 +- vendor/honnef.co/go/tools/go/ir/doc.go | 99 +- vendor/honnef.co/go/tools/go/ir/emit.go | 4 +- vendor/honnef.co/go/tools/go/ir/exits.go | 39 + vendor/honnef.co/go/tools/go/ir/print.go | 9 +- vendor/honnef.co/go/tools/go/ir/sanity.go | 8 +- vendor/honnef.co/go/tools/go/ir/ssa.go | 21 +- vendor/honnef.co/go/tools/simple/doc.go | 3 +- vendor/honnef.co/go/tools/simple/lint.go | 3 +- .../go/tools/staticcheck/fakejson/encode.go | 373 + .../staticcheck/fakereflect/fakereflect.go | 131 + .../go/tools/staticcheck/fakexml/marshal.go | 375 + .../go/tools/staticcheck/fakexml/typeinfo.go | 383 + .../go/tools/staticcheck/fakexml/xml.go | 33 + vendor/honnef.co/go/tools/staticcheck/lint.go | 139 +- vendor/honnef.co/go/tools/unused/unused.go | 15 +- vendor/modules.txt | 143 +- vendor/mvdan.cc/gofumpt/format/format.go | 474 +- vendor/mvdan.cc/gofumpt/format/rewrite.go | 113 + vendor/mvdan.cc/gofumpt/format/simplify.go | 165 + .../gofumpt/internal/version/version.go | 31 + 624 files changed, 32400 insertions(+), 14470 deletions(-) create mode 100644 vendor/github.com/BurntSushi/toml/error.go delete mode 100644 vendor/github.com/BurntSushi/toml/go.sum rename vendor/github.com/BurntSushi/toml/{decode_meta.go => meta.go} (76%) rename vendor/github.com/BurntSushi/toml/{type_check.go => type_toml.go} (98%) create mode 100644 vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go delete mode 100644 vendor/github.com/blizzy78/varnamelen/.travis.yml create mode 100644 vendor/github.com/blizzy78/varnamelen/flags.go create mode 100644 vendor/github.com/breml/bidichk/pkg/bidichk/version.go create mode 100644 vendor/github.com/breml/errchkjson/.gitignore create mode 100644 vendor/github.com/breml/errchkjson/.goreleaser.yml create mode 100644 
vendor/github.com/breml/errchkjson/LICENSE create mode 100644 vendor/github.com/breml/errchkjson/README.md create mode 100644 vendor/github.com/breml/errchkjson/errchkjson.go create mode 100644 vendor/github.com/breml/errchkjson/go.mod create mode 100644 vendor/github.com/breml/errchkjson/go.sum create mode 100644 vendor/github.com/breml/errchkjson/noexported_error.go create mode 100644 vendor/github.com/breml/errchkjson/unsupported_error.go create mode 100644 vendor/github.com/breml/errchkjson/version.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/analyzer/errors.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/configuration/formatter.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/constants/sequences.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/configuration.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/errors.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/format.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/imports/errors.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/imports/import.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/parse.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/commentline.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/default.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/errors.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/newline.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/prefix.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/section.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/sectionparser.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/sectiontype.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage.go rename vendor/github.com/daixiang0/gci/pkg/gci/{std.go => sections/standardpackage_list.go} (96%) create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/specificity/default.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/specificity/match.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/specificity/mismatch.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/specificity/specificity.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/specificity/standard.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/io/file.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/io/search.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/io/stdin.go create mode 100644 vendor/github.com/fzipp/gocyclo/recv.go create mode 100644 vendor/github.com/fzipp/gocyclo/recv_pre118.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go rename vendor/github.com/go-critic/go-critic/framework/linter/{lintpack.go => linter.go} (99%) create mode 100644 vendor/github.com/golang-jwt/jwt/v4/parser_option.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go create mode 100644 
vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go rename vendor/github.com/golangci/golangci-lint/pkg/golinters/{rowerrcheck.go => rowserrcheck.go} (100%) delete mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go delete mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go create mode 100644 vendor/github.com/hexops/gotextdiff/LICENSE create mode 100644 vendor/github.com/hexops/gotextdiff/README.md create mode 100644 vendor/github.com/hexops/gotextdiff/diff.go create mode 100644 vendor/github.com/hexops/gotextdiff/go.mod create mode 100644 vendor/github.com/hexops/gotextdiff/myers/diff.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/parse.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/span.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/token.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/token111.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/token112.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/uri.go create mode 100644 vendor/github.com/hexops/gotextdiff/span/utf16.go create mode 100644 vendor/github.com/hexops/gotextdiff/unified.go create mode 100644 vendor/github.com/leonklingele/grouper/LICENSE create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go create mode 100644 vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go create mode 100644 vendor/github.com/mgechev/revive/rule/banned-characters.go create mode 100644 vendor/github.com/mgechev/revive/rule/optimize-operands-order.go create mode 100644 vendor/github.com/mgechev/revive/rule/time-equal.go delete mode 100644 vendor/github.com/nishanths/exhaustive/.travis.yml create mode 100644 vendor/github.com/nishanths/exhaustive/Makefile create mode 100644 vendor/github.com/nishanths/exhaustive/comment.go create mode 100644 vendor/github.com/nishanths/exhaustive/fact.go delete mode 100644 vendor/github.com/nishanths/exhaustive/generated.go delete mode 100644 vendor/github.com/nishanths/exhaustive/regexp_flag.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go create mode 100644 vendor/github.com/quasilyte/gogrep/.gitignore create mode 100644 vendor/github.com/quasilyte/gogrep/.golangci.yml create mode 100644 vendor/github.com/quasilyte/gogrep/LICENSE create mode 100644 vendor/github.com/quasilyte/gogrep/Makefile create mode 100644 vendor/github.com/quasilyte/gogrep/README.md rename vendor/github.com/quasilyte/{go-ruleguard/internal => 
}/gogrep/compile.go (95%) create mode 100644 vendor/github.com/quasilyte/gogrep/compile_import.go rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/gen_operations.go (96%) create mode 100644 vendor/github.com/quasilyte/gogrep/go.mod create mode 100644 vendor/github.com/quasilyte/gogrep/go.sum rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/gogrep.go (58%) rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/instructions.go (100%) create mode 100644 vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/match.go (98%) rename vendor/github.com/quasilyte/{go-ruleguard => gogrep}/nodetag/nodetag.go (99%) rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/operation_string.go (66%) rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/operations.gen.go (98%) rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/parse.go (98%) rename vendor/github.com/quasilyte/{go-ruleguard/internal => }/gogrep/slices.go (100%) create mode 100644 vendor/github.com/sivchari/containedctx/.golangci.yml create mode 100644 vendor/github.com/sivchari/containedctx/LICENCE create mode 100644 vendor/github.com/sivchari/containedctx/README.md create mode 100644 vendor/github.com/sivchari/containedctx/containedctx.go create mode 100644 vendor/github.com/sivchari/containedctx/go.mod create mode 100644 vendor/github.com/sivchari/containedctx/go.sum create mode 100644 vendor/github.com/spf13/viper/fs.go create mode 100644 vendor/github.com/spf13/viper/logger.go create mode 100644 vendor/github.com/spf13/viper/viper_go1_15.go create mode 100644 vendor/github.com/spf13/viper/viper_go1_16.go create mode 100644 vendor/github.com/tdakkota/asciicheck/go.sum create mode 100644 vendor/github.com/xanzy/go-gitlab/group_access_tokens.go create mode 100644 vendor/github.com/xanzy/go-gitlab/group_iterations.go create mode 100644 vendor/github.com/xanzy/go-gitlab/markdown.go create mode 100644 vendor/github.com/xanzy/go-gitlab/project_iterations.go create mode 100644 vendor/github.com/xanzy/go-gitlab/repository_submodules.go create mode 100644 vendor/github.com/xanzy/go-gitlab/topics.go create mode 100644 vendor/github.com/yagipy/maintidx/.gitignore create mode 100644 vendor/github.com/yagipy/maintidx/LICENSE create mode 100644 vendor/github.com/yagipy/maintidx/Makefile create mode 100644 vendor/github.com/yagipy/maintidx/README.md create mode 100644 vendor/github.com/yagipy/maintidx/go.mod create mode 100644 vendor/github.com/yagipy/maintidx/go.sum create mode 100644 vendor/github.com/yagipy/maintidx/maintidx.go create mode 100644 vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go create mode 100644 vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go create mode 100644 vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go create mode 100644 vendor/github.com/yagipy/maintidx/visitor.go create mode 100644 vendor/gitlab.com/bosi/decorder/.gitignore create mode 100644 vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml create mode 100644 vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml create mode 100644 vendor/gitlab.com/bosi/decorder/LICENSE.md create mode 100644 vendor/gitlab.com/bosi/decorder/Makefile create mode 100644 vendor/gitlab.com/bosi/decorder/README.md create mode 100644 vendor/gitlab.com/bosi/decorder/analyzer.go create mode 100644 vendor/gitlab.com/bosi/decorder/go.mod create mode 100644 vendor/gitlab.com/bosi/decorder/go.sum create 
mode 100644 vendor/gitlab.com/bosi/decorder/renovate.json
 create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_alarm.go
 create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go
 delete mode 100644 vendor/golang.org/x/tools/go/ssa/testmain.go
 create mode 100644 vendor/golang.org/x/tools/internal/typeparams/normalize.go
 create mode 100644 vendor/golang.org/x/tools/internal/typeparams/termlist.go
 create mode 100644 vendor/golang.org/x/tools/internal/typeparams/typeterm.go
 create mode 100644 vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go
 create mode 100644 vendor/honnef.co/go/tools/staticcheck/fakereflect/fakereflect.go
 create mode 100644 vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go
 create mode 100644 vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go
 create mode 100644 vendor/honnef.co/go/tools/staticcheck/fakexml/xml.go
 create mode 100644 vendor/mvdan.cc/gofumpt/format/rewrite.go
 create mode 100644 vendor/mvdan.cc/gofumpt/format/simplify.go
 create mode 100644 vendor/mvdan.cc/gofumpt/internal/version/version.go
diff --git a/go.mod b/go.mod
index d3e3cd199..221c47357 100644
--- a/go.mod
+++ b/go.mod
@@ -7,35 +7,33 @@ require (
 github.com/Microsoft/go-winio v0.5.1 // indirect
 github.com/bmatcuk/doublestar/v4 v4.0.2
 github.com/containerd/containerd v1.5.9 // indirect
- github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
- github.com/docker/cli v20.10.10+incompatible
- github.com/docker/distribution v2.7.1+incompatible
- github.com/docker/docker v20.10.10+incompatible
+ github.com/docker/cli v20.10.12+incompatible
+ github.com/docker/distribution v2.8.0+incompatible
+ github.com/docker/docker v20.10.12+incompatible
 github.com/docker/docker-credential-helpers v0.6.4 // indirect
 github.com/docker/go-connections v0.4.0 // indirect
 github.com/docker/go-units v0.4.0
 github.com/drone/envsubst v1.0.3
 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568
 github.com/franela/goblin v0.0.0-20211003143422-0a4f594942bf
- github.com/gin-gonic/gin v1.7.4
- github.com/go-playground/validator/v10 v10.9.0 // indirect
+ github.com/gin-gonic/gin v1.7.7
+ github.com/go-playground/validator/v10 v10.10.0 // indirect
 github.com/go-sql-driver/mysql v1.6.0
 github.com/gogits/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
- github.com/golang-jwt/jwt/v4 v4.1.0
- github.com/golangci/golangci-lint v1.43.0
+ github.com/golang-jwt/jwt/v4 v4.3.0
+ github.com/golangci/golangci-lint v1.44.2
 github.com/google/go-github/v39 v39.2.0
 github.com/gorilla/securecookie v1.1.1
- github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
 github.com/hashicorp/go-retryablehttp v0.7.0 // indirect
 github.com/joho/godotenv v1.4.0
 github.com/lib/pq v1.10.4
- github.com/mattn/go-sqlite3 v1.14.9
- github.com/moby/moby v20.10.10+incompatible
+ github.com/mattn/go-sqlite3 v1.14.11
+ github.com/moby/moby v20.10.12+incompatible
 github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6
 github.com/morikuni/aec v1.0.0 // indirect
 github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450
 github.com/pkg/errors v0.9.1
- github.com/prometheus/client_golang v1.12.0
+ github.com/prometheus/client_golang v1.12.1
 github.com/rs/zerolog v1.26.1
 github.com/stretchr/objx v0.3.0 // indirect
 github.com/stretchr/testify v1.7.0
@@ -43,15 +41,15 @@ require (
 github.com/ugorji/go v1.2.6 // indirect
 github.com/urfave/cli/v2 v2.3.0
 github.com/woodpecker-ci/expr v0.0.0-20210628233344-164b8b3d0915
- github.com/xanzy/go-gitlab v0.52.2
+ github.com/xanzy/go-gitlab v0.55.1
 github.com/xeipuuv/gojsonpointer
v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonschema v1.2.0 - golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed + golang.org/x/crypto v0.0.0-20220214200702-86341886e292 golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd - golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 + golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b golang.org/x/sync v0.0.0-20210220032951-036812b2e83c - golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27 // indirect - golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 // indirect + golang.org/x/sys v0.0.0-20220224120231-95c6836cb0e7 // indirect + golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 // indirect google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350 // indirect google.golang.org/grpc v1.44.0 google.golang.org/protobuf v1.27.1 diff --git a/go.sum b/go.sum index 915d083b1..5fadb2e61 100644 --- a/go.sum +++ b/go.sum @@ -28,6 +28,10 @@ cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSU cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -36,8 +40,8 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -75,9 +79,10 @@ github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935 github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw= -github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.0.0 h1:dtDWrepsVPfW9H/4y7dDgFc2MBUSeJhlaDtK13CxFlU= +github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb 
v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= @@ -110,14 +115,13 @@ github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5 github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/OpenPeeDeeP/depguard v1.0.1 h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us= -github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= +github.com/OpenPeeDeeP/depguard v1.1.0 h1:pjK9nLPS1FwQYGGpPxoMYpe7qACHOhAWQMQzV71i49o= +github.com/OpenPeeDeeP/depguard v1.1.0/go.mod h1:JtAMzWkmFEzDPyAd+W0NHl1lvpQKTvT9jnRVsohBKpc= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= -github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -138,14 +142,15 @@ github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= -github.com/ashanbrown/forbidigo v1.2.0 h1:RMlEFupPCxQ1IogYOQUnIQwGEUGK8g5vAPMRyJoSxbc= -github.com/ashanbrown/forbidigo v1.2.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= -github.com/ashanbrown/makezero 
v0.0.0-20210520155254-b6261585ddde h1:YOsoVXsZQPA9aOTy1g0lAJv5VzZUvwQuZqug8XPeqfM= -github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= +github.com/ashanbrown/forbidigo v1.3.0 h1:VkYIwb/xxdireGAdJNZoo24O4lmnEWkactplBlWTShc= +github.com/ashanbrown/forbidigo v1.3.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= +github.com/ashanbrown/makezero v1.1.0 h1:b2FVq4dTlBpy9f6qxhbyWH+6zy56IETE9cFbBGtDqs8= +github.com/ashanbrown/makezero v1.1.0/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= @@ -161,20 +166,21 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/bkielbasa/cyclop v1.2.0 h1:7Jmnh0yL2DjKfw28p86YTd/B4lRGcNuu12sKE35sM7A= github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= -github.com/blizzy78/varnamelen v0.3.0 h1:80mYO7Y5ppeEefg1Jzu+NBg16iwToOQVnDnNIoWSShs= -github.com/blizzy78/varnamelen v0.3.0/go.mod h1:hbwRdBvoBqxk34XyQ6HA0UH3G0/1TKuv5AC4eaBT0Ec= +github.com/blizzy78/varnamelen v0.6.0 h1:TOIDk9qRIMspALZKX8x+5hQfAjuvAFogppnxtvuNmBo= +github.com/blizzy78/varnamelen v0.6.0/go.mod h1:zy2Eic4qWqjrxa60jG34cfL0VXcSwzUrIx68eJPb4Q8= github.com/bmatcuk/doublestar/v4 v4.0.2 h1:X0krlUVAVmtr2cRoTqR8aDMrDqnB36ht8wpWTiQ3jsA= github.com/bmatcuk/doublestar/v4 v4.0.2/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/bombsimon/wsl/v3 v3.3.0 h1:Mka/+kRLoQJq7g2rggtgQsjuI/K5Efd87WX96EWFxjM= github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= -github.com/breml/bidichk v0.1.1 h1:Qpy8Rmgos9qdJxhka0K7ADEE5bQZX9PQUthkgggHpFM= -github.com/breml/bidichk v0.1.1/go.mod h1:zbfeitpevDUGI7V91Uzzuwrn4Vls8MoBMrwtt78jmso= +github.com/breml/bidichk v0.2.2 h1:w7QXnpH0eCBJm55zGCTJveZEkQBt6Fs5zThIdA6qQ9Y= +github.com/breml/bidichk v0.2.2/go.mod h1:zbfeitpevDUGI7V91Uzzuwrn4Vls8MoBMrwtt78jmso= +github.com/breml/errchkjson v0.2.3 h1:97eGTmR/w0paL2SwfRPI1jaAZHaH/fXnxWTw2eEIqE0= +github.com/breml/errchkjson v0.2.3/go.mod h1:jZEATw/jF69cL1iy7//Yih8yp/mXp2CBoBr9GJwCAsY= github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= @@ -186,6 +192,7 @@ github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n github.com/cenkalti/backoff v2.2.1+incompatible/go.mod 
h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -205,6 +212,8 @@ github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLI github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= github.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= @@ -214,7 +223,9 @@ github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= @@ -335,8 +346,8 @@ github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1S github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= -github.com/daixiang0/gci v0.2.9 h1:iwJvwQpBZmMg31w+QQ6jsyZ54KEATn6/nfARbBNW294= -github.com/daixiang0/gci v0.2.9/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= +github.com/daixiang0/gci v0.3.1-0.20220208004058-76d765e3ab48 h1:9rJGqaC5do9zkvKrtRdx0HJoxj7Jd6vDa0O2eBU0AbU= +github.com/daixiang0/gci v0.3.1-0.20220208004058-76d765e3ab48/go.mod h1:jaASoJmv/ykO9dAAPy31iJnreV19248qKDdVWf3QgC4= github.com/danieljoos/wincred v1.1.0/go.mod 
h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -350,14 +361,15 @@ github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11 github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= -github.com/docker/cli v20.10.10+incompatible h1:kcbwdgWbrBOH8QwQzaJmyriHwF7XIl4HT1qh0HTRys4= -github.com/docker/cli v20.10.10+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v20.10.12+incompatible h1:lZlz0uzG+GH+c0plStMUdF/qk3ppmgnswpR5EbqzVGA= +github.com/docker/cli v20.10.12+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v20.10.10+incompatible h1:GKkP0T7U4ks6X3lmmHKC2QDprnpRJor2Z5a8m62R9ZM= -github.com/docker/docker v20.10.10+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/distribution v2.8.0+incompatible h1:l9EaZDICImO1ngI+uTifW+ZYvvz7fKISBAKpg+MbWbY= +github.com/docker/distribution v2.8.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v20.10.12+incompatible h1:CEeNmFM0QZIsJCZKMkZx0ZcahTiewkrgiwfYD+dfl1U= +github.com/docker/docker v20.10.12+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.6.4 h1:axCks+yV+2MR3/kZhAmy07yC56WZ2Pwu/fKWtKuZB0o= github.com/docker/docker-credential-helpers v0.6.4/go.mod h1:ofX3UI0Gz1TteYBjtgs07O36Pyasyp66D2uKT7H8W1c= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= @@ -391,10 +403,12 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.0.14/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/esimonov/ifshort v1.0.3 h1:JD6x035opqGec5fZ0TLjXeROD2p5H7oLGn8MKfy9HTM= -github.com/esimonov/ifshort v1.0.3/go.mod h1:yZqNJUrNn20K8Q9n2CrjTKYyVEmX209Hgu+M1LBpeZE= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= +github.com/esimonov/ifshort v1.0.4 
h1:6SID4yGWfRae/M7hkVDVVyppy8q/v9OuxNdmjLQStBA= +github.com/esimonov/ifshort v1.0.4/go.mod h1:Pe8zjlRrJ80+q2CxHLfEOfTwxCZ4O+MuhcHcfgNWTk0= github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= @@ -413,23 +427,25 @@ github.com/franela/goblin v0.0.0-20211003143422-0a4f594942bf h1:NrF81UtW8gG2LBGk github.com/franela/goblin v0.0.0-20211003143422-0a4f594942bf/go.mod h1:VzmDKDJVZI3aJmnRI9VjAn9nJ8qPPsN1fqzr9dqInIo= github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= +github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss= +github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= -github.com/fzipp/gocyclo v0.3.1 h1:A9UeX3HJSXTBzvHzhqoYVuE0eAhe+aM8XBCCwsPMZOc= -github.com/fzipp/gocyclo v0.3.1/go.mod h1:DJHO6AUmbdqj2ET4Z9iArSuwWgYDRryYt2wASxc7x3E= +github.com/fzipp/gocyclo v0.4.0 h1:IykTnjwh2YLyYkGa0y92iTTEQcnyAz0r9zOo15EbJ7k= +github.com/fzipp/gocyclo v0.4.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.7.4 h1:QmUZXrvJ9qZ3GfWvQ+2wnW/1ePrTEJqPKMYEU3lD/DM= -github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= -github.com/go-critic/go-critic v0.6.1 h1:lS8B9LH/VVsvQQP7Ao5TJyQqteVKVs3E4dXiHMyubtI= -github.com/go-critic/go-critic v0.6.1/go.mod h1:SdNCfU0yF3UBjtaZGw6586/WocupMOJuiqgom5DsQxM= +github.com/gin-gonic/gin v1.7.7 h1:3DoBmSbJbZAWqXJC3SLjAPfutPJJRN1U5pALB7EeTTs= +github.com/gin-gonic/gin v1.7.7/go.mod h1:axIBovoeJpVj8S3BwE0uPMTeReE4+AfFtqpqaZ1qq1U= +github.com/go-critic/go-critic v0.6.2 h1:L5SDut1N4ZfsWZY0sH4DCrsHLHnhuuWak2wa165t9gs= +github.com/go-critic/go-critic v0.6.2/go.mod h1:td1s27kfmLpe5G/DPjlnFI7o1UCzePptwU7Az0V5iCM= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -443,7 +459,6 @@ github.com/go-logfmt/logfmt v0.4.0/go.mod 
h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= -github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= @@ -461,8 +476,8 @@ github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+ github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-playground/validator/v10 v10.9.0 h1:NgTtmN58D0m8+UuxtYmGztBJB7VnPgjj221I1QHci2A= -github.com/go-playground/validator/v10 v10.9.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= +github.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh+BF8dHX5nt/dr0= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= @@ -479,14 +494,12 @@ github.com/go-toolsmith/astequal v1.0.1 h1:JbSszi42Jiqu36Gnf363HWS9MTEAz67vTQLpo github.com/go-toolsmith/astequal v1.0.1/go.mod h1:4oGA3EZXTVItV/ipGiOx7NWkY5veFfcsOJVS2YxltLw= github.com/go-toolsmith/astfmt v1.0.0 h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k= github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= -github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= github.com/go-toolsmith/astp v1.0.0 h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg= github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= -github.com/go-toolsmith/pkgload v1.0.0 h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg= -github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= +github.com/go-toolsmith/pkgload v1.0.2-0.20220101231613-e814995d17c5 h1:eD9POs68PHkwrx7hAB78z1cb6PfGq/jyWn3wJywsH1o= +github.com/go-toolsmith/pkgload v1.0.2-0.20220101231613-e814995d17c5/go.mod h1:3NAwwmD4uY/yggRxoEjk/S00MIV3A+H7rrE3i87eYxM= github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= -github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= github.com/go-toolsmith/typep v1.0.2 h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk= github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= @@ -517,8 +530,8 @@ github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXP 
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt/v4 v4.1.0 h1:XUgk2Ex5veyVFVeLm0xhusUTQybEbexJXrvPNOKkSY0= -github.com/golang-jwt/jwt/v4 v4.1.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.3.0 h1:kHL1vqdqWNfATmA0FNMdmZNMyZI1U6O31X4rlIPoBog= +github.com/golang-jwt/jwt/v4 v4.3.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -526,6 +539,7 @@ github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4er github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -565,8 +579,8 @@ github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZB github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= -github.com/golangci/golangci-lint v1.43.0 h1:SLwZFEmDgopqZpfP495zCtV9REUf551JJlJ51Ql7NZA= -github.com/golangci/golangci-lint v1.43.0/go.mod h1:VIFlUqidx5ggxDfQagdvd9E67UjMXtTHBkBQ7sHoC5Q= +github.com/golangci/golangci-lint v1.44.2 h1:MzvkDt1j1OHkv42/feNJVNNXRFACPp7aAWBWDo5aYQw= +github.com/golangci/golangci-lint v1.44.2/go.mod h1:KjBgkLvsTWDkhfu12iCrv0gwL1kON5KNhbyjQ6qN7Jo= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= @@ -592,11 +606,11 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7 
h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-github/v39 v39.2.0 h1:rNNM311XtPOz5rDdsJXAp2o8F67X9FnROXTvto3aSnQ= github.com/google/go-github/v39 v39.2.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -617,6 +631,7 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -631,12 +646,13 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= -github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= +github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= -github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254 h1:Nb2aRlC404yz7gQIfRZxX9/MLvQiqXyiBTJtgAy6yrI= -github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254/go.mod h1:M9mZEtGIsR1oDaZagNPNG9iq9n2HrhZ17dsXk73V3Lw= +github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8 h1:PVRE9d4AQKmbelZ7emNig1+NT27DUmKZn5qXxfio54U= +github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= @@ -661,8 +677,8 @@ github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnq github.com/gostaticanalysis/comment v1.4.1/go.mod 
h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= github.com/gostaticanalysis/comment v1.4.2 h1:hlnx5+S2fY9Zo9ePo4AhgYsYHbM2+eAv8m/s1JiCd6Q= github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= -github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5 h1:rx8127mFPqXXsfPSo8BwnIU97MKFZc89WHAHt8PwDVY= -github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= +github.com/gostaticanalysis/forcetypeassert v0.1.0 h1:6eUflI3DiGusXGK6X7cCcIgVCpZ2CiZ1Q7jl6ZxNV70= +github.com/gostaticanalysis/forcetypeassert v0.1.0/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk= github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= @@ -678,28 +694,32 @@ github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v0.12.0 h1:d4QkX8FRTYaKaCZBoXYY8zJX2BXjWxurN/GA2tkrmZM= github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.0.0 h1:bkKf0BeBXcSYa7f5Fyi9gMuQ8gNsxeiNpZjR6VxNZeo= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack 
v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/go-retryablehttp v0.7.0 h1:eu1EI/mbirUgP5C8hVsTNaGZreBDlYiwC1FZWkvQPQ4= github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= @@ -721,14 +741,20 @@ github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= @@ -821,8 +847,8 @@ github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfV github.com/juju/ratelimit v1.0.1/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d h1:XeSMXURZPtUffuWAaq90o6kLgZdgu+QA8wk4MPC8ikI= -github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= 
+github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= +github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= @@ -853,8 +879,8 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kulti/thelper v0.4.0 h1:2Nx7XbdbE/BYZeoip2mURKUdtHQRuy6Ug+wR7K9ywNM= -github.com/kulti/thelper v0.4.0/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= +github.com/kulti/thelper v0.5.1 h1:Uf4CUekH0OvzQTFPrWkstJvXgm6pnNEtQu3HiqEkpB0= +github.com/kulti/thelper v0.5.1/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= github.com/kunwardeep/paralleltest v1.0.3 h1:UdKIkImEAXjR1chUWLn+PNXqWUGs//7tzMeWuP7NhmI= github.com/kunwardeep/paralleltest v1.0.3/go.mod h1:vLydzomDFpk7yu5UX02RmP0H8QfRPOV/oFhWN85Mjb4= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= @@ -862,11 +888,13 @@ github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77 github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= github.com/ldez/gomoddirectives v0.2.2 h1:p9/sXuNFArS2RLc+UpYZSI4KQwGMEDWC/LbtF5OPFVg= github.com/ldez/gomoddirectives v0.2.2/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= -github.com/ldez/tagliatelle v0.2.0 h1:693V8Bf1NdShJ8eu/s84QySA0J2VWBanVBa2WwXD/Wk= -github.com/ldez/tagliatelle v0.2.0/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= +github.com/ldez/tagliatelle v0.3.1 h1:3BqVVlReVUZwafJUwQ+oxbx2BEX2vUG4Yu/NOfMiKiM= +github.com/ldez/tagliatelle v0.3.1/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/leonklingele/grouper v1.1.0 h1:tC2y/ygPbMFSBOs3DcyaEMKnnwH7eYKzohOtRrf0SAg= +github.com/leonklingele/grouper v1.1.0/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY= github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= @@ -875,13 +903,13 @@ github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.4 h1:SO9z7FRPzA03QhHKJrH5BXA6HU1rS4V2nIVrrNC1iYk= github.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lightstep/lightstep-tracer-common/golang/gogo 
v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= @@ -903,8 +931,8 @@ github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVc github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= -github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= @@ -925,9 +953,8 @@ github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vq github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.8/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/go-sqlite3 v1.14.9 h1:10HX2Td0ocZpYEjhilsuo6WWtUqttj2Kb0KtD86/KYA= -github.com/mattn/go-sqlite3 v1.14.9/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= +github.com/mattn/go-sqlite3 v1.14.11 h1:gt+cp9c0XGqe9S/wAHTL3n/7MqY+siPWgWJgqdsFrzQ= +github.com/mattn/go-sqlite3 v1.14.11/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= @@ -935,11 +962,12 @@ github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwg github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517 h1:zpIH83+oKzcpryru8ceC6BxnoG8TBrhgAvRg8obzup0= github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= -github.com/mgechev/revive v1.1.2 h1:MiYA/o9M7REjvOF20QN43U8OtXDDHQFKLCtJnxLGLog= -github.com/mgechev/revive 
v1.1.2/go.mod h1:bnXsMr+ZTH09V5rssEI+jHAZ4z+ZdyhgO/zsy3EhK+0= +github.com/mgechev/revive v1.1.4 h1:sZOjY6GU35Kr9jKa/wsKSHgrFz8eASIB5i3tqWZMp0A= +github.com/mgechev/revive v1.1.4/go.mod h1:ZZq2bmyssGh8MSPz3VVziqRNIMYTJXzP8MUKG90vZ9A= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= @@ -955,15 +983,15 @@ github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS4 github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= -github.com/moby/moby v20.10.10+incompatible h1:KriJ8Zcm+NrFlaI0HNi2GtbfsT6J33o+XHmpAWZ6E7M= -github.com/moby/moby v20.10.10+incompatible/go.mod h1:fDXVQ6+S340veQPv35CzDahGBmHsiclFwfEygB/TWMc= +github.com/moby/moby v20.10.12+incompatible h1:MJVrdG0tIQqVJQBTdtooPuZQFIgski5pYTXlcW8ToE0= +github.com/moby/moby v20.10.12+incompatible/go.mod h1:fDXVQ6+S340veQPv35CzDahGBmHsiclFwfEygB/TWMc= github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= @@ -1007,8 +1035,8 @@ github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6Fx github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/nishanths/exhaustive v0.2.3 h1:+ANTMqRNrqwInnP9aszg/0jDo+zbXa4x66U19Bx/oTk= -github.com/nishanths/exhaustive v0.2.3/go.mod h1:bhIX678Nx8inLM9PbpvK1yv6oGtoP8BfaIeMzgBNKvc= +github.com/nishanths/exhaustive v0.7.11 h1:xV/WU3Vdwh5BUH4N06JNUznb6d5zhRPOnlgCrpNYNKA= +github.com/nishanths/exhaustive v0.7.11/go.mod 
h1:gX+MP7DWMKJmNa1HfMozK+u04hQd3na9i0hyqf3/dOI= github.com/nishanths/predeclared v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= github.com/nishanths/predeclared v0.2.1 h1:1TXtjmy4f3YCFjTxRd8zcFHOmoUir+gp0ESzjFzG2sw= github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= @@ -1033,6 +1061,8 @@ github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+ github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo/v2 v2.0.0 h1:CcuG/HvWNkkaqCUpJifQY8z7qEMBJya6aLPx6ftGyjQ= +github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= @@ -1040,8 +1070,8 @@ github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1Cpa github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= -github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= -github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.17.0 h1:9Luw4uT5HTjHTN8+aNcSThgH1vdXnmdJ8xIfZ4wyTRE= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -1085,10 +1115,10 @@ github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT9 github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= @@ -1108,10 +1138,11 @@ github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZ github.com/pmezard/go-difflib 
v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349 h1:Kq/3kL0k033ds3tyez5lFPrfQ74fNJ+OqCclRipubwA= -github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= +github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b h1:/BDyEJWLnDUYKGWdlNx/82qSaVu2bUok/EvPUtIGuvw= +github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= @@ -1120,10 +1151,11 @@ github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDf github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.0 h1:C+UIj/QWtmqY13Arb8kwMt5j34/0Z2iKamrJ+ryC0Gg= -github.com/prometheus/client_golang v1.12.0/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -1139,6 +1171,7 @@ github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8 github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= github.com/prometheus/common v0.32.1 
h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= @@ -1160,14 +1193,17 @@ github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1 github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= -github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= -github.com/quasilyte/go-ruleguard v0.3.13 h1:O1G41cq1jUr3cJmqp7vOUT0SokqjzmS9aESWJuIDRaY= -github.com/quasilyte/go-ruleguard v0.3.13/go.mod h1:Ul8wwdqR6kBVOCt2dipDBkE+T6vAV/iixkrKuRTN1oQ= +github.com/quasilyte/go-ruleguard v0.3.15 h1:iWYzp1z72IlXTioET0+XI6SjQdPfMGfuAiZiKznOt7g= +github.com/quasilyte/go-ruleguard v0.3.15/go.mod h1:NhuWhnlVEM1gT1A4VJHYfy9MuYSxxwHgxWoPsn9llB4= github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/dsl v0.3.10/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/dsl v0.3.12-0.20220101150716-969a394a9451/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/dsl v0.3.12/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/dsl v0.3.17/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20210428214800-545e0d2e0bf7/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= +github.com/quasilyte/go-ruleguard/rules v0.0.0-20211022131956-028d6511ab71/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= +github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3 h1:P4QPNn+TK49zJjXKERt/vyPbv/mCHB/zQ4flDYOMN+M= +github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3/go.mod h1:wSEyW6O61xRV6zb6My3HxrQ5/8ke7NE2OayqCHa3xRM= github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= @@ -1177,9 +1213,9 @@ github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6So github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg= +github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/xid v1.2.1/go.mod 
h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= @@ -1199,18 +1235,20 @@ github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0K github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= +github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= +github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= github.com/sanposhiho/wastedassign/v2 v2.0.6 h1:+6/hQIHKNJAUixEj6EmOngGIisyeI+T3335lYTyxRoA= github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= -github.com/securego/gosec/v2 v2.9.1 h1:anHKLS/ApTYU6NZkKa/5cQqqcbKZURjvc+MtR++S4EQ= -github.com/securego/gosec/v2 v2.9.1/go.mod h1:oDcDLcatOJxkCGaCaq8lua1jTnYf6Sou4wdiJ1n4iHc= +github.com/securego/gosec/v2 v2.9.6 h1:ysfvgQBp2zmTgXQl65UkqEkYlQGbnVSRUGpCrJiiR4c= +github.com/securego/gosec/v2 v2.9.6/go.mod h1:EESY9Ywxo/Zc5NyF/qIj6Cop+4PSWM0F0OfGD7FdIXc= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= -github.com/shirou/gopsutil/v3 v3.21.10/go.mod h1:t75NhzCZ/dYyPQjyQmrAYP6c8+LCdFANeBMdLPCNnew= +github.com/shirou/gopsutil/v3 v3.22.1/go.mod h1:WapW1AOOPlHyXr+yOyw3uYx36enocrtSoSBy0L5vUHY= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= @@ -1226,6 +1264,8 @@ github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrf github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sivchari/containedctx v1.0.1 h1:fJq44cX+tD+uT5xGrsg25GwiaY61NGybQk9WWKij3Uo= +github.com/sivchari/containedctx v1.0.1/go.mod h1:PwZOeqm4/DLoJOqMSIJs3aKqXRX4YO+uXww087KZ7Bw= github.com/sivchari/tenv v1.4.7 h1:FdTpgRlTue5eb5nXIYgS/lyVXSjugU8UUVDwhP1NLU8= github.com/sivchari/tenv v1.4.7/go.mod h1:5nF+bITvkebQVanjU6IuMbvIot/7ReNsUV7I5NbprB0= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= @@ -1240,18 +1280,18 @@ github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag07 github.com/spaolacci/murmur3 
v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw= -github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= +github.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0= +github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= @@ -1263,9 +1303,10 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= -github.com/spf13/viper v1.9.0 h1:yR6EXjTp0y0cLN8OZg1CRZmOBdI88UcGkhgyJhu6nZk= github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= +github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= +github.com/spf13/viper v1.10.1 h1:nuJZuYpG7gTj/XqiUwg8bA0cp1+M2mC3J4g5luUYBKk= +github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU= github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= @@ -1298,8 +1339,8 @@ github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= -github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U= -github.com/tdakkota/asciicheck 
v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= +github.com/tdakkota/asciicheck v0.1.1 h1:PKzG7JUTUmVspQTDqtkX9eSiLGossXTybutHwTXuO0A= +github.com/tdakkota/asciicheck v0.1.1/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= @@ -1308,8 +1349,8 @@ github.com/tetafro/godot v1.4.11 h1:BVoBIqAf/2QdbFmSwAWnaIqDivZdOV0ZRwEm6jivLKw= github.com/tetafro/godot v1.4.11/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= github.com/tevino/abool v1.2.0 h1:heAkClL8H6w+mK5md9dzsuohKeXHUpY7Vw0ZCKW+huA= github.com/tevino/abool v1.2.0/go.mod h1:qc66Pna1RiIsPa7O4Egxxs9OqkuxDX55zznh9K07Tzg= -github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94 h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8= -github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= +github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144 h1:kl4KhGNsJIbDHS9/4U9yQo1UcPQM0kOMJHn29EoH/Ro= +github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs= github.com/tklauser/numcpus v0.3.0/go.mod h1:yFGUr7TUHQRAhyqBcEg0Ge34zDBAsIvJJcyE6boqnA8= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= @@ -1318,8 +1359,9 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1 github.com/tomarrell/wrapcheck/v2 v2.4.0 h1:mU4H9KsqqPZUALOUbVOpjy8qNQbWLoLI9fV68/1tq30= github.com/tomarrell/wrapcheck/v2 v2.4.0/go.mod h1:68bQ/eJg55BROaRTbMjC7vuhL2OgfoG8bLp9ZyoBfyY= github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= -github.com/tommy-muehle/go-mnd/v2 v2.4.0 h1:1t0f8Uiaq+fqKteUR4N9Umr6E99R+lDnLnq7PwX2PPE= -github.com/tommy-muehle/go-mnd/v2 v2.4.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/tommy-muehle/go-mnd/v2 v2.5.0 h1:iAj0a8e6+dXSL7Liq0aXPox36FiN1dBbjA6lt9fl65s= +github.com/tommy-muehle/go-mnd/v2 v2.5.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go v1.2.6 h1:tGiWC9HENWE2tqYycIqFTNorMmFRVhNwCpDOpWqnk8E= @@ -1330,8 +1372,8 @@ github.com/ugorji/go/codec v1.2.6 h1:7kbGefxLoDBuYXOms4yD7223OpNMMPNPZxXk5TvFcyQ github.com/ugorji/go/codec v1.2.6/go.mod h1:V6TCNZ4PHqoHGFZuSG1W8nrCzzdgA2DozYxWFFpvxTw= github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= -github.com/ultraware/whitespace v0.0.4 h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg= -github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= +github.com/ultraware/whitespace v0.0.5 h1:hh+/cpIcopyMYbZNVov9iSxvJU3OYQg78Sfaqzi/CzI= +github.com/ultraware/whitespace v0.0.5/go.mod 
h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= @@ -1356,8 +1398,8 @@ github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= github.com/woodpecker-ci/expr v0.0.0-20210628233344-164b8b3d0915 h1:9zBOoKSR9CBeYoKQv6LFIuImg8lorCjh8XzK72bJMRg= github.com/woodpecker-ci/expr v0.0.0-20210628233344-164b8b3d0915/go.mod h1:PbzlZ93HrA1cf16OUP1vckAPq57gtF+ccnwZeDkmC9s= -github.com/xanzy/go-gitlab v0.52.2 h1:gkgg1z4ON70sphibtD86Bfmt1qV3mZ0pU0CBBCFAEvQ= -github.com/xanzy/go-gitlab v0.52.2/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE= +github.com/xanzy/go-gitlab v0.55.1 h1:IgX/DS9buV0AUz8fuJPQkdl0fQGfBiAsAHxpun8sNhg= +github.com/xanzy/go-gitlab v0.55.1/go.mod h1:F0QEXwmqiBUxCgJm8fE9S+1veX4XC9Z4cfaAbqwk4YM= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -1369,8 +1411,10 @@ github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQ github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yeya24/promlinter v0.1.0 h1:goWULN0jH5Yajmu/K+v1xCqIREeB+48OiJ2uu2ssc7U= -github.com/yeya24/promlinter v0.1.0/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= +github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= +github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= +github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1 h1:YAaOqqMTstELMMGblt6yJ/fcOt4owSYuw3IttMnKfAM= +github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= @@ -1380,11 +1424,15 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod 
h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= +gitlab.com/bosi/decorder v0.2.1 h1:ehqZe8hI4w7O4b1vgsDZw1YU1PE7iJXrQWFMsocbQ1w= +gitlab.com/bosi/decorder v0.2.1/go.mod h1:6C/nhLSbF6qZbYD8bRmISBwc6vcWdNsiIBkRvjJFrH0= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= @@ -1393,8 +1441,11 @@ go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mI go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= go.mozilla.org/mozlog v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= @@ -1450,10 +1501,10 @@ golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5 golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= -golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed h1:YoWVYYAfvQ4ddHv3OKmIvX7NCAhFGTj62VP2l2kfBbA= -golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220214200702-86341886e292 h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE= +golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1490,8 +1541,9 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= 
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1545,11 +1597,14 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= @@ -1565,13 +1620,14 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= 
+golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b h1:clP8eMhB30EHdc0bd2Twtq6kgU7yl5ub2cQLSdrv1Dg= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1664,11 +1720,13 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1687,15 +1745,23 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210816074244-15123e1e1f71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211013075003-97ac67df715c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211213223007-03aa0b5f6827/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220111092808-5a964db01320/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27 h1:XDXtA5hveEEV8JB2l7nhMTp3t3cHp9ZpwcdjqyEWLlo= -golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220224120231-95c6836cb0e7 h1:BXxu8t6QN0G1uff4bzZzSkpsax8+ALqTGUtz08QrV00= +golang.org/x/sys v0.0.0-20220224120231-95c6836cb0e7/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1715,14 +1781,13 @@ golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 h1:GZokNIeuVkl3aZHJchRrr13WCsols02MLUcz1U9is6M= -golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 h1:vVKdlvoWBphwdxWKrFZEuM0kGgGLxUOYcY4U/2Vjg44= +golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= @@ 
-1753,7 +1818,6 @@ golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1806,14 +1870,11 @@ golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201028025901-8cd080b735b3/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210104081019-d8d6ddbec6ee/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= @@ -1824,9 +1885,11 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.6/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= -golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.9-0.20211228192929-ee1ca4ffc4da/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1858,13 +1921,18 @@ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34q google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1939,6 +2007,17 @@ google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKr google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350 h1:YxHp5zqIcAShDEvRr5/0rVESVS+njYF68PSdazrNLJo= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= @@ -1975,6 +2054,9 @@ google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.44.0 h1:weqSxi/TMs1SqFRMHCtBgXRs8k3X39QIDEZ0pRcttUg= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= @@ -2009,9 +2091,9 @@ gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.63.2 h1:tGK/CyBg7SMzb60vP1M03vNZ3VDu3wGQJwn7Sxi9r3c= gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= @@ -2047,8 +2129,8 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.2.1 h1:/EPr//+UMMXwMTkXvCCoaJDq8cpjMO80Ou+L4PDo2mY= -honnef.co/go/tools v0.2.1/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= +honnef.co/go/tools v0.2.2 h1:MNh1AVMyVX23VUHE2O27jm6lNj3vjO5DexS4A1xvnzk= +honnef.co/go/tools v0.2.2/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= @@ -2101,14 +2183,14 @@ modernc.org/tcl v1.5.5/go.mod h1:ADkaTUuwukkrlhqwERyq0SM8OvyXo7+TjFz7yAF56EI= modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk= 
modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/z v1.0.1/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA= -mvdan.cc/gofumpt v0.1.1 h1:bi/1aS/5W00E2ny5q65w9SnKpWEF/UIOqDYBILpo9rA= -mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= +mvdan.cc/gofumpt v0.2.1 h1:7jakRGkQcLAJdT+C8Bwc9d0BANkVPSkHZkzNv07pJAs= +mvdan.cc/gofumpt v0.2.1/go.mod h1:a/rvZPhsNaedOJBzqRD9omnwVwHZsBdJirXHa9Gh9Ig= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7 h1:HT3e4Krq+IE44tiN36RvVEb6tvqeIdtsVSsxmNPqlFU= -mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7/go.mod h1:hBpJkZE8H/sb+VRFvw2+rBpHNsTBcvSpk61hr8mzXZE= +mvdan.cc/unparam v0.0.0-20211214103731-d0ef000c54e5 h1:Jh3LAeMt1eGpxomyu3jVkmVZWW2MxZ1qIIV2TZ/nRio= +mvdan.cc/unparam v0.0.0-20211214103731-d0ef000c54e5/go.mod h1:b8RRCBm0eeiWR8cfN88xeq2G5SG3VKGO+5UPWi5FSOY= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md index 64410cf75..cc13f8667 100644 --- a/vendor/github.com/BurntSushi/toml/README.md +++ b/vendor/github.com/BurntSushi/toml/README.md @@ -1,10 +1,6 @@ -## TOML parser and encoder for Go with reflection - TOML stands for Tom's Obvious, Minimal Language. This Go package provides a reflection interface similar to Go's standard library `json` and `xml` -packages. This package also supports the `encoding.TextUnmarshaler` and -`encoding.TextMarshaler` interfaces so that you can define custom data -representations. (There is an example of this below.) +packages. Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0). @@ -16,26 +12,25 @@ v0.4.0`). This library requires Go 1.13 or newer; install it with: - $ go get github.com/BurntSushi/toml + % go get github.com/BurntSushi/toml@latest It also comes with a TOML validator CLI tool: - $ go get github.com/BurntSushi/toml/cmd/tomlv - $ tomlv some-toml-file.toml + % go install github.com/BurntSushi/toml/cmd/tomlv@latest + % tomlv some-toml-file.toml ### Testing +This package passes all tests in [toml-test] for both the decoder and the +encoder. -This package passes all tests in -[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder -and the encoder. +[toml-test]: https://github.com/BurntSushi/toml-test ### Examples +This package works similar to how the Go standard library handles XML and JSON. +Namely, data is loaded into Go values via reflection. -This package works similarly to how the Go standard library handles XML and -JSON. Namely, data is loaded into Go values via reflection. 
- -For the simplest example, consider some TOML file as just a list of keys -and values: +For the simplest example, consider some TOML file as just a list of keys and +values: ```toml Age = 25 @@ -61,9 +56,8 @@ And then decoded with: ```go var conf Config -if _, err := toml.Decode(tomlData, &conf); err != nil { - // handle error -} +err := toml.Decode(tomlData, &conf) +// handle error ``` You can also use struct tags if your struct field name doesn't map to a TOML @@ -75,15 +69,14 @@ some_key_NAME = "wat" ```go type TOML struct { - ObscureKey string `toml:"some_key_NAME"` + ObscureKey string `toml:"some_key_NAME"` } ``` Beware that like other most other decoders **only exported fields** are considered when encoding and decoding; private fields are silently ignored. -### Using the `encoding.TextUnmarshaler` interface - +### Using the `Marshaler` and `encoding.TextUnmarshaler` interfaces Here's an example that automatically parses duration strings into `time.Duration` values: @@ -136,7 +129,6 @@ To target TOML specifically you can implement `UnmarshalTOML` TOML interface in a similar way. ### More complex usage - Here's an example of how to load the example from the official spec page: ```toml @@ -216,5 +208,4 @@ type clients struct { Note that a case insensitive match will be tried if an exact match can't be found. -A working example of the above can be found in `_examples/example.{go,toml}`. - +A working example of the above can be found in `_example/example.{go,toml}`. diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go index d3d3b8397..e24f0c5d5 100644 --- a/vendor/github.com/BurntSushi/toml/decode.go +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -9,7 +9,6 @@ import ( "os" "reflect" "strings" - "time" ) // Unmarshaler is the interface implemented by objects that can unmarshal a @@ -40,6 +39,13 @@ type Primitive struct { context Key } +// The significand precision for float32 and float64 is 24 and 53 bits; this is +// the range a natural number can be stored in a float without loss of data. +const ( + maxSafeFloat32Int = 16777215 // 2^24-1 + maxSafeFloat64Int = 9007199254740991 // 2^53-1 +) + // PrimitiveDecode is just like the other `Decode*` functions, except it // decodes a TOML value that has already been parsed. Valid primitive values // can *only* be obtained from values filled by the decoder functions, @@ -100,18 +106,38 @@ func NewDecoder(r io.Reader) *Decoder { return &Decoder{r: r} } +var ( + unmarshalToml = reflect.TypeOf((*Unmarshaler)(nil)).Elem() + unmarshalText = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() +) + // Decode TOML data in to the pointer `v`. func (dec *Decoder) Decode(v interface{}) (MetaData, error) { rv := reflect.ValueOf(v) if rv.Kind() != reflect.Ptr { - return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v)) + s := "%q" + if reflect.TypeOf(v) == nil { + s = "%v" + } + + return MetaData{}, e("cannot decode to non-pointer "+s, reflect.TypeOf(v)) } if rv.IsNil() { - return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v)) + return MetaData{}, e("cannot decode to nil value of %q", reflect.TypeOf(v)) } - // TODO: have parser should read from io.Reader? Or at the very least, make - // it read from []byte rather than string + // Check if this is a supported type: struct, map, interface{}, or something + // that implements UnmarshalTOML or UnmarshalText. 
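With this update, `Decode` rejects destinations that are not structs, maps, `interface{}`, or types implementing `UnmarshalTOML`/`UnmarshalText`. A minimal sketch of the common struct case, following the README snippet above; the `Config` type and its fields are illustrative, only `toml.Decode` itself comes from the package.

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// Config mirrors the README example above; the field names are illustrative.
type Config struct {
	Age  int
	Cats []string
}

func main() {
	const tomlData = `
Age = 25
Cats = ["Cauchy", "Plato"]
`
	var conf Config
	// The destination must be a pointer to a struct, map, interface{}, or an
	// Unmarshaler; with this version Decode reports an error otherwise.
	if _, err := toml.Decode(tomlData, &conf); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("%+v\n", conf)
}
```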
+ rv = indirect(rv) + rt := rv.Type() + if rv.Kind() != reflect.Struct && rv.Kind() != reflect.Map && + !(rv.Kind() == reflect.Interface && rv.NumMethod() == 0) && + !rt.Implements(unmarshalToml) && !rt.Implements(unmarshalText) { + return MetaData{}, e("cannot decode to type %s", rt) + } + + // TODO: parser should read from io.Reader? Or at the very least, make it + // read from []byte rather than string data, err := ioutil.ReadAll(dec.r) if err != nil { return MetaData{}, err @@ -121,11 +147,15 @@ func (dec *Decoder) Decode(v interface{}) (MetaData, error) { if err != nil { return MetaData{}, err } + md := MetaData{ - p.mapping, p.types, p.ordered, - make(map[string]bool, len(p.ordered)), nil, + mapping: p.mapping, + types: p.types, + keys: p.ordered, + decoded: make(map[string]struct{}, len(p.ordered)), + context: nil, } - return md, md.unify(p.mapping, indirect(rv)) + return md, md.unify(p.mapping, rv) } // Decode the TOML data in to the pointer v. @@ -218,9 +248,7 @@ func (md *MetaData) unify(data interface{}, rv reflect.Value) error { return e("unsupported type %s", rv.Type()) } return md.unifyAnything(data, rv) - case reflect.Float32: - fallthrough - case reflect.Float64: + case reflect.Float32, reflect.Float64: return md.unifyFloat64(data, rv) } return e("unsupported type %s", rv.Kind()) @@ -254,17 +282,17 @@ func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { for _, i := range f.index { subv = indirect(subv.Field(i)) } + if isUnifiable(subv) { - md.decoded[md.context.add(key).String()] = true + md.decoded[md.context.add(key).String()] = struct{}{} md.context = append(md.context, key) - if err := md.unify(datum, subv); err != nil { + err := md.unify(datum, subv) + if err != nil { return err } md.context = md.context[0 : len(md.context)-1] } else if f.name != "" { - // Bad user! No soup for you! 
- return e("cannot write unexported field %s.%s", - rv.Type().String(), f.name) + return e("cannot write unexported field %s.%s", rv.Type().String(), f.name) } } } @@ -283,22 +311,22 @@ func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { if tmap == nil { return nil } - return badtype("map", mapping) + return md.badtype("map", mapping) } if rv.IsNil() { rv.Set(reflect.MakeMap(rv.Type())) } for k, v := range tmap { - md.decoded[md.context.add(k).String()] = true + md.decoded[md.context.add(k).String()] = struct{}{} md.context = append(md.context, k) - rvkey := indirect(reflect.New(rv.Type().Key())) rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) if err := md.unify(v, rvval); err != nil { return err } md.context = md.context[0 : len(md.context)-1] + rvkey := indirect(reflect.New(rv.Type().Key())) rvkey.SetString(k) rv.SetMapIndex(rvkey, rvval) } @@ -311,7 +339,7 @@ func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { if !datav.IsValid() { return nil } - return badtype("slice", data) + return md.badtype("slice", data) } if l := datav.Len(); l != rv.Len() { return e("expected array length %d; got TOML array of length %d", rv.Len(), l) @@ -325,7 +353,7 @@ func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { if !datav.IsValid() { return nil } - return badtype("slice", data) + return md.badtype("slice", data) } n := datav.Len() if rv.IsNil() || rv.Cap() < n { @@ -346,26 +374,21 @@ func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { return nil } -func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error { - if _, ok := data.(time.Time); ok { - rv.Set(reflect.ValueOf(data)) - return nil - } - return badtype("time.Time", data) -} - func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { if s, ok := data.(string); ok { rv.SetString(s) return nil } - return badtype("string", data) + return md.badtype("string", data) } func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { if num, ok := data.(float64); ok { switch rv.Kind() { case reflect.Float32: + if num < -math.MaxFloat32 || num > math.MaxFloat32 { + return e("value %f is out of range for float32", num) + } fallthrough case reflect.Float64: rv.SetFloat(num) @@ -374,7 +397,26 @@ func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { } return nil } - return badtype("float", data) + + if num, ok := data.(int64); ok { + switch rv.Kind() { + case reflect.Float32: + if num < -maxSafeFloat32Int || num > maxSafeFloat32Int { + return e("value %d is out of range for float32", num) + } + fallthrough + case reflect.Float64: + if num < -maxSafeFloat64Int || num > maxSafeFloat64Int { + return e("value %d is out of range for float64", num) + } + rv.SetFloat(float64(num)) + default: + panic("bug") + } + return nil + } + + return md.badtype("float", data) } func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { @@ -421,7 +463,7 @@ func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { } return nil } - return badtype("integer", data) + return md.badtype("integer", data) } func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { @@ -429,7 +471,7 @@ func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { rv.SetBool(b) return nil } - return badtype("boolean", data) + return md.badtype("boolean", data) } func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { @@ -440,6 +482,12 @@ func (md *MetaData) 
unifyAnything(data interface{}, rv reflect.Value) error { func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) error { var s string switch sdata := data.(type) { + case Marshaler: + text, err := sdata.MarshalTOML() + if err != nil { + return err + } + s = string(text) case TextMarshaler: text, err := sdata.MarshalText() if err != nil { @@ -457,7 +505,7 @@ func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) erro case float64: s = fmt.Sprintf("%f", sdata) default: - return badtype("primitive (string-like)", data) + return md.badtype("primitive (string-like)", data) } if err := v.UnmarshalText([]byte(s)); err != nil { return err @@ -465,17 +513,22 @@ func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) erro return nil } +func (md *MetaData) badtype(dst string, data interface{}) error { + return e("incompatible types: TOML key %q has type %T; destination has type %s", md.context, data, dst) +} + // rvalue returns a reflect.Value of `v`. All pointers are resolved. func rvalue(v interface{}) reflect.Value { return indirect(reflect.ValueOf(v)) } // indirect returns the value pointed to by a pointer. -// Pointers are followed until the value is not a pointer. -// New values are allocated for each nil pointer. // -// An exception to this rule is if the value satisfies an interface of -// interest to us (like encoding.TextUnmarshaler). +// Pointers are followed until the value is not a pointer. New values are +// allocated for each nil pointer. +// +// An exception to this rule is if the value satisfies an interface of interest +// to us (like encoding.TextUnmarshaler). func indirect(v reflect.Value) reflect.Value { if v.Kind() != reflect.Ptr { if v.CanSet() { @@ -505,7 +558,3 @@ func isUnifiable(rv reflect.Value) bool { func e(format string, args ...interface{}) error { return fmt.Errorf("toml: "+format, args...) } - -func badtype(expected string, data interface{}) error { - return e("cannot load TOML value of type %T into a Go %s", data, expected) -} diff --git a/vendor/github.com/BurntSushi/toml/decode_go116.go b/vendor/github.com/BurntSushi/toml/decode_go116.go index 38aa75fdc..eddfb641b 100644 --- a/vendor/github.com/BurntSushi/toml/decode_go116.go +++ b/vendor/github.com/BurntSushi/toml/decode_go116.go @@ -1,3 +1,4 @@ +//go:build go1.16 // +build go1.16 package toml diff --git a/vendor/github.com/BurntSushi/toml/deprecated.go b/vendor/github.com/BurntSushi/toml/deprecated.go index db89eac1d..c6af3f239 100644 --- a/vendor/github.com/BurntSushi/toml/deprecated.go +++ b/vendor/github.com/BurntSushi/toml/deprecated.go @@ -5,29 +5,17 @@ import ( "io" ) -// DEPRECATED! -// -// Use the identical encoding.TextMarshaler instead. It is defined here to -// support Go 1.1 and older. +// Deprecated: use encoding.TextMarshaler type TextMarshaler encoding.TextMarshaler -// DEPRECATED! -// -// Use the identical encoding.TextUnmarshaler instead. It is defined here to -// support Go 1.1 and older. +// Deprecated: use encoding.TextUnmarshaler type TextUnmarshaler encoding.TextUnmarshaler -// DEPRECATED! -// -// Use MetaData.PrimitiveDecode instead. +// Deprecated: use MetaData.PrimitiveDecode. func PrimitiveDecode(primValue Primitive, v interface{}) error { - md := MetaData{decoded: make(map[string]bool)} + md := MetaData{decoded: make(map[string]struct{})} return md.unify(primValue.undecoded, rvalue(v)) } -// DEPRECATED! -// -// Use NewDecoder(reader).Decode(&v) instead. 
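The deprecated.go hunk swaps the old "DEPRECATED!" banners for standard `Deprecated:` markers and points `DecodeReader` users at `NewDecoder(r).Decode(&v)`. A small sketch of that suggested replacement; the `Config` type and the `config.toml` path are illustrative, while `toml.NewDecoder` and its `Decode` method are taken from the decode.go hunk above.

```go
package main

import (
	"fmt"
	"os"

	"github.com/BurntSushi/toml"
)

// Config is an illustrative destination type.
type Config struct {
	Age int
}

func main() {
	f, err := os.Open("config.toml") // illustrative path
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f.Close()

	var conf Config
	// Replacement for the deprecated toml.DecodeReader(f, &conf).
	if _, err := toml.NewDecoder(f).Decode(&conf); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("%+v\n", conf)
}
```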
-func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { - return NewDecoder(r).Decode(v) -} +// Deprecated: use NewDecoder(reader).Decode(&value). +func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { return NewDecoder(r).Decode(v) } diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go index 10d88ac63..dee4e6d31 100644 --- a/vendor/github.com/BurntSushi/toml/encode.go +++ b/vendor/github.com/BurntSushi/toml/encode.go @@ -21,12 +21,11 @@ type tomlEncodeError struct{ error } var ( errArrayNilElement = errors.New("toml: cannot encode array with nil element") errNonString = errors.New("toml: cannot encode a map with non-string key type") - errAnonNonStruct = errors.New("toml: cannot encode an anonymous field that is not a struct") errNoKey = errors.New("toml: top-level values must be Go maps or structs") errAnything = errors.New("") // used in testing ) -var quotedReplacer = strings.NewReplacer( +var dblQuotedReplacer = strings.NewReplacer( "\"", "\\\"", "\\", "\\\\", "\x00", `\u0000`, @@ -64,13 +63,22 @@ var quotedReplacer = strings.NewReplacer( "\x7f", `\u007f`, ) +// Marshaler is the interface implemented by types that can marshal themselves +// into valid TOML. +type Marshaler interface { + MarshalTOML() ([]byte, error) +} + // Encoder encodes a Go to a TOML document. // // The mapping between Go values and TOML values should be precisely the same as -// for the Decode* functions. Similarly, the TextMarshaler interface is -// supported by encoding the resulting bytes as strings. If you want to write -// arbitrary binary data then you will need to use something like base64 since -// TOML does not have any binary types. +// for the Decode* functions. +// +// The toml.Marshaler and encoder.TextMarshaler interfaces are supported to +// encoding the value as custom TOML. +// +// If you want to write arbitrary binary data then you will need to use +// something like base64 since TOML does not have any binary types. // // When encoding TOML hashes (Go maps or structs), keys without any sub-hashes // are encoded first. @@ -83,16 +91,14 @@ var quotedReplacer = strings.NewReplacer( // structs. (e.g. [][]map[string]string is not allowed but []map[string]string // is okay, as is []map[string][]string). // -// NOTE: Only exported keys are encoded due to the use of reflection. Unexported +// NOTE: only exported keys are encoded due to the use of reflection. Unexported // keys are silently discarded. type Encoder struct { - // The string to use for a single indentation level. The default is two - // spaces. + // String to use for a single indentation level; default is two spaces. Indent string - // hasWritten is whether we have written any output to w yet. - hasWritten bool w *bufio.Writer + hasWritten bool // written any output to w yet? } // NewEncoder create a new Encoder. @@ -130,12 +136,13 @@ func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { } func (enc *Encoder) encode(key Key, rv reflect.Value) { - // Special case. Time needs to be in ISO8601 format. - // Special case. If we can marshal the type to text, then we used that. - // Basically, this prevents the encoder for handling these types as - // generic structs (or whatever the underlying type of a TextMarshaler is). + // Special case: time needs to be in ISO8601 format. + // + // Special case: if we can marshal the type to text, then we used that. 
This + // prevents the encoder for handling these types as generic structs (or + // whatever the underlying type of a TextMarshaler is). switch t := rv.Interface().(type) { - case time.Time, encoding.TextMarshaler: + case time.Time, encoding.TextMarshaler, Marshaler: enc.writeKeyValue(key, rv, false) return // TODO: #76 would make this superfluous after implemented. @@ -200,13 +207,19 @@ func (enc *Encoder) eElement(rv reflect.Value) { enc.wf(v.In(time.UTC).Format(format)) } return - case encoding.TextMarshaler: - // Use text marshaler if it's available for this value. - if s, err := v.MarshalText(); err != nil { + case Marshaler: + s, err := v.MarshalTOML() + if err != nil { encPanic(err) - } else { - enc.writeQuoted(string(s)) } + enc.writeQuoted(string(s)) + return + case encoding.TextMarshaler: + s, err := v.MarshalText() + if err != nil { + encPanic(err) + } + enc.writeQuoted(string(s)) return } @@ -260,7 +273,7 @@ func floatAddDecimal(fstr string) string { } func (enc *Encoder) writeQuoted(s string) { - enc.wf("\"%s\"", quotedReplacer.Replace(s)) + enc.wf("\"%s\"", dblQuotedReplacer.Replace(s)) } func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { @@ -286,7 +299,7 @@ func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { continue } enc.newline() - enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll()) + enc.wf("%s[[%s]]", enc.indentStr(key), key) enc.newline() enc.eMapOrStruct(key, trv, false) } @@ -299,7 +312,7 @@ func (enc *Encoder) eTable(key Key, rv reflect.Value) { enc.newline() } if len(key) > 0 { - enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll()) + enc.wf("%s[%s]", enc.indentStr(key), key) enc.newline() } enc.eMapOrStruct(key, rv, false) @@ -328,7 +341,7 @@ func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) { var mapKeysDirect, mapKeysSub []string for _, mapKey := range rv.MapKeys() { k := mapKey.String() - if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) { + if typeIsTable(tomlTypeOfGo(rv.MapIndex(mapKey))) { mapKeysSub = append(mapKeysSub, k) } else { mapKeysDirect = append(mapKeysDirect, k) @@ -364,6 +377,8 @@ func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) { } } +const is32Bit = (32 << (^uint(0) >> 63)) == 32 + func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { // Write keys for fields directly under this key first, because if we write // a field that creates a new table then all keys under it will be in that @@ -408,10 +423,20 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { } } - if typeIsHash(tomlTypeOfGo(frv)) { + if typeIsTable(tomlTypeOfGo(frv)) { fieldsSub = append(fieldsSub, append(start, f.Index...)) } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) + // Copy so it works correct on 32bit archs; not clear why this + // is needed. See #314, and https://www.reddit.com/r/golang/comments/pnx8v4 + // This also works fine on 64bit, but 32bit archs are somewhat + // rare and this is a wee bit faster. + if is32Bit { + copyStart := make([]int, len(start)) + copy(copyStart, start) + fieldsDirect = append(fieldsDirect, append(copyStart, f.Index...)) + } else { + fieldsDirect = append(fieldsDirect, append(start, f.Index...)) + } } } } @@ -462,13 +487,13 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { } } -// tomlTypeName returns the TOML type name of the Go value's type. It is -// used to determine whether the types of array elements are mixed (which is -// forbidden). 
If the Go value is nil, then it is illegal for it to be an array -// element, and valueIsNil is returned as true. - -// Returns the TOML type of a Go value. The type may be `nil`, which means -// no concrete TOML type could be found. +// tomlTypeOfGo returns the TOML type name of the Go value's type. +// +// It is used to determine whether the types of array elements are mixed (which +// is forbidden). If the Go value is nil, then it is illegal for it to be an +// array element, and valueIsNil is returned as true. +// +// The type may be `nil`, which means no concrete TOML type could be found. func tomlTypeOfGo(rv reflect.Value) tomlType { if isNil(rv) || !rv.IsValid() { return nil @@ -495,32 +520,43 @@ func tomlTypeOfGo(rv reflect.Value) tomlType { case reflect.Map: return tomlHash case reflect.Struct: - switch rv.Interface().(type) { - case time.Time: + if _, ok := rv.Interface().(time.Time); ok { return tomlDatetime - case encoding.TextMarshaler: - return tomlString - default: - // Someone used a pointer receiver: we can make it work for pointer - // values. - if rv.CanAddr() { - _, ok := rv.Addr().Interface().(encoding.TextMarshaler) - if ok { - return tomlString - } - } - return tomlHash } + if isMarshaler(rv) { + return tomlString + } + return tomlHash default: - _, ok := rv.Interface().(encoding.TextMarshaler) - if ok { + if isMarshaler(rv) { return tomlString } + encPanic(errors.New("unsupported type: " + rv.Kind().String())) - panic("") // Need *some* return value + panic("unreachable") } } +func isMarshaler(rv reflect.Value) bool { + switch rv.Interface().(type) { + case encoding.TextMarshaler: + return true + case Marshaler: + return true + } + + // Someone used a pointer receiver: we can make it work for pointer values. + if rv.CanAddr() { + if _, ok := rv.Addr().Interface().(encoding.TextMarshaler); ok { + return true + } + if _, ok := rv.Addr().Interface().(Marshaler); ok { + return true + } + } + return false +} + // tomlArrayType returns the element type of a TOML array. The type returned // may be nil if it cannot be determined (e.g., a nil slice or a zero length // slize). This function may also panic if it finds a type that cannot be @@ -604,7 +640,14 @@ func (enc *Encoder) newline() { // // key = // -// If inline is true it won't add a newline at the end. +// This is also used for "k = v" in inline tables; so something like this will +// be written in three calls: +// +// ┌────────────────────┐ +// │ ┌───┐ ┌─────┐│ +// v v v v vv +// key = {k = v, k2 = v2} +// func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { if len(key) == 0 { encPanic(errNoKey) @@ -617,7 +660,8 @@ func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { } func (enc *Encoder) wf(format string, v ...interface{}) { - if _, err := fmt.Fprintf(enc.w, format, v...); err != nil { + _, err := fmt.Fprintf(enc.w, format, v...) + if err != nil { encPanic(err) } enc.hasWritten = true diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go new file mode 100644 index 000000000..36edc4655 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/error.go @@ -0,0 +1,229 @@ +package toml + +import ( + "fmt" + "strings" +) + +// ParseError is returned when there is an error parsing the TOML syntax. +// +// For example invalid syntax, duplicate keys, etc. 
+// +// In addition to the error message itself, you can also print detailed location +// information with context by using ErrorWithLocation(): +// +// toml: error: Key 'fruit' was already created and cannot be used as an array. +// +// At line 4, column 2-7: +// +// 2 | fruit = [] +// 3 | +// 4 | [[fruit]] # Not allowed +// ^^^^^ +// +// Furthermore, the ErrorWithUsage() can be used to print the above with some +// more detailed usage guidance: +// +// toml: error: newlines not allowed within inline tables +// +// At line 1, column 18: +// +// 1 | x = [{ key = 42 # +// ^ +// +// Error help: +// +// Inline tables must always be on a single line: +// +// table = {key = 42, second = 43} +// +// It is invalid to split them over multiple lines like so: +// +// # INVALID +// table = { +// key = 42, +// second = 43 +// } +// +// Use regular for this: +// +// [table] +// key = 42 +// second = 43 +type ParseError struct { + Message string // Short technical message. + Usage string // Longer message with usage guidance; may be blank. + Position Position // Position of the error + LastKey string // Last parsed key, may be blank. + Line int // Line the error occurred. Deprecated: use Position. + + err error + input string +} + +// Position of an error. +type Position struct { + Line int // Line number, starting at 1. + Start int // Start of error, as byte offset starting at 0. + Len int // Lenght in bytes. +} + +func (pe ParseError) Error() string { + msg := pe.Message + if msg == "" { // Error from errorf() + msg = pe.err.Error() + } + + if pe.LastKey == "" { + return fmt.Sprintf("toml: line %d: %s", pe.Position.Line, msg) + } + return fmt.Sprintf("toml: line %d (last key %q): %s", + pe.Position.Line, pe.LastKey, msg) +} + +// ErrorWithUsage() returns the error with detailed location context. +// +// See the documentation on ParseError. +func (pe ParseError) ErrorWithPosition() string { + if pe.input == "" { // Should never happen, but just in case. + return pe.Error() + } + + var ( + lines = strings.Split(pe.input, "\n") + col = pe.column(lines) + b = new(strings.Builder) + ) + + msg := pe.Message + if msg == "" { + msg = pe.err.Error() + } + + // TODO: don't show control characters as literals? This may not show up + // well everywhere. + + if pe.Position.Len == 1 { + fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d:\n\n", + msg, pe.Position.Line, col+1) + } else { + fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d-%d:\n\n", + msg, pe.Position.Line, col, col+pe.Position.Len) + } + if pe.Position.Line > 2 { + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-2, lines[pe.Position.Line-3]) + } + if pe.Position.Line > 1 { + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-1, lines[pe.Position.Line-2]) + } + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line, lines[pe.Position.Line-1]) + fmt.Fprintf(b, "% 10s%s%s\n", "", strings.Repeat(" ", col), strings.Repeat("^", pe.Position.Len)) + return b.String() +} + +// ErrorWithUsage() returns the error with detailed location context and usage +// guidance. +// +// See the documentation on ParseError. 
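The new error.go gives parse errors a `Position` plus `ErrorWithPosition()` and `ErrorWithUsage()` renderers. A sketch of surfacing that context from a decode error, assuming the parse error is exposed as a `toml.ParseError` in the error chain (its doc comment above says it is returned for syntax errors); the invalid input is the inline-table example from that comment.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/BurntSushi/toml"
)

func main() {
	// Invalid on purpose: inline tables may not span multiple lines.
	const bad = "x = [{ key = 42 #\n"

	var v map[string]interface{}
	_, err := toml.Decode(bad, &v)
	if err == nil {
		return
	}

	var perr toml.ParseError
	if errors.As(err, &perr) {
		// Short message plus the annotated source line and column.
		fmt.Println(perr.ErrorWithPosition())
		// ErrorWithUsage adds the longer guidance text when available.
		fmt.Println(perr.ErrorWithUsage())
		return
	}
	fmt.Println(err)
}
```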
+func (pe ParseError) ErrorWithUsage() string { + m := pe.ErrorWithPosition() + if u, ok := pe.err.(interface{ Usage() string }); ok && u.Usage() != "" { + return m + "Error help:\n\n " + + strings.ReplaceAll(strings.TrimSpace(u.Usage()), "\n", "\n ") + + "\n" + } + return m +} + +func (pe ParseError) column(lines []string) int { + var pos, col int + for i := range lines { + ll := len(lines[i]) + 1 // +1 for the removed newline + if pos+ll >= pe.Position.Start { + col = pe.Position.Start - pos + if col < 0 { // Should never happen, but just in case. + col = 0 + } + break + } + pos += ll + } + + return col +} + +type ( + errLexControl struct{ r rune } + errLexEscape struct{ r rune } + errLexUTF8 struct{ b byte } + errLexInvalidNum struct{ v string } + errLexInvalidDate struct{ v string } + errLexInlineTableNL struct{} + errLexStringNL struct{} +) + +func (e errLexControl) Error() string { + return fmt.Sprintf("TOML files cannot contain control characters: '0x%02x'", e.r) +} +func (e errLexControl) Usage() string { return "" } + +func (e errLexEscape) Error() string { return fmt.Sprintf(`invalid escape in string '\%c'`, e.r) } +func (e errLexEscape) Usage() string { return usageEscape } +func (e errLexUTF8) Error() string { return fmt.Sprintf("invalid UTF-8 byte: 0x%02x", e.b) } +func (e errLexUTF8) Usage() string { return "" } +func (e errLexInvalidNum) Error() string { return fmt.Sprintf("invalid number: %q", e.v) } +func (e errLexInvalidNum) Usage() string { return "" } +func (e errLexInvalidDate) Error() string { return fmt.Sprintf("invalid date: %q", e.v) } +func (e errLexInvalidDate) Usage() string { return "" } +func (e errLexInlineTableNL) Error() string { return "newlines not allowed within inline tables" } +func (e errLexInlineTableNL) Usage() string { return usageInlineNewline } +func (e errLexStringNL) Error() string { return "strings cannot contain newlines" } +func (e errLexStringNL) Usage() string { return usageStringNewline } + +const usageEscape = ` +A '\' inside a "-delimited string is interpreted as an escape character. + +The following escape sequences are supported: +\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX + +To prevent a '\' from being recognized as an escape character, use either: + +- a ' or '''-delimited string; escape characters aren't processed in them; or +- write two backslashes to get a single backslash: '\\'. + +If you're trying to add a Windows path (e.g. "C:\Users\martin") then using '/' +instead of '\' will usually also work: "C:/Users/martin". +` + +const usageInlineNewline = ` +Inline tables must always be on a single line: + + table = {key = 42, second = 43} + +It is invalid to split them over multiple lines like so: + + # INVALID + table = { + key = 42, + second = 43 + } + +Use regular for this: + + [table] + key = 42 + second = 43 +` + +const usageStringNewline = ` +Strings must always be on a single line, and cannot span more than one line: + + # INVALID + string = "Hello, + world!" 
+ +Instead use """ or ''' to split strings over multiple lines: + + string = """Hello, + world!""" +` diff --git a/vendor/github.com/BurntSushi/toml/go.sum b/vendor/github.com/BurntSushi/toml/go.sum deleted file mode 100644 index e69de29bb..000000000 diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go index adc4eb5d5..63ef20f47 100644 --- a/vendor/github.com/BurntSushi/toml/lex.go +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -37,28 +37,14 @@ const ( itemInlineTableEnd ) -const ( - eof = 0 - comma = ',' - tableStart = '[' - tableEnd = ']' - arrayTableStart = '[' - arrayTableEnd = ']' - tableSep = '.' - keySep = '=' - arrayStart = '[' - arrayEnd = ']' - commentStart = '#' - stringStart = '"' - stringEnd = '"' - rawStringStart = '\'' - rawStringEnd = '\'' - inlineTableStart = '{' - inlineTableEnd = '}' -) +const eof = 0 type stateFn func(lx *lexer) stateFn +func (p Position) String() string { + return fmt.Sprintf("at line %d; start %d; length %d", p.Line, p.Start, p.Len) +} + type lexer struct { input string start int @@ -67,26 +53,26 @@ type lexer struct { state stateFn items chan item - // Allow for backing up up to four runes. - // This is necessary because TOML contains 3-rune tokens (""" and '''). + // Allow for backing up up to 4 runes. This is necessary because TOML + // contains 3-rune tokens (""" and '''). prevWidths [4]int - nprev int // how many of prevWidths are in use - // If we emit an eof, we can still back up, but it is not OK to call - // next again. - atEOF bool + nprev int // how many of prevWidths are in use + atEOF bool // If we emit an eof, we can still back up, but it is not OK to call next again. // A stack of state functions used to maintain context. - // The idea is to reuse parts of the state machine in various places. - // For example, values can appear at the top level or within arbitrarily - // nested arrays. The last state on the stack is used after a value has - // been lexed. Similarly for comments. + // + // The idea is to reuse parts of the state machine in various places. For + // example, values can appear at the top level or within arbitrarily nested + // arrays. The last state on the stack is used after a value has been lexed. + // Similarly for comments. 
stack []stateFn } type item struct { - typ itemType - val string - line int + typ itemType + val string + err error + pos Position } func (lx *lexer) nextItem() item { @@ -96,7 +82,7 @@ func (lx *lexer) nextItem() item { return item default: lx.state = lx.state(lx) - //fmt.Printf(" STATE %-24s current: %-10q stack: %s\n", lx.state, lx.current(), lx.stack) + //fmt.Printf(" STATE %-24s current: %-10q stack: %s\n", lx.state, lx.current(), lx.stack) } } } @@ -105,9 +91,9 @@ func lex(input string) *lexer { lx := &lexer{ input: input, state: lexTop, - line: 1, items: make(chan item, 10), stack: make([]stateFn, 0, 10), + line: 1, } return lx } @@ -129,13 +115,25 @@ func (lx *lexer) current() string { return lx.input[lx.start:lx.pos] } +func (lx lexer) getPos() Position { + p := Position{ + Line: lx.line, + Start: lx.start, + Len: lx.pos - lx.start, + } + if p.Len <= 0 { + p.Len = 1 + } + return p +} + func (lx *lexer) emit(typ itemType) { - lx.items <- item{typ, lx.current(), lx.line} + lx.items <- item{typ: typ, pos: lx.getPos(), val: lx.current()} lx.start = lx.pos } func (lx *lexer) emitTrim(typ itemType) { - lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line} + lx.items <- item{typ: typ, pos: lx.getPos(), val: strings.TrimSpace(lx.current())} lx.start = lx.pos } @@ -160,7 +158,13 @@ func (lx *lexer) next() (r rune) { r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) if r == utf8.RuneError { - lx.errorf("invalid UTF-8 byte at position %d (line %d): 0x%02x", lx.pos, lx.line, lx.input[lx.pos]) + lx.error(errLexUTF8{lx.input[lx.pos]}) + return utf8.RuneError + } + + // Note: don't use peek() here, as this calls next(). + if isControl(r) || (r == '\r' && (len(lx.input)-1 == lx.pos || lx.input[lx.pos+1] != '\n')) { + lx.errorControlChar(r) return utf8.RuneError } @@ -188,6 +192,7 @@ func (lx *lexer) backup() { lx.prevWidths[1] = lx.prevWidths[2] lx.prevWidths[2] = lx.prevWidths[3] lx.nprev-- + lx.pos -= w if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { lx.line-- @@ -223,18 +228,58 @@ func (lx *lexer) skip(pred func(rune) bool) { } } -// errorf stops all lexing by emitting an error and returning `nil`. +// error stops all lexing by emitting an error and returning `nil`. +// // Note that any value that is a character is escaped if it's a special // character (newlines, tabs, etc.). -func (lx *lexer) errorf(format string, values ...interface{}) stateFn { - lx.items <- item{ - itemError, - fmt.Sprintf(format, values...), - lx.line, +func (lx *lexer) error(err error) stateFn { + if lx.atEOF { + return lx.errorPrevLine(err) } + lx.items <- item{typ: itemError, pos: lx.getPos(), err: err} return nil } +// errorfPrevline is like error(), but sets the position to the last column of +// the previous line. +// +// This is so that unexpected EOF or NL errors don't show on a new blank line. +func (lx *lexer) errorPrevLine(err error) stateFn { + pos := lx.getPos() + pos.Line-- + pos.Len = 1 + pos.Start = lx.pos - 1 + lx.items <- item{typ: itemError, pos: pos, err: err} + return nil +} + +// errorPos is like error(), but allows explicitly setting the position. +func (lx *lexer) errorPos(start, length int, err error) stateFn { + pos := lx.getPos() + pos.Start = start + pos.Len = length + lx.items <- item{typ: itemError, pos: pos, err: err} + return nil +} + +// errorf is like error, and creates a new error. 
+func (lx *lexer) errorf(format string, values ...interface{}) stateFn { + if lx.atEOF { + pos := lx.getPos() + pos.Line-- + pos.Len = 1 + pos.Start = lx.pos - 1 + lx.items <- item{typ: itemError, pos: pos, err: fmt.Errorf(format, values...)} + return nil + } + lx.items <- item{typ: itemError, pos: lx.getPos(), err: fmt.Errorf(format, values...)} + return nil +} + +func (lx *lexer) errorControlChar(cc rune) stateFn { + return lx.errorPos(lx.pos-1, 1, errLexControl{cc}) +} + // lexTop consumes elements at the top level of TOML data. func lexTop(lx *lexer) stateFn { r := lx.next() @@ -242,10 +287,10 @@ func lexTop(lx *lexer) stateFn { return lexSkip(lx, lexTop) } switch r { - case commentStart: + case '#': lx.push(lexTop) return lexCommentStart - case tableStart: + case '[': return lexTableStart case eof: if lx.pos > lx.start { @@ -268,7 +313,7 @@ func lexTop(lx *lexer) stateFn { func lexTopEnd(lx *lexer) stateFn { r := lx.next() switch { - case r == commentStart: + case r == '#': // a comment will read to a newline for us. lx.push(lexTop) return lexCommentStart @@ -292,7 +337,7 @@ func lexTopEnd(lx *lexer) stateFn { // It also handles the case that this is an item in an array of tables. // e.g., '[[name]]'. func lexTableStart(lx *lexer) stateFn { - if lx.peek() == arrayTableStart { + if lx.peek() == '[' { lx.next() lx.emit(itemArrayTableStart) lx.push(lexArrayTableEnd) @@ -309,10 +354,8 @@ func lexTableEnd(lx *lexer) stateFn { } func lexArrayTableEnd(lx *lexer) stateFn { - if r := lx.next(); r != arrayTableEnd { - return lx.errorf( - "expected end of table array name delimiter %q, but got %q instead", - arrayTableEnd, r) + if r := lx.next(); r != ']' { + return lx.errorf("expected end of table array name delimiter ']', but got %q instead", r) } lx.emit(itemArrayTableEnd) return lexTopEnd @@ -321,11 +364,11 @@ func lexArrayTableEnd(lx *lexer) stateFn { func lexTableNameStart(lx *lexer) stateFn { lx.skip(isWhitespace) switch r := lx.peek(); { - case r == tableEnd || r == eof: + case r == ']' || r == eof: return lx.errorf("unexpected end of table name (table names cannot be empty)") - case r == tableSep: + case r == '.': return lx.errorf("unexpected table separator (table names cannot be empty)") - case r == stringStart || r == rawStringStart: + case r == '"' || r == '\'': lx.ignore() lx.push(lexTableNameEnd) return lexQuotedName @@ -342,10 +385,10 @@ func lexTableNameEnd(lx *lexer) stateFn { switch r := lx.next(); { case isWhitespace(r): return lexTableNameEnd - case r == tableSep: + case r == '.': lx.ignore() return lexTableNameStart - case r == tableEnd: + case r == ']': return lx.pop() default: return lx.errorf("expected '.' 
or ']' to end table name, but got %q instead", r) @@ -379,10 +422,10 @@ func lexQuotedName(lx *lexer) stateFn { switch { case isWhitespace(r): return lexSkip(lx, lexValue) - case r == stringStart: + case r == '"': lx.ignore() // ignore the '"' return lexString - case r == rawStringStart: + case r == '\'': lx.ignore() // ignore the "'" return lexRawString case r == eof: @@ -400,7 +443,7 @@ func lexKeyStart(lx *lexer) stateFn { return lx.errorf("unexpected '=': key name appears blank") case r == '.': return lx.errorf("unexpected '.': keys cannot start with a '.'") - case r == stringStart || r == rawStringStart: + case r == '"' || r == '\'': lx.ignore() fallthrough default: // Bare key @@ -416,7 +459,7 @@ func lexKeyNameStart(lx *lexer) stateFn { return lx.errorf("unexpected '='") case r == '.': return lx.errorf("unexpected '.'") - case r == stringStart || r == rawStringStart: + case r == '"' || r == '\'': lx.ignore() lx.push(lexKeyEnd) return lexQuotedName @@ -434,7 +477,7 @@ func lexKeyEnd(lx *lexer) stateFn { case isWhitespace(r): return lexSkip(lx, lexKeyEnd) case r == eof: - return lx.errorf("unexpected EOF; expected key separator %q", keySep) + return lx.errorf("unexpected EOF; expected key separator '='") case r == '.': lx.ignore() return lexKeyNameStart @@ -461,17 +504,17 @@ func lexValue(lx *lexer) stateFn { return lexNumberOrDateStart } switch r { - case arrayStart: + case '[': lx.ignore() lx.emit(itemArray) return lexArrayValue - case inlineTableStart: + case '{': lx.ignore() lx.emit(itemInlineTableStart) return lexInlineTableValue - case stringStart: - if lx.accept(stringStart) { - if lx.accept(stringStart) { + case '"': + if lx.accept('"') { + if lx.accept('"') { lx.ignore() // Ignore """ return lexMultilineString } @@ -479,9 +522,9 @@ func lexValue(lx *lexer) stateFn { } lx.ignore() // ignore the '"' return lexString - case rawStringStart: - if lx.accept(rawStringStart) { - if lx.accept(rawStringStart) { + case '\'': + if lx.accept('\'') { + if lx.accept('\'') { lx.ignore() // Ignore """ return lexMultilineRawString } @@ -520,14 +563,12 @@ func lexArrayValue(lx *lexer) stateFn { switch { case isWhitespace(r) || isNL(r): return lexSkip(lx, lexArrayValue) - case r == commentStart: + case r == '#': lx.push(lexArrayValue) return lexCommentStart - case r == comma: + case r == ',': return lx.errorf("unexpected comma") - case r == arrayEnd: - // NOTE(caleb): The spec isn't clear about whether you can have - // a trailing comma or not, so we'll allow it. + case r == ']': return lexArrayEnd } @@ -540,22 +581,20 @@ func lexArrayValue(lx *lexer) stateFn { // the next value (or the end of the array): it ignores whitespace and newlines // and expects either a ',' or a ']'. func lexArrayValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { + switch r := lx.next(); { case isWhitespace(r) || isNL(r): return lexSkip(lx, lexArrayValueEnd) - case r == commentStart: + case r == '#': lx.push(lexArrayValueEnd) return lexCommentStart - case r == comma: + case r == ',': lx.ignore() return lexArrayValue // move on to the next value - case r == arrayEnd: + case r == ']': return lexArrayEnd + default: + return lx.errorf("expected a comma (',') or array terminator (']'), but got %s", runeOrEOF(r)) } - return lx.errorf( - "expected a comma or array terminator %q, but got %s instead", - arrayEnd, runeOrEOF(r)) } // lexArrayEnd finishes the lexing of an array. 
@@ -574,13 +613,13 @@ func lexInlineTableValue(lx *lexer) stateFn { case isWhitespace(r): return lexSkip(lx, lexInlineTableValue) case isNL(r): - return lx.errorf("newlines not allowed within inline tables") - case r == commentStart: + return lx.errorPrevLine(errLexInlineTableNL{}) + case r == '#': lx.push(lexInlineTableValue) return lexCommentStart - case r == comma: + case r == ',': return lx.errorf("unexpected comma") - case r == inlineTableEnd: + case r == '}': return lexInlineTableEnd } lx.backup() @@ -596,23 +635,21 @@ func lexInlineTableValueEnd(lx *lexer) stateFn { case isWhitespace(r): return lexSkip(lx, lexInlineTableValueEnd) case isNL(r): - return lx.errorf("newlines not allowed within inline tables") - case r == commentStart: + return lx.errorPrevLine(errLexInlineTableNL{}) + case r == '#': lx.push(lexInlineTableValueEnd) return lexCommentStart - case r == comma: + case r == ',': lx.ignore() lx.skip(isWhitespace) if lx.peek() == '}' { return lx.errorf("trailing comma not allowed in inline tables") } return lexInlineTableValue - case r == inlineTableEnd: + case r == '}': return lexInlineTableEnd default: - return lx.errorf( - "expected a comma or an inline table terminator %q, but got %s instead", - inlineTableEnd, runeOrEOF(r)) + return lx.errorf("expected a comma or an inline table terminator '}', but got %s instead", runeOrEOF(r)) } } @@ -638,14 +675,12 @@ func lexString(lx *lexer) stateFn { switch { case r == eof: return lx.errorf(`unexpected EOF; expected '"'`) - case isControl(r) || r == '\r': - return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) case isNL(r): - return lx.errorf("strings cannot contain newlines") + return lx.errorPrevLine(errLexStringNL{}) case r == '\\': lx.push(lexString) return lexStringEscape - case r == stringEnd: + case r == '"': lx.backup() lx.emit(itemString) lx.next() @@ -660,23 +695,20 @@ func lexString(lx *lexer) stateFn { func lexMultilineString(lx *lexer) stateFn { r := lx.next() switch r { + default: + return lexMultilineString case eof: return lx.errorf(`unexpected EOF; expected '"""'`) - case '\r': - if lx.peek() != '\n' { - return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) - } - return lexMultilineString case '\\': return lexMultilineStringEscape - case stringEnd: + case '"': /// Found " → try to read two more "". - if lx.accept(stringEnd) { - if lx.accept(stringEnd) { + if lx.accept('"') { + if lx.accept('"') { /// Peek ahead: the string can contain " and "", including at the /// end: """str""""" /// 6 or more at the end, however, is an error. - if lx.peek() == stringEnd { + if lx.peek() == '"' { /// Check if we already lexed 5 's; if so we have 6 now, and /// that's just too many man! if strings.HasSuffix(lx.current(), `"""""`) { @@ -699,12 +731,8 @@ func lexMultilineString(lx *lexer) stateFn { } lx.backup() } + return lexMultilineString } - - if isControl(r) { - return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) - } - return lexMultilineString } // lexRawString consumes a raw string. Nothing can be escaped in such a string. 
@@ -712,20 +740,19 @@ func lexMultilineString(lx *lexer) stateFn { func lexRawString(lx *lexer) stateFn { r := lx.next() switch { + default: + return lexRawString case r == eof: return lx.errorf(`unexpected EOF; expected "'"`) - case isControl(r) || r == '\r': - return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) case isNL(r): - return lx.errorf("strings cannot contain newlines") - case r == rawStringEnd: + return lx.errorPrevLine(errLexStringNL{}) + case r == '\'': lx.backup() lx.emit(itemRawString) lx.next() lx.ignore() return lx.pop() } - return lexRawString } // lexMultilineRawString consumes a raw string. Nothing can be escaped in such @@ -734,21 +761,18 @@ func lexRawString(lx *lexer) stateFn { func lexMultilineRawString(lx *lexer) stateFn { r := lx.next() switch r { + default: + return lexMultilineRawString case eof: return lx.errorf(`unexpected EOF; expected "'''"`) - case '\r': - if lx.peek() != '\n' { - return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) - } - return lexMultilineRawString - case rawStringEnd: + case '\'': /// Found ' → try to read two more ''. - if lx.accept(rawStringEnd) { - if lx.accept(rawStringEnd) { + if lx.accept('\'') { + if lx.accept('\'') { /// Peek ahead: the string can contain ' and '', including at the /// end: '''str''''' /// 6 or more at the end, however, is an error. - if lx.peek() == rawStringEnd { + if lx.peek() == '\'' { /// Check if we already lexed 5 's; if so we have 6 now, and /// that's just too many man! if strings.HasSuffix(lx.current(), "'''''") { @@ -771,12 +795,8 @@ func lexMultilineRawString(lx *lexer) stateFn { } lx.backup() } + return lexMultilineRawString } - - if isControl(r) { - return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) - } - return lexMultilineRawString } // lexMultilineStringEscape consumes an escaped character. It assumes that the @@ -817,8 +837,7 @@ func lexStringEscape(lx *lexer) stateFn { case 'U': return lexLongUnicodeEscape } - return lx.errorf("invalid escape character %q; only the following escape characters are allowed: "+ - `\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r) + return lx.error(errLexEscape{r}) } func lexShortUnicodeEscape(lx *lexer) stateFn { @@ -1108,8 +1127,6 @@ func lexComment(lx *lexer) stateFn { lx.backup() lx.emit(itemText) return lx.pop() - case isControl(r): - return lx.errorf("control characters are not allowed inside comments: '0x%02x'", r) default: return lexComment } @@ -1121,52 +1138,6 @@ func lexSkip(lx *lexer, nextState stateFn) stateFn { return nextState } -// isWhitespace returns true if `r` is a whitespace character according -// to the spec. 
-func isWhitespace(r rune) bool { - return r == '\t' || r == ' ' -} - -func isNL(r rune) bool { - return r == '\n' || r == '\r' -} - -// Control characters except \n, \t -func isControl(r rune) bool { - switch r { - case '\t', '\r', '\n': - return false - default: - return (r >= 0x00 && r <= 0x1f) || r == 0x7f - } -} - -func isDigit(r rune) bool { - return r >= '0' && r <= '9' -} - -func isHexadecimal(r rune) bool { - return (r >= '0' && r <= '9') || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} - -func isOctal(r rune) bool { - return r >= '0' && r <= '7' -} - -func isBinary(r rune) bool { - return r == '0' || r == '1' -} - -func isBareKeyChar(r rune) bool { - return (r >= 'A' && r <= 'Z') || - (r >= 'a' && r <= 'z') || - (r >= '0' && r <= '9') || - r == '_' || - r == '-' -} - func (s stateFn) String() string { name := runtime.FuncForPC(reflect.ValueOf(s).Pointer()).Name() if i := strings.LastIndexByte(name, '.'); i > -1 { @@ -1223,3 +1194,26 @@ func (itype itemType) String() string { func (item item) String() string { return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) } + +func isWhitespace(r rune) bool { return r == '\t' || r == ' ' } +func isNL(r rune) bool { return r == '\n' || r == '\r' } +func isControl(r rune) bool { // Control characters except \t, \r, \n + switch r { + case '\t', '\r', '\n': + return false + default: + return (r >= 0x00 && r <= 0x1f) || r == 0x7f + } +} +func isDigit(r rune) bool { return r >= '0' && r <= '9' } +func isBinary(r rune) bool { return r == '0' || r == '1' } +func isOctal(r rune) bool { return r >= '0' && r <= '7' } +func isHexadecimal(r rune) bool { + return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'f') || (r >= 'A' && r <= 'F') +} +func isBareKeyChar(r rune) bool { + return (r >= 'A' && r <= 'Z') || + (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' || r == '-' +} diff --git a/vendor/github.com/BurntSushi/toml/decode_meta.go b/vendor/github.com/BurntSushi/toml/meta.go similarity index 76% rename from vendor/github.com/BurntSushi/toml/decode_meta.go rename to vendor/github.com/BurntSushi/toml/meta.go index ad8899c6c..868619fb9 100644 --- a/vendor/github.com/BurntSushi/toml/decode_meta.go +++ b/vendor/github.com/BurntSushi/toml/meta.go @@ -1,34 +1,39 @@ package toml -import "strings" +import ( + "strings" +) -// MetaData allows access to meta information about TOML data that may not be -// inferable via reflection. In particular, whether a key has been defined and -// the TOML type of a key. +// MetaData allows access to meta information about TOML data that's not +// accessible otherwise. +// +// It allows checking if a key is defined in the TOML data, whether any keys +// were undecoded, and the TOML type of a key. type MetaData struct { + context Key // Used only during decoding. + mapping map[string]interface{} types map[string]tomlType keys []Key - decoded map[string]bool - context Key // Used only during decoding. + decoded map[string]struct{} } // IsDefined reports if the key exists in the TOML data. // // The key should be specified hierarchically, for example to access the TOML -// key "a.b.c" you would use: +// key "a.b.c" you would use IsDefined("a", "b", "c"). Keys are case sensitive. // -// IsDefined("a", "b", "c") -// -// IsDefined will return false if an empty key given. Keys are case sensitive. +// Returns false for an empty key. 
func (md *MetaData) IsDefined(key ...string) bool { if len(key) == 0 { return false } - var hash map[string]interface{} - var ok bool - var hashOrVal interface{} = md.mapping + var ( + hash map[string]interface{} + ok bool + hashOrVal interface{} = md.mapping + ) for _, k := range key { if hash, ok = hashOrVal.(map[string]interface{}); !ok { return false @@ -45,51 +50,12 @@ func (md *MetaData) IsDefined(key ...string) bool { // Type will return the empty string if given an empty key or a key that does // not exist. Keys are case sensitive. func (md *MetaData) Type(key ...string) string { - fullkey := strings.Join(key, ".") - if typ, ok := md.types[fullkey]; ok { + if typ, ok := md.types[Key(key).String()]; ok { return typ.typeString() } return "" } -// Key represents any TOML key, including key groups. Use (MetaData).Keys to get -// values of this type. -type Key []string - -func (k Key) String() string { return strings.Join(k, ".") } - -func (k Key) maybeQuotedAll() string { - var ss []string - for i := range k { - ss = append(ss, k.maybeQuoted(i)) - } - return strings.Join(ss, ".") -} - -func (k Key) maybeQuoted(i int) string { - if k[i] == "" { - return `""` - } - quote := false - for _, c := range k[i] { - if !isBareKeyChar(c) { - quote = true - break - } - } - if quote { - return `"` + quotedReplacer.Replace(k[i]) + `"` - } - return k[i] -} - -func (k Key) add(piece string) Key { - newKey := make(Key, len(k)+1) - copy(newKey, k) - newKey[len(k)] = piece - return newKey -} - // Keys returns a slice of every key in the TOML data, including key groups. // // Each key is itself a slice, where the first element is the top of the @@ -115,9 +81,40 @@ func (md *MetaData) Keys() []Key { func (md *MetaData) Undecoded() []Key { undecoded := make([]Key, 0, len(md.keys)) for _, key := range md.keys { - if !md.decoded[key.String()] { + if _, ok := md.decoded[key.String()]; !ok { undecoded = append(undecoded, key) } } return undecoded } + +// Key represents any TOML key, including key groups. Use (MetaData).Keys to get +// values of this type. +type Key []string + +func (k Key) String() string { + ss := make([]string, len(k)) + for i := range k { + ss[i] = k.maybeQuoted(i) + } + return strings.Join(ss, ".") +} + +func (k Key) maybeQuoted(i int) string { + if k[i] == "" { + return `""` + } + for _, c := range k[i] { + if !isBareKeyChar(c) { + return `"` + dblQuotedReplacer.Replace(k[i]) + `"` + } + } + return k[i] +} + +func (k Key) add(piece string) Key { + newKey := make(Key, len(k)+1) + copy(newKey, k) + newKey[len(k)] = piece + return newKey +} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go index d9ae5db94..8269cca17 100644 --- a/vendor/github.com/BurntSushi/toml/parse.go +++ b/vendor/github.com/BurntSushi/toml/parse.go @@ -1,7 +1,6 @@ package toml import ( - "errors" "fmt" "strconv" "strings" @@ -12,35 +11,23 @@ import ( ) type parser struct { - mapping map[string]interface{} - types map[string]tomlType - lx *lexer + lx *lexer + context Key // Full key for the current hash in scope. + currentKey string // Base key name for everything except hashes. + pos Position // Current position in the TOML file. - ordered []Key // List of keys in the order that they appear in the TOML data. - context Key // Full key for the current hash in scope. - currentKey string // Base key name for everything except hashes. - approxLine int // Rough approximation of line number - implicits map[string]bool // Record implied keys (e.g. 'key.group.names'). 
-} - -// ParseError is used when a file can't be parsed: for example invalid integer -// literals, duplicate keys, etc. -type ParseError struct { - Message string - Line int - LastKey string -} - -func (pe ParseError) Error() string { - return fmt.Sprintf("Near line %d (last key parsed '%s'): %s", - pe.Line, pe.LastKey, pe.Message) + ordered []Key // List of keys in the order that they appear in the TOML data. + mapping map[string]interface{} // Map keyname → key value. + types map[string]tomlType // Map keyname → TOML type. + implicits map[string]struct{} // Record implicit keys (e.g. "key.group.names"). } func parse(data string) (p *parser, err error) { defer func() { if r := recover(); r != nil { - var ok bool - if err, ok = r.(ParseError); ok { + if pErr, ok := r.(ParseError); ok { + pErr.input = data + err = pErr return } panic(r) @@ -60,8 +47,13 @@ func parse(data string) (p *parser, err error) { if len(data) < 6 { ex = len(data) } - if strings.ContainsRune(data[:ex], 0) { - return nil, errors.New("files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8") + if i := strings.IndexRune(data[:ex], 0); i > -1 { + return nil, ParseError{ + Message: "files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8", + Position: Position{Line: 1, Start: i, Len: 1}, + Line: 1, + input: data, + } } p = &parser{ @@ -69,7 +61,7 @@ func parse(data string) (p *parser, err error) { types: make(map[string]tomlType), lx: lex(data), ordered: make([]Key, 0), - implicits: make(map[string]bool), + implicits: make(map[string]struct{}), } for { item := p.next() @@ -82,12 +74,21 @@ func parse(data string) (p *parser, err error) { return p, nil } -func (p *parser) panicf(format string, v ...interface{}) { - msg := fmt.Sprintf(format, v...) +func (p *parser) panicItemf(it item, format string, v ...interface{}) { panic(ParseError{ - Message: msg, - Line: p.approxLine, - LastKey: p.current(), + Message: fmt.Sprintf(format, v...), + Position: it.pos, + Line: it.pos.Len, + LastKey: p.current(), + }) +} + +func (p *parser) panicf(format string, v ...interface{}) { + panic(ParseError{ + Message: fmt.Sprintf(format, v...), + Position: p.pos, + Line: p.pos.Line, + LastKey: p.current(), }) } @@ -95,11 +96,26 @@ func (p *parser) next() item { it := p.lx.nextItem() //fmt.Printf("ITEM %-18s line %-3d │ %q\n", it.typ, it.line, it.val) if it.typ == itemError { - p.panicf("%s", it.val) + if it.err != nil { + panic(ParseError{ + Position: it.pos, + Line: it.pos.Line, + LastKey: p.current(), + err: it.err, + }) + } + + p.panicItemf(it, "%s", it.val) } return it } +func (p *parser) nextPos() item { + it := p.next() + p.pos = it.pos + return it +} + func (p *parser) bug(format string, v ...interface{}) { panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) } @@ -119,11 +135,9 @@ func (p *parser) assertEqual(expected, got itemType) { func (p *parser) topLevel(item item) { switch item.typ { case itemCommentStart: // # .. - p.approxLine = item.line p.expect(itemText) case itemTableStart: // [ .. ] - name := p.next() - p.approxLine = name.line + name := p.nextPos() var key Key for ; name.typ != itemTableEnd && name.typ != itemEOF; name = p.next() { @@ -135,8 +149,7 @@ func (p *parser) topLevel(item item) { p.setType("", tomlHash) p.ordered = append(p.ordered, key) case itemArrayTableStart: // [[ .. 
]] - name := p.next() - p.approxLine = name.line + name := p.nextPos() var key Key for ; name.typ != itemArrayTableEnd && name.typ != itemEOF; name = p.next() { @@ -150,8 +163,7 @@ func (p *parser) topLevel(item item) { case itemKeyStart: // key = .. outerContext := p.context /// Read all the key parts (e.g. 'a' and 'b' in 'a.b') - k := p.next() - p.approxLine = k.line + k := p.nextPos() var key Key for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { key = append(key, p.keyString(k)) @@ -206,9 +218,9 @@ var datetimeRepl = strings.NewReplacer( func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { switch it.typ { case itemString: - return p.replaceEscapes(it.val), p.typeOfPrimitive(it) + return p.replaceEscapes(it, it.val), p.typeOfPrimitive(it) case itemMultilineString: - return p.replaceEscapes(stripFirstNewline(stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) + return p.replaceEscapes(it, stripFirstNewline(stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) case itemRawString: return it.val, p.typeOfPrimitive(it) case itemRawMultilineString: @@ -240,10 +252,10 @@ func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { func (p *parser) valueInteger(it item) (interface{}, tomlType) { if !numUnderscoresOK(it.val) { - p.panicf("Invalid integer %q: underscores must be surrounded by digits", it.val) + p.panicItemf(it, "Invalid integer %q: underscores must be surrounded by digits", it.val) } if numHasLeadingZero(it.val) { - p.panicf("Invalid integer %q: cannot have leading zeroes", it.val) + p.panicItemf(it, "Invalid integer %q: cannot have leading zeroes", it.val) } num, err := strconv.ParseInt(it.val, 0, 64) @@ -254,7 +266,7 @@ func (p *parser) valueInteger(it item) (interface{}, tomlType) { // So mark the former as a bug but the latter as a legitimate user // error. if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { - p.panicf("Integer '%s' is out of the range of 64-bit signed integers.", it.val) + p.panicItemf(it, "Integer '%s' is out of the range of 64-bit signed integers.", it.val) } else { p.bug("Expected integer value, but got '%s'.", it.val) } @@ -272,18 +284,18 @@ func (p *parser) valueFloat(it item) (interface{}, tomlType) { }) for _, part := range parts { if !numUnderscoresOK(part) { - p.panicf("Invalid float %q: underscores must be surrounded by digits", it.val) + p.panicItemf(it, "Invalid float %q: underscores must be surrounded by digits", it.val) } } if len(parts) > 0 && numHasLeadingZero(parts[0]) { - p.panicf("Invalid float %q: cannot have leading zeroes", it.val) + p.panicItemf(it, "Invalid float %q: cannot have leading zeroes", it.val) } if !numPeriodsOK(it.val) { // As a special case, numbers like '123.' or '1.e2', // which are valid as far as Go/strconv are concerned, // must be rejected because TOML says that a fractional // part consists of '.' followed by 1+ digits. - p.panicf("Invalid float %q: '.' must be followed by one or more digits", it.val) + p.panicItemf(it, "Invalid float %q: '.' must be followed by one or more digits", it.val) } val := strings.Replace(it.val, "_", "", -1) if val == "+nan" || val == "-nan" { // Go doesn't support this, but TOML spec does. 
@@ -292,9 +304,9 @@ func (p *parser) valueFloat(it item) (interface{}, tomlType) { num, err := strconv.ParseFloat(val, 64) if err != nil { if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { - p.panicf("Float '%s' is out of the range of 64-bit IEEE-754 floating-point numbers.", it.val) + p.panicItemf(it, "Float '%s' is out of the range of 64-bit IEEE-754 floating-point numbers.", it.val) } else { - p.panicf("Invalid float value: %q", it.val) + p.panicItemf(it, "Invalid float value: %q", it.val) } } return num, p.typeOfPrimitive(it) @@ -325,7 +337,7 @@ func (p *parser) valueDatetime(it item) (interface{}, tomlType) { } } if !ok { - p.panicf("Invalid TOML Datetime: %q.", it.val) + p.panicItemf(it, "Invalid TOML Datetime: %q.", it.val) } return t, p.typeOfPrimitive(it) } @@ -335,8 +347,12 @@ func (p *parser) valueArray(it item) (interface{}, tomlType) { // p.setType(p.currentKey, typ) var ( - array []interface{} types []tomlType + + // Initialize to a non-nil empty slice. This makes it consistent with + // how S = [] decodes into a non-nil slice inside something like struct + // { S []string }. See #338 + array = []interface{}{} ) for it = p.next(); it.typ != itemArrayEnd; it = p.next() { if it.typ == itemCommentStart { @@ -347,6 +363,12 @@ func (p *parser) valueArray(it item) (interface{}, tomlType) { val, typ := p.value(it, true) array = append(array, val) types = append(types, typ) + + // XXX: types isn't used here, we need it to record the accurate type + // information. + // + // Not entirely sure how to best store this; could use "key[0]", + // "key[1]" notation, or maybe store it on the Array type? } return array, tomlArray } @@ -373,8 +395,7 @@ func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tom } /// Read all key parts. - k := p.next() - p.approxLine = k.line + k := p.nextPos() var key Key for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { key = append(key, p.keyString(k)) @@ -408,7 +429,7 @@ func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tom // numHasLeadingZero checks if this number has leading zeroes, allowing for '0', // +/- signs, and base prefixes. func numHasLeadingZero(s string) bool { - if len(s) > 1 && s[0] == '0' && isDigit(rune(s[1])) { // >1 to allow "0" and isDigit to allow 0x + if len(s) > 1 && s[0] == '0' && !(s[1] == 'b' || s[1] == 'o' || s[1] == 'x') { // Allow 0b, 0o, 0x return true } if len(s) > 2 && (s[0] == '-' || s[0] == '+') && s[1] == '0' { @@ -503,7 +524,7 @@ func (p *parser) addContext(key Key, array bool) { if hash, ok := hashContext[k].([]map[string]interface{}); ok { hashContext[k] = append(hash, make(map[string]interface{})) } else { - p.panicf("Key '%s' was already created and cannot be used as an array.", keyContext) + p.panicf("Key '%s' was already created and cannot be used as an array.", key) } } else { p.setValue(key[len(key)-1], make(map[string]interface{})) @@ -513,8 +534,8 @@ func (p *parser) addContext(key Key, array bool) { // set calls setValue and setType. func (p *parser) set(key string, val interface{}, typ tomlType) { - p.setValue(p.currentKey, val) - p.setType(p.currentKey, typ) + p.setValue(key, val) + p.setType(key, typ) } // setValue sets the given key to the given value in the current context. @@ -573,27 +594,31 @@ func (p *parser) setValue(key string, value interface{}) { hash[key] = value } -// setType sets the type of a particular value at a given key. -// It should be called immediately AFTER setValue. 
+// setType sets the type of a particular value at a given key. It should be +// called immediately AFTER setValue. // // Note that if `key` is empty, then the type given will be applied to the // current context (which is either a table or an array of tables). func (p *parser) setType(key string, typ tomlType) { keyContext := make(Key, 0, len(p.context)+1) - for _, k := range p.context { - keyContext = append(keyContext, k) - } + keyContext = append(keyContext, p.context...) if len(key) > 0 { // allow type setting for hashes keyContext = append(keyContext, key) } + // Special case to make empty keys ("" = 1) work. + // Without it it will set "" rather than `""`. + // TODO: why is this needed? And why is this only needed here? + if len(keyContext) == 0 { + keyContext = Key{""} + } p.types[keyContext.String()] = typ } // Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and // "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly). -func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = true } -func (p *parser) removeImplicit(key Key) { p.implicits[key.String()] = false } -func (p *parser) isImplicit(key Key) bool { return p.implicits[key.String()] } +func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = struct{}{} } +func (p *parser) removeImplicit(key Key) { delete(p.implicits, key.String()) } +func (p *parser) isImplicit(key Key) bool { _, ok := p.implicits[key.String()]; return ok } func (p *parser) isArray(key Key) bool { return p.types[key.String()] == tomlArray } func (p *parser) addImplicitContext(key Key) { p.addImplicit(key) @@ -662,8 +687,8 @@ func stripEscapedNewlines(s string) string { return strings.Join(split, "") } -func (p *parser) replaceEscapes(str string) string { - var replaced []rune +func (p *parser) replaceEscapes(it item, str string) string { + replaced := make([]rune, 0, len(str)) s := []byte(str) r := 0 for r < len(s) { @@ -683,7 +708,7 @@ func (p *parser) replaceEscapes(str string) string { p.bug("Expected valid escape code after \\, but got %q.", s[r]) return "" case ' ', '\t': - p.panicf("invalid escape: '\\%c'", s[r]) + p.panicItemf(it, "invalid escape: '\\%c'", s[r]) return "" case 'b': replaced = append(replaced, rune(0x0008)) @@ -710,14 +735,14 @@ func (p *parser) replaceEscapes(str string) string { // At this point, we know we have a Unicode escape of the form // `uXXXX` at [r, r+5). (Because the lexer guarantees this // for us.) - escaped := p.asciiEscapeToUnicode(s[r+1 : r+5]) + escaped := p.asciiEscapeToUnicode(it, s[r+1:r+5]) replaced = append(replaced, escaped) r += 5 case 'U': // At this point, we know we have a Unicode escape of the form // `uXXXX` at [r, r+9). (Because the lexer guarantees this // for us.) 
- escaped := p.asciiEscapeToUnicode(s[r+1 : r+9]) + escaped := p.asciiEscapeToUnicode(it, s[r+1:r+9]) replaced = append(replaced, escaped) r += 9 } @@ -725,15 +750,14 @@ func (p *parser) replaceEscapes(str string) string { return string(replaced) } -func (p *parser) asciiEscapeToUnicode(bs []byte) rune { +func (p *parser) asciiEscapeToUnicode(it item, bs []byte) rune { s := string(bs) hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) if err != nil { - p.bug("Could not parse '%s' as a hexadecimal number, but the "+ - "lexer claims it's OK: %s", s, err) + p.bug("Could not parse '%s' as a hexadecimal number, but the lexer claims it's OK: %s", s, err) } if !utf8.ValidRune(rune(hex)) { - p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s) + p.panicItemf(it, "Escaped character '\\u%s' is not valid UTF-8.", s) } return rune(hex) } diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go index 608997c22..254ca82e5 100644 --- a/vendor/github.com/BurntSushi/toml/type_fields.go +++ b/vendor/github.com/BurntSushi/toml/type_fields.go @@ -70,8 +70,8 @@ func typeFields(t reflect.Type) []field { next := []field{{typ: t}} // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} + var count map[reflect.Type]int + var nextCount map[reflect.Type]int // Types already visited at an earlier level. visited := map[reflect.Type]bool{} diff --git a/vendor/github.com/BurntSushi/toml/type_check.go b/vendor/github.com/BurntSushi/toml/type_toml.go similarity index 98% rename from vendor/github.com/BurntSushi/toml/type_check.go rename to vendor/github.com/BurntSushi/toml/type_toml.go index d56aa80fa..4e90d7737 100644 --- a/vendor/github.com/BurntSushi/toml/type_check.go +++ b/vendor/github.com/BurntSushi/toml/type_toml.go @@ -16,7 +16,7 @@ func typeEqual(t1, t2 tomlType) bool { return t1.typeString() == t2.typeString() } -func typeIsHash(t tomlType) bool { +func typeIsTable(t tomlType) bool { return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) } diff --git a/vendor/github.com/OpenPeeDeeP/depguard/README.md b/vendor/github.com/OpenPeeDeeP/depguard/README.md index d704ce6ad..b9422757d 100644 --- a/vendor/github.com/OpenPeeDeeP/depguard/README.md +++ b/vendor/github.com/OpenPeeDeeP/depguard/README.md @@ -1,7 +1,7 @@ # Depguard Go linter that checks package imports are in a list of acceptable packages. It -supports a white list and black list option and can do prefix or glob matching. +can also deny a list of packages and can do prefix or glob matching. This allows you to allow imports from a whole organization or only allow specific packages within a repository. It is recommended to use prefix matching as it is faster than glob matching. The fewer glob matches the better. @@ -24,7 +24,7 @@ The following is an example configuration file. ```json { - "type": "whitelist", + "type": "allowlist", "packages": ["github.com/OpenPeeDeeP/depguard"], "packageErrorMessages": { "github.com/OpenPeeDeeP/depguards": "Please use \"github.com/OpenPeeDeeP/depguard\"," @@ -34,14 +34,48 @@ The following is an example configuration file. } ``` -- `type` can be either `whitelist` or `blacklist`. This check is case insensitive. - If not specified the default is `blacklist`. +- `type` can be either `allowlist` or `denylist`. This check is case insensitive. + If not specified the default is `denylist`. The values `whitelist` and `blacklist` + are also accepted for backwards compatibility. 
- `packages` is a list of packages for the list type specified. - `packageErrorMessages` is a mapping from packages to the error message to display - `inTests` is a list of packages allowed/disallowed only in test files. -- Set `includeGoStdLib` (`includeGoRoot` for backwards compatability) to true if you want to check the list against standard lib. +- Set `includeGoStdLib` (`includeGoRoot` for backwards compatibility) to true if you want to check the list against standard lib. If not specified the default is false. +### Ignore File Rules + +The configuration also allows us to specify rules to ignore certain files considered by the linter. This means that we need not apply package import checks across our entire code base. + +For example, consider the following configuration to block a test package: +```json +{ + "type": "denylist", + "packages": ["github.com/stretchr/testify"], + "inTests": ["github.com/stretchr/testify"] +} +``` + +We can use a `ignoreFileRules` field to write a configuration that only considers test files: +```json +{ + "type": "denylist", + "packages": ["github.com/stretchr/testify"], + "ignoreFileRules": ["!**/*_test.go"] +} +``` + +Or if we wanted to consider only non-test files: +```json +{ + "type": "denylist", + "packages": ["github.com/stretchr/testify"], + "ignoreFileRules": ["**/*_test.go"] +} +``` + +Like the `packages` field, the `ignoreFileRules` field can accept both string prefixes and string glob patterns. Note in the first example above, the use of the `!` character in front of the rule. This is a special character which signals that the linter should negate the rule. This allows for more precise control, but it is only available for glob patterns. + ## Gometalinter The binary installation of this linter can be used with @@ -73,5 +107,5 @@ gometalinter --linter='depguard:depguard -c path/to/config.json:PATH:LINE:COL:ME ## Golangci-lint This linter was built with -[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatable +[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible and read their docs to see how to implement all their linters, including this one. diff --git a/vendor/github.com/OpenPeeDeeP/depguard/depguard.go b/vendor/github.com/OpenPeeDeeP/depguard/depguard.go index 1dbffb7d6..b72754913 100644 --- a/vendor/github.com/OpenPeeDeeP/depguard/depguard.go +++ b/vendor/github.com/OpenPeeDeeP/depguard/depguard.go @@ -25,6 +25,8 @@ const ( // StringToListType makes it easier to turn a string into a ListType. // It assumes that the string representation is lower case. var StringToListType = map[string]ListType{ + "allowlist": LTWhitelist, + "denylist": LTBlacklist, "whitelist": LTWhitelist, "blacklist": LTBlacklist, } @@ -35,6 +37,12 @@ type Issue struct { Position token.Position } +// Wrapper for glob patterns that allows for custom negation +type negatableGlob struct { + g glob.Glob + negate bool +} + // Depguard checks imports to make sure they follow the given list and constraints. 
type Depguard struct { ListType ListType @@ -48,6 +56,10 @@ type Depguard struct { prefixTestPackages []string globTestPackages []glob.Glob + IgnoreFileRules []string + prefixIgnoreFileRules []string + globIgnoreFileRules []negatableGlob + prefixRoot []string } @@ -69,6 +81,9 @@ func (dg *Depguard) Run(config *loader.Config, prog *loader.Program) ([]*Issue, var issues []*Issue for pkg, positions := range directImports { for _, pos := range positions { + if ignoreFile(pos.Filename, dg.prefixIgnoreFileRules, dg.globIgnoreFileRules) { + continue + } prefixList, globList := dg.prefixPackages, dg.globPackages if len(dg.TestPackages) > 0 && strings.Index(pos.Filename, "_test.go") != -1 { @@ -119,6 +134,32 @@ func (dg *Depguard) initialize(config *loader.Config, prog *loader.Program) erro // Sort the test packages so we can have a faster search in the array sort.Strings(dg.prefixTestPackages) + // parse ignore file rules + for _, rule := range dg.IgnoreFileRules { + if strings.ContainsAny(rule, "!?*[]{}") { + ng := negatableGlob{} + if strings.HasPrefix(rule, "!") { + ng.negate = true + rule = rule[1:] // Strip out the leading '!' + } else { + ng.negate = false + } + + g, err := glob.Compile(rule, '/') + if err != nil { + return err + } + ng.g = g + + dg.globIgnoreFileRules = append(dg.globIgnoreFileRules, ng) + } else { + dg.prefixIgnoreFileRules = append(dg.prefixIgnoreFileRules, rule) + } + } + + // Sort the rules so we can have a faster search in the array + sort.Strings(dg.prefixIgnoreFileRules) + if !dg.IncludeGoRoot { var err error dg.prefixRoot, err = listRootPrefixs(config.Build) @@ -158,30 +199,49 @@ func (dg *Depguard) createImportMap(prog *loader.Program) (map[string][]token.Po return importMap, nil } -func pkgInList(pkg string, prefixList []string, globList []glob.Glob) bool { - if pkgInPrefixList(pkg, prefixList) { +func ignoreFile(filename string, prefixList []string, negatableGlobList []negatableGlob) bool { + if strInPrefixList(filename, prefixList) { return true } - return pkgInGlobList(pkg, globList) + return strInNegatableGlobList(filename, negatableGlobList) } -func pkgInPrefixList(pkg string, prefixList []string) bool { - // Idx represents where in the package slice the passed in package would go +func pkgInList(pkg string, prefixList []string, globList []glob.Glob) bool { + if strInPrefixList(pkg, prefixList) { + return true + } + return strInGlobList(pkg, globList) +} + +func strInPrefixList(str string, prefixList []string) bool { + // Idx represents where in the prefix slice the passed in string would go // when sorted. -1 Just means that it would be at the very front of the slice. 
idx := sort.Search(len(prefixList), func(i int) bool { - return prefixList[i] > pkg + return prefixList[i] > str }) - 1 - // This means that the package passed in has no way to be prefixed by anything - // in the package list as it is already smaller then everything + // This means that the string passed in has no way to be prefixed by anything + // in the prefix list as it is already smaller then everything if idx == -1 { return false } - return strings.HasPrefix(pkg, prefixList[idx]) + return strings.HasPrefix(str, prefixList[idx]) } -func pkgInGlobList(pkg string, globList []glob.Glob) bool { +func strInGlobList(str string, globList []glob.Glob) bool { for _, g := range globList { - if g.Match(pkg) { + if g.Match(str) { + return true + } + } + return false +} + +func strInNegatableGlobList(str string, negatableGlobList []negatableGlob) bool { + for _, ng := range negatableGlobList { + // Return true when: + // - Match is true and negate is off + // - Match is false and negate is on + if ng.g.Match(str) != ng.negate { return true } } diff --git a/vendor/github.com/OpenPeeDeeP/depguard/go.mod b/vendor/github.com/OpenPeeDeeP/depguard/go.mod index 5ad37edb8..68daf00d7 100644 --- a/vendor/github.com/OpenPeeDeeP/depguard/go.mod +++ b/vendor/github.com/OpenPeeDeeP/depguard/go.mod @@ -5,5 +5,6 @@ go 1.13 require ( github.com/gobwas/glob v0.2.3 github.com/kisielk/gotool v1.0.0 + github.com/stretchr/testify v1.7.0 // indirect golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b ) diff --git a/vendor/github.com/OpenPeeDeeP/depguard/go.sum b/vendor/github.com/OpenPeeDeeP/depguard/go.sum index 24693c36d..11a8c1c4a 100644 --- a/vendor/github.com/OpenPeeDeeP/depguard/go.sum +++ b/vendor/github.com/OpenPeeDeeP/depguard/go.sum @@ -1,6 +1,16 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b h1:7tibmaEqrQYA+q6ri7NQjuxqSwechjtDHKq6/e85S38= golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go index 2337404ae..17740faa7 100644 --- a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go @@ 
-24,10 +24,15 @@ type UsedIssue struct { identifier string pattern string position token.Position + customMsg string } func (a UsedIssue) Details() string { - return fmt.Sprintf("use of `%s` forbidden by pattern `%s`", a.identifier, a.pattern) + explanation := fmt.Sprintf(` because %q`, a.customMsg) + if a.customMsg == "" { + explanation = fmt.Sprintf(" by pattern `%s`", a.pattern) + } + return fmt.Sprintf("use of `%s` forbidden", a.identifier) + explanation } func (a UsedIssue) Position() token.Position { @@ -36,13 +41,13 @@ func (a UsedIssue) Position() token.Position { func (a UsedIssue) String() string { return toString(a) } -func toString(i Issue) string { +func toString(i UsedIssue) string { return fmt.Sprintf("%s at %s", i.Details(), i.Position()) } type Linter struct { cfg config - patterns []*regexp.Regexp + patterns []*pattern } func DefaultPatterns() []string { @@ -65,13 +70,13 @@ func NewLinter(patterns []string, options ...Option) (*Linter, error) { if len(patterns) == 0 { patterns = DefaultPatterns() } - compiledPatterns := make([]*regexp.Regexp, 0, len(patterns)) - for _, p := range patterns { - re, err := regexp.Compile(p) + compiledPatterns := make([]*pattern, 0, len(patterns)) + for _, ptrn := range patterns { + p, err := parse(ptrn) if err != nil { - return nil, fmt.Errorf("unable to compile pattern `%s`: %s", p, err) + return nil, err } - compiledPatterns = append(compiledPatterns, re) + compiledPatterns = append(compiledPatterns, p) } return &Linter{ cfg: cfg, @@ -158,11 +163,12 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { return v } for _, p := range v.linter.patterns { - if p.MatchString(v.textFor(node)) && !v.permit(node) { + if p.pattern.MatchString(v.textFor(node)) && !v.permit(node) { v.issues = append(v.issues, UsedIssue{ identifier: v.textFor(node), - pattern: p.String(), + pattern: p.pattern.String(), position: v.fset.Position(node.Pos()), + customMsg: p.msg, }) } } diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go new file mode 100644 index 000000000..c23648822 --- /dev/null +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go @@ -0,0 +1,43 @@ +package forbidigo + +import ( + "fmt" + "regexp" + "regexp/syntax" + "strings" +) + +type pattern struct { + pattern *regexp.Regexp + msg string +} + +func parse(ptrn string) (*pattern, error) { + ptrnRe, err := regexp.Compile(ptrn) + if err != nil { + return nil, fmt.Errorf("unable to compile pattern `%s`: %s", ptrn, err) + } + re, err := syntax.Parse(ptrn, syntax.Perl) + if err != nil { + return nil, fmt.Errorf("unable to parse pattern `%s`: %s", ptrn, err) + } + msg := extractComment(re) + return &pattern{pattern: ptrnRe, msg: msg}, nil +} + +// Traverse the leaf submatches in the regex tree and extract a comment, if any +// is present. 
+func extractComment(re *syntax.Regexp) string { + for _, sub := range re.Sub { + if len(sub.Sub) > 0 { + if comment := extractComment(sub); comment != "" { + return comment + } + } + subStr := sub.String() + if strings.HasPrefix(subStr, "#") { + return strings.TrimSpace(strings.TrimPrefix(subStr, "#")) + } + } + return "" +} diff --git a/vendor/github.com/ashanbrown/makezero/makezero/makezero.go b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go index db9b45adc..89cfcf4ee 100644 --- a/vendor/github.com/ashanbrown/makezero/makezero/makezero.go +++ b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go @@ -12,6 +12,13 @@ import ( "regexp" ) +// a decl might include multiple var, +// so var name with decl make final uniq obj +type uniqDecl struct { + varName string + decl interface{} +} + type Issue interface { Details() string Position() token.Position @@ -58,7 +65,7 @@ type visitor struct { comments []*ast.CommentGroup // comments to apply during this visit info *types.Info - nonZeroLengthSliceDecls map[interface{}]struct{} + nonZeroLengthSliceDecls map[uniqDecl]struct{} fset *token.FileSet issues []Issue } @@ -81,7 +88,7 @@ func (l Linter) Run(fset *token.FileSet, info *types.Info, nodes ...ast.Node) ([ comments = file.Comments } visitor := visitor{ - nonZeroLengthSliceDecls: make(map[interface{}]struct{}), + nonZeroLengthSliceDecls: make(map[uniqDecl]struct{}), initLenMustBeZero: l.initLenMustBeZero, info: info, fset: fset, @@ -116,9 +123,9 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { if len(right.Args) == 2 { // ignore if not a slice or it has explicit zero length if !v.isSlice(right.Args[0]) { - break + continue } else if lit, ok := right.Args[1].(*ast.BasicLit); ok && lit.Kind == token.INT && lit.Value == "0" { - break + continue } if v.initLenMustBeZero && !v.hasNoLintOnSameLine(fun) { v.issues = append(v.issues, MustHaveNonZeroInitLenIssue{ @@ -148,7 +155,10 @@ func (v *visitor) hasNonZeroInitialLength(ident *ast.Ident) bool { ident.Name, v.fset.Position(ident.Pos()).String()) return false } - _, exists := v.nonZeroLengthSliceDecls[ident.Obj.Decl] + _, exists := v.nonZeroLengthSliceDecls[uniqDecl{ + varName: ident.Obj.Name, + decl: ident.Obj.Decl, + }] return exists } @@ -160,7 +170,10 @@ func (v *visitor) recordNonZeroLengthSlices(node ast.Node) { if ident.Obj == nil { return } - v.nonZeroLengthSliceDecls[ident.Obj.Decl] = struct{}{} + v.nonZeroLengthSliceDecls[uniqDecl{ + varName: ident.Obj.Name, + decl: ident.Obj.Decl, + }] = struct{}{} } func (v *visitor) isSlice(node ast.Node) bool { diff --git a/vendor/github.com/blizzy78/varnamelen/.editorconfig b/vendor/github.com/blizzy78/varnamelen/.editorconfig index bf1663fb2..7b6461545 100644 --- a/vendor/github.com/blizzy78/varnamelen/.editorconfig +++ b/vendor/github.com/blizzy78/varnamelen/.editorconfig @@ -7,3 +7,7 @@ insert_final_newline = true indent_style = tab indent_size = 4 trim_trailing_whitespace = true + +[**/*.yml] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/blizzy78/varnamelen/.golangci.yml b/vendor/github.com/blizzy78/varnamelen/.golangci.yml index 2b52fc63e..92d895881 100644 --- a/vendor/github.com/blizzy78/varnamelen/.golangci.yml +++ b/vendor/github.com/blizzy78/varnamelen/.golangci.yml @@ -1,20 +1,55 @@ linters: enable: + - asciicheck + - bodyclose + - cyclop + - durationcheck + - errname + - errorlint + - exportloopref + - forcetypeassert - gocognit - gocritic - - gocyclo - goerr113 - - golint - - interfacer + - gofmt + - goprintffuncname + - gosec + - ifshort - 
nakedret + - nestif + - nilerr + - noctx + - nolintlint - prealloc + - predeclared + - promlinter + - revive + - rowserrcheck + - sqlclosecheck + - stylecheck + - thelper + - tparallel - unconvert - unparam + - varnamelen + - wastedassign + - wrapcheck + - wsl linters-settings: gocognit: min-complexity: 15 - gocyclo: - min-complexity: 10 nakedret: max-func-lines: 0 + nolintlint: + allow-unused: false + allow-leading-space: false + require-explanation: true + require-specific: true + unused: + go: 1.16 + varnamelen: + check-return: true + ignore-type-assert-ok: true + ignore-map-index-ok: true + ignore-chan-recv-ok: true diff --git a/vendor/github.com/blizzy78/varnamelen/.travis.yml b/vendor/github.com/blizzy78/varnamelen/.travis.yml deleted file mode 100644 index c3ea88740..000000000 --- a/vendor/github.com/blizzy78/varnamelen/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go -go: - - "1.15" -before_script: - - go get github.com/mattn/goveralls -after_script: - - goveralls -service=travis-ci diff --git a/vendor/github.com/blizzy78/varnamelen/README.md b/vendor/github.com/blizzy78/varnamelen/README.md index 7e507a90b..d13ff39ef 100644 --- a/vendor/github.com/blizzy78/varnamelen/README.md +++ b/vendor/github.com/blizzy78/varnamelen/README.md @@ -1,4 +1,4 @@ -[![Build Status](https://api.travis-ci.com/blizzy78/varnamelen.svg?branch=master)](https://app.travis-ci.com/github/blizzy78/varnamelen) [![Coverage Status](https://coveralls.io/repos/github/blizzy78/varnamelen/badge.svg?branch=master)](https://coveralls.io/github/blizzy78/varnamelen?branch=master) [![GoDoc](https://pkg.go.dev/badge/github.com/blizzy78/varnamelen)](https://pkg.go.dev/github.com/blizzy78/varnamelen) +[![GoDoc](https://pkg.go.dev/badge/github.com/blizzy78/varnamelen)](https://pkg.go.dev/github.com/blizzy78/varnamelen) varnamelen @@ -25,6 +25,48 @@ test.go:6:2: variable name 'i' is too short for the scope of its usage (varnamel ``` +golangci-lint Integration +------------------------- + +varnamelen is integrated into [golangci-lint] (though it may not always be the most recent version.) + +Example configuration for golangci-lint: + +```yaml +linters-settings: + varnamelen: + # The longest distance, in source lines, that is being considered a "small scope." (defaults to 5) + # Variables used in at most this many lines will be ignored. + max-distance: 5 + # The minimum length of a variable's name that is considered "long." (defaults to 3) + # Variable names that are at least this long will be ignored. + min-name-length: 3 + # Check method receiver names. (defaults to false) + check-receiver: false + # Check named return values. (defaults to false) + check-return: false + # Ignore "ok" variables that hold the bool return value of a type assertion. (defaults to false) + ignore-type-assert-ok: false + # Ignore "ok" variables that hold the bool return value of a map index. (defaults to false) + ignore-map-index-ok: false + # Ignore "ok" variables that hold the bool return value of a channel receive. (defaults to false) + ignore-chan-recv-ok: false + # Optional list of variable names that should be ignored completely. (defaults to empty list) + ignore-names: + - err + # Optional list of variable declarations that should be ignored completely. (defaults to empty list) + # Entries must be in the form of " " or " *" for + # variables, or "const " for constants. 
+ ignore-decls: + - c echo.Context + - t testing.T + - f *foo.Bar + - e error + - i int + - const C +``` + + Standalone Usage ---------------- @@ -63,8 +105,16 @@ Flags: apply all suggested fixes -flags print analyzer flags in JSON + -ignoreChanRecvOk + ignore 'ok' variables that hold the bool return value of a channel receive + -ignoreDecls value + comma-separated list of ignored variable declarations + -ignoreMapIndexOk + ignore 'ok' variables that hold the bool return value of a map index -ignoreNames value comma-separated list of ignored variable names + -ignoreTypeAssertOk + ignore 'ok' variables that hold the bool return value of a type assertion -json emit JSON output -maxDistance int @@ -87,3 +137,7 @@ License ------- This package is licensed under the MIT license. + + + +[golangci-lint]: https://github.com/golangci/golangci-lint diff --git a/vendor/github.com/blizzy78/varnamelen/flags.go b/vendor/github.com/blizzy78/varnamelen/flags.go new file mode 100644 index 000000000..a4aeb1818 --- /dev/null +++ b/vendor/github.com/blizzy78/varnamelen/flags.go @@ -0,0 +1,98 @@ +package varnamelen + +import "strings" + +// stringsValue is the value of a list-of-strings flag. +type stringsValue struct { + Values []string +} + +// declarationsValue is the value of a list-of-declarations flag. +type declarationsValue struct { + Values []declaration +} + +// Set implements Value. +func (sv *stringsValue) Set(values string) error { + if strings.TrimSpace(values) == "" { + sv.Values = nil + return nil + } + + parts := strings.Split(values, ",") + + sv.Values = make([]string, len(parts)) + + for i, part := range parts { + sv.Values[i] = strings.TrimSpace(part) + } + + return nil +} + +// String implements Value. +func (sv *stringsValue) String() string { + return strings.Join(sv.Values, ",") +} + +// contains returns true if sv contains s. +func (sv *stringsValue) contains(s string) bool { + for _, v := range sv.Values { + if v == s { + return true + } + } + + return false +} + +// Set implements Value. +func (dv *declarationsValue) Set(values string) error { + if strings.TrimSpace(values) == "" { + dv.Values = nil + return nil + } + + parts := strings.Split(values, ",") + + dv.Values = make([]declaration, len(parts)) + + for idx, part := range parts { + dv.Values[idx] = parseDeclaration(strings.TrimSpace(part)) + } + + return nil +} + +// String implements Value. +func (dv *declarationsValue) String() string { + parts := make([]string, len(dv.Values)) + + for idx, val := range dv.Values { + parts[idx] = val.name + " " + val.typ + } + + return strings.Join(parts, ",") +} + +// matchVariable returns true if vari matches any of the declarations in dv. +func (dv *declarationsValue) matchVariable(vari variable) bool { + for _, decl := range dv.Values { + if vari.match(decl) { + return true + } + } + + return false +} + +// matchParameter returns true if param matches any of the declarations in dv. 
+func (dv *declarationsValue) matchParameter(param parameter) bool { + for _, decl := range dv.Values { + if param.match(decl) { + return true + } + } + + return false +} diff --git a/vendor/github.com/blizzy78/varnamelen/go.mod b/vendor/github.com/blizzy78/varnamelen/go.mod index 0a6b16e63..85710f129 100644 --- a/vendor/github.com/blizzy78/varnamelen/go.mod +++ b/vendor/github.com/blizzy78/varnamelen/go.mod @@ -1,10 +1,9 @@ module github.com/blizzy78/varnamelen -go 1.15 +go 1.16 require ( github.com/matryer/is v1.4.0 - golang.org/x/mod v0.5.0 // indirect - golang.org/x/sys v0.0.0-20210917161153-d61c044b1678 // indirect - golang.org/x/tools v0.1.6 + golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c // indirect + golang.org/x/tools v0.1.9 ) diff --git a/vendor/github.com/blizzy78/varnamelen/go.sum b/vendor/github.com/blizzy78/varnamelen/go.sum index 96264f219..5093ed4da 100644 --- a/vendor/github.com/blizzy78/varnamelen/go.sum +++ b/vendor/github.com/blizzy78/varnamelen/go.sum @@ -1,30 +1,30 @@ github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= -github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210917161153-d61c044b1678 h1:J27LZFQBFoihqXoegpscI10HpjZ7B5WQLLKL2FZXQKw= -golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c h1:+8miTPjMCTXwih7BQmvWwd0PjdBZq2MKp/qQaahSzEM= 
+golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.6 h1:SIasE1FVIQOWz2GEAHFOmoW7xchJcqlucjSULTL0Ag4= -golang.org/x/tools v0.1.6/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= diff --git a/vendor/github.com/blizzy78/varnamelen/varnamelen.go b/vendor/github.com/blizzy78/varnamelen/varnamelen.go index 9c1959644..b6c62b57c 100644 --- a/vendor/github.com/blizzy78/varnamelen/varnamelen.go +++ b/vendor/github.com/blizzy78/varnamelen/varnamelen.go @@ -2,6 +2,8 @@ package varnamelen import ( "go/ast" + "go/token" + "go/types" "strings" "golang.org/x/tools/go/analysis" @@ -27,11 +29,18 @@ type varNameLen struct { // checkReturn determines whether named return values should be checked. checkReturn bool -} -// stringsValue is the value of a list-of-strings flag. -type stringsValue struct { - Values []string + // ignoreTypeAssertOk determines whether "ok" variables that hold the bool return value of a type assertion should be ignored. + ignoreTypeAssertOk bool + + // ignoreMapIndexOk determines whether "ok" variables that hold the bool return value of a map index should be ignored. + ignoreMapIndexOk bool + + // ignoreChannelReceiveOk determines whether "ok" variables that hold the bool return value of a channel receive should be ignored. + ignoreChannelReceiveOk bool + + // ignoreDeclarations is an optional list of variable declarations that should be ignored completely. + ignoreDeclarations declarationsValue } // variable represents a declared variable. @@ -39,8 +48,17 @@ type variable struct { // name is the name of the variable. name string + // constant is true if the variable is actually a constant. + constant bool + + // typ is the type of the variable. + typ string + // assign is the assign statement that declares the variable. assign *ast.AssignStmt + + // valueSpec is the value specification that declares the variable. + valueSpec *ast.ValueSpec } // parameter represents a declared function or method parameter. @@ -48,10 +66,39 @@ type parameter struct { // name is the name of the parameter. name string + // typ is the type of the parameter. + typ string + // field is the declaration of the parameter. field *ast.Field } +// declaration is a variable declaration. +type declaration struct { + // name is the name of the variable. + name string + + // constant is true if the variable is actually a constant. + constant bool + + // typ is the type of the variable. 
Not used for constants. + typ string +} + +// importDeclaration is an import declaration. +type importDeclaration struct { + // name is the short name or alias for the imported package. This is either the package's default name, + // or the alias specified in the import statement. + // Not used if self is true. + name string + + // path is the full path to the imported package. + path string + + // self is true when this is an implicit import declaration for the current package. + self bool +} + const ( // defaultMaxDistance is the default value for the maximum distance between the declaration of a variable and its usage // that is considered a "small scope." @@ -61,12 +108,23 @@ const ( defaultMinNameLength = 3 ) +// conventionalDecls is a list of conventional variable declarations. +var conventionalDecls = []declaration{ + parseDeclaration("t *testing.T"), + parseDeclaration("b *testing.B"), + parseDeclaration("tb testing.TB"), + parseDeclaration("pb *testing.PB"), + parseDeclaration("m *testing.M"), + parseDeclaration("ctx context.Context"), +} + // NewAnalyzer returns a new analyzer that checks variable name length. func NewAnalyzer() *analysis.Analyzer { vnl := varNameLen{ - maxDistance: defaultMaxDistance, - minNameLength: defaultMinNameLength, - ignoreNames: stringsValue{}, + maxDistance: defaultMaxDistance, + minNameLength: defaultMinNameLength, + ignoreNames: stringsValue{}, + ignoreDeclarations: declarationsValue{}, } analyzer := analysis.Analyzer{ @@ -91,6 +149,10 @@ func NewAnalyzer() *analysis.Analyzer { analyzer.Flags.Var(&vnl.ignoreNames, "ignoreNames", "comma-separated list of ignored variable names") analyzer.Flags.BoolVar(&vnl.checkReceiver, "checkReceiver", false, "check method receiver names") analyzer.Flags.BoolVar(&vnl.checkReturn, "checkReturn", false, "check named return values") + analyzer.Flags.BoolVar(&vnl.ignoreTypeAssertOk, "ignoreTypeAssertOk", false, "ignore 'ok' variables that hold the bool return value of a type assertion") + analyzer.Flags.BoolVar(&vnl.ignoreMapIndexOk, "ignoreMapIndexOk", false, "ignore 'ok' variables that hold the bool return value of a map index") + analyzer.Flags.BoolVar(&vnl.ignoreChannelReceiveOk, "ignoreChanRecvOk", false, "ignore 'ok' variables that hold the bool return value of a channel receive") + analyzer.Flags.Var(&vnl.ignoreDeclarations, "ignoreDecls", "comma-separated list of ignored variable declarations") return &analyzer } @@ -99,55 +161,132 @@ func NewAnalyzer() *analysis.Analyzer { func (v *varNameLen) run(pass *analysis.Pass) { varToDist, paramToDist, returnToDist := v.distances(pass) + v.checkVariables(pass, varToDist) + v.checkParams(pass, paramToDist) + v.checkReturns(pass, returnToDist) +} + +// checkVariables applies v to variables in varToDist. 
+func (v *varNameLen) checkVariables(pass *analysis.Pass, varToDist map[variable]int) { for variable, dist := range varToDist { + if v.ignoreNames.contains(variable.name) { + continue + } + + if v.ignoreDeclarations.matchVariable(variable) { + continue + } + if v.checkNameAndDistance(variable.name, dist) { continue } - pass.Reportf(variable.assign.Pos(), "variable name '%s' is too short for the scope of its usage", variable.name) - } - for param, dist := range paramToDist { - if param.isConventional() { + if v.checkTypeAssertOk(variable) { continue } - if v.checkNameAndDistance(param.name, dist) { - continue - } - pass.Reportf(param.field.Pos(), "parameter name '%s' is too short for the scope of its usage", param.name) - } - for param, dist := range returnToDist { - if v.checkNameAndDistance(param.name, dist) { + if v.checkMapIndexOk(variable) { continue } - pass.Reportf(param.field.Pos(), "return value name '%s' is too short for the scope of its usage", param.name) + + if v.checkChannelReceiveOk(variable) { + continue + } + + if variable.assign != nil { + pass.Reportf(variable.assign.Pos(), "%s name '%s' is too short for the scope of its usage", variable.kindName(), variable.name) + continue + } + + pass.Reportf(variable.valueSpec.Pos(), "%s name '%s' is too short for the scope of its usage", variable.kindName(), variable.name) } } -// checkNameAndDistance returns true when name or dist are considered "short", or when name is to be ignored. +// checkParams applies v to parameters in paramToDist. +func (v *varNameLen) checkParams(pass *analysis.Pass, paramToDist map[parameter]int) { + for param, dist := range paramToDist { + if v.ignoreNames.contains(param.name) { + continue + } + + if v.ignoreDeclarations.matchParameter(param) { + continue + } + + if v.checkNameAndDistance(param.name, dist) { + continue + } + + if param.isConventional() { + continue + } + + pass.Reportf(param.field.Pos(), "parameter name '%s' is too short for the scope of its usage", param.name) + } +} + +// checkReturns applies v to named return values in returnToDist. +func (v *varNameLen) checkReturns(pass *analysis.Pass, returnToDist map[parameter]int) { + for returnValue, dist := range returnToDist { + if v.ignoreNames.contains(returnValue.name) { + continue + } + + if v.ignoreDeclarations.matchParameter(returnValue) { + continue + } + + if v.checkNameAndDistance(returnValue.name, dist) { + continue + } + + pass.Reportf(returnValue.field.Pos(), "return value name '%s' is too short for the scope of its usage", returnValue.name) + } +} + +// checkNameAndDistance returns true if name or dist are considered "short". func (v *varNameLen) checkNameAndDistance(name string, dist int) bool { if len(name) >= v.minNameLength { return true } + if dist <= v.maxDistance { return true } - if v.ignoreNames.contains(name) { - return true - } + return false } -// distances maps of variables or parameters and their longest usage distances. +// checkTypeAssertOk returns true if "ok" variables that hold the bool return value of a type assertion +// should be ignored, and if vari is such a variable. +func (v *varNameLen) checkTypeAssertOk(vari variable) bool { + return v.ignoreTypeAssertOk && vari.isTypeAssertOk() +} + +// checkMapIndexOk returns true if "ok" variables that hold the bool return value of a map index +// should be ignored, and if vari is such a variable. 
+func (v *varNameLen) checkMapIndexOk(vari variable) bool { + return v.ignoreMapIndexOk && vari.isMapIndexOk() +} + +// checkChannelReceiveOk returns true if "ok" variables that hold the bool return value of a channel receive +// should be ignored, and if vari is such a variable. +func (v *varNameLen) checkChannelReceiveOk(vari variable) bool { + return v.ignoreChannelReceiveOk && vari.isChannelReceiveOk() +} + +// distances returns maps of variables, parameters, and return values mapping to their longest usage distances. func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[parameter]int, map[parameter]int) { - assignIdents, paramIdents, returnIdents := v.idents(pass) + assignIdents, valueSpecIdents, paramIdents, returnIdents, imports := v.identsAndImports(pass) varToDist := map[variable]int{} for _, ident := range assignIdents { - assign := ident.Obj.Decl.(*ast.AssignStmt) + assign := ident.Obj.Decl.(*ast.AssignStmt) //nolint:forcetypeassert // check is done in identsAndImports + variable := variable{ name: ident.Name, + typ: shortTypeName(pass.TypesInfo.TypeOf(identAssignExpr(ident, assign)), imports), assign: assign, } @@ -156,12 +295,29 @@ func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[param varToDist[variable] = useLine - declLine } + for _, ident := range valueSpecIdents { + valueSpec := ident.Obj.Decl.(*ast.ValueSpec) //nolint:forcetypeassert // check is done in identsAndImports + + variable := variable{ + name: ident.Name, + constant: ident.Obj.Kind == ast.Con, + typ: shortTypeName(pass.TypesInfo.TypeOf(valueSpec.Type), imports), + valueSpec: valueSpec, + } + + useLine := pass.Fset.Position(ident.NamePos).Line + declLine := pass.Fset.Position(valueSpec.Pos()).Line + varToDist[variable] = useLine - declLine + } + paramToDist := map[parameter]int{} for _, ident := range paramIdents { - field := ident.Obj.Decl.(*ast.Field) + field := ident.Obj.Decl.(*ast.Field) //nolint:forcetypeassert // check is done in identsAndImports + param := parameter{ name: ident.Name, + typ: shortTypeName(pass.TypesInfo.TypeOf(field.Type), imports), field: field, } @@ -173,9 +329,11 @@ func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[param returnToDist := map[parameter]int{} for _, ident := range returnIdents { - field := ident.Obj.Decl.(*ast.Field) + field := ident.Obj.Decl.(*ast.Field) //nolint:forcetypeassert // check is done in identsAndImports + param := parameter{ name: ident.Name, + typ: shortTypeName(pass.TypesInfo.TypeOf(field.Type), imports), field: field, } @@ -187,61 +345,245 @@ func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[param return varToDist, paramToDist, returnToDist } -// idents returns Idents referencing assign statements, parameters, and return values, respectively. -func (v *varNameLen) idents(pass *analysis.Pass) ([]*ast.Ident, []*ast.Ident, []*ast.Ident) { //nolint:gocognit - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +// identsAndImports returns Idents referencing assign statements, value specifications, parameters, and return values, respectively, +// as well as import declarations. 
+func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast.Ident, []*ast.Ident, []*ast.Ident, []importDeclaration) { //nolint:gocognit,cyclop // this is complex stuff + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) //nolint:forcetypeassert // inspect.Analyzer always returns *inspector.Inspector filter := []ast.Node{ - (*ast.Ident)(nil), + (*ast.ImportSpec)(nil), (*ast.FuncDecl)(nil), + (*ast.Ident)(nil), } funcs := []*ast.FuncDecl{} methods := []*ast.FuncDecl{} + + imports := []importDeclaration{} assignIdents := []*ast.Ident{} + valueSpecIdents := []*ast.Ident{} paramIdents := []*ast.Ident{} returnIdents := []*ast.Ident{} inspector.Preorder(filter, func(node ast.Node) { - if f, ok := node.(*ast.FuncDecl); ok { - funcs = append(funcs, f) - if f.Recv != nil { - methods = append(methods, f) - } - return - } - - ident := node.(*ast.Ident) - if ident.Obj == nil { - return - } - - if _, ok := ident.Obj.Decl.(*ast.AssignStmt); ok { - assignIdents = append(assignIdents, ident) - return - } - - if field, ok := ident.Obj.Decl.(*ast.Field); ok { - if isReceiver(field, methods) && !v.checkReceiver { + switch node2 := node.(type) { + case *ast.ImportSpec: + decl, ok := importSpecToDecl(node2, pass.Pkg.Imports()) + if !ok { return } - if isReturn(field, funcs) { - if !v.checkReturn { + imports = append(imports, decl) + + case *ast.FuncDecl: + funcs = append(funcs, node2) + + if node2.Recv == nil { + return + } + + methods = append(methods, node2) + + case *ast.Ident: + if node2.Obj == nil { + return + } + + switch objDecl := node2.Obj.Decl.(type) { + case *ast.AssignStmt: + assignIdents = append(assignIdents, node2) + + case *ast.ValueSpec: + valueSpecIdents = append(valueSpecIdents, node2) + + case *ast.Field: + if isReceiver(objDecl, methods) && !v.checkReceiver { return } - returnIdents = append(returnIdents, ident) - return - } - paramIdents = append(paramIdents, ident) + if isReturn(objDecl, funcs) { + if !v.checkReturn { + return + } + + returnIdents = append(returnIdents, node2) + + return + } + + paramIdents = append(paramIdents, node2) + } } }) - return assignIdents, paramIdents, returnIdents + imports = append(imports, importDeclaration{ + path: pass.Pkg.Path(), + self: true, + }) + + return assignIdents, valueSpecIdents, paramIdents, returnIdents, imports } -// isReceiver returns true when field is a receiver parameter of any of the given methods. +func importSpecToDecl(spec *ast.ImportSpec, imports []*types.Package) (importDeclaration, bool) { + path := strings.TrimSuffix(strings.TrimPrefix(spec.Path.Value, "\""), "\"") + + if spec.Name != nil { + return importDeclaration{ + name: spec.Name.Name, + path: path, + }, true + } + + for _, imp := range imports { + if imp.Path() == path { + return importDeclaration{ + name: imp.Name(), + path: path, + }, true + } + } + + return importDeclaration{}, false +} + +// isTypeAssertOk returns true if v is an "ok" variable that holds the bool return value of a type assertion. 
+func (v variable) isTypeAssertOk() bool { + if v.name != "ok" { + return false + } + + if v.assign == nil { + return false + } + + if len(v.assign.Lhs) != 2 { + return false + } + + ident, ok := v.assign.Lhs[1].(*ast.Ident) + if !ok { + return false + } + + if ident.Name != "ok" { + return false + } + + if len(v.assign.Rhs) != 1 { + return false + } + + if _, ok := v.assign.Rhs[0].(*ast.TypeAssertExpr); !ok { + return false + } + + return true +} + +// isMapIndexOk returns true if v is an "ok" variable that holds the bool return value of a map index. +func (v variable) isMapIndexOk() bool { + if v.name != "ok" { + return false + } + + if v.assign == nil { + return false + } + + if len(v.assign.Lhs) != 2 { + return false + } + + ident, ok := v.assign.Lhs[1].(*ast.Ident) + if !ok { + return false + } + + if ident.Name != "ok" { + return false + } + + if len(v.assign.Rhs) != 1 { + return false + } + + if _, ok := v.assign.Rhs[0].(*ast.IndexExpr); !ok { + return false + } + + return true +} + +// isChannelReceiveOk returns true if v is an "ok" variable that holds the bool return value of a channel receive. +func (v variable) isChannelReceiveOk() bool { + if v.name != "ok" { + return false + } + + if v.assign == nil { + return false + } + + if len(v.assign.Lhs) != 2 { + return false + } + + ident, ok := v.assign.Lhs[1].(*ast.Ident) + if !ok { + return false + } + + if ident.Name != "ok" { + return false + } + + if len(v.assign.Rhs) != 1 { + return false + } + + unary, ok := v.assign.Rhs[0].(*ast.UnaryExpr) + if !ok { + return false + } + + if unary.Op != token.ARROW { + return false + } + + return true +} + +// match returns true if v matches decl. +func (v variable) match(decl declaration) bool { + if v.name != decl.name { + return false + } + + if v.constant != decl.constant { + return false + } + + if v.constant { + return true + } + + if v.typ == "" { + return false + } + + return decl.matchType(v.typ) +} + +// kindName returns "constant" if v.constant==true, else "variable". +func (v variable) kindName() string { + if v.constant { + return "constant" + } + + return "variable" +} + +// isReceiver returns true if field is a receiver parameter of any of the given methods. func isReceiver(field *ast.Field, methods []*ast.FuncDecl) bool { for _, m := range methods { for _, recv := range m.Recv.List { @@ -250,93 +592,102 @@ func isReceiver(field *ast.Field, methods []*ast.FuncDecl) bool { } } } + return false } -// isReturn returns true when field is a return value of any of the given funcs. +// isReturn returns true if field is a return value of any of the given funcs. func isReturn(field *ast.Field, funcs []*ast.FuncDecl) bool { for _, f := range funcs { if f.Type.Results == nil { continue } + for _, r := range f.Type.Results.List { if r == field { return true } } } + return false } -// Set implements Value. -func (sv *stringsValue) Set(s string) error { - sv.Values = strings.Split(s, ",") - return nil -} - -// String implements Value. -func (sv *stringsValue) String() string { - return strings.Join(sv.Values, ",") -} - -// contains returns true when sv contains s. -func (sv *stringsValue) contains(s string) bool { - for _, v := range sv.Values { - if v == s { +// isConventional returns true if p is a conventional Go parameter, such as "ctx context.Context" or +// "t *testing.T". 
+func (p parameter) isConventional() bool { + for _, decl := range conventionalDecls { + if p.match(decl) { return true } } + return false } -// isConventional returns true when p is a conventional Go parameter, such as "ctx context.Context" or -// "t *testing.T". -func (p parameter) isConventional() bool { //nolint:gocyclo,gocognit - switch { - case p.name == "t" && p.isPointerType("testing.T"): - return true - case p.name == "b" && p.isPointerType("testing.B"): - return true - case p.name == "tb" && p.isType("testing.TB"): - return true - case p.name == "pb" && p.isPointerType("testing.PB"): - return true - case p.name == "m" && p.isPointerType("testing.M"): - return true - case p.name == "ctx" && p.isType("context.Context"): - return true - default: +// match returns whether p matches decl. +func (p parameter) match(decl declaration) bool { + if p.name != decl.name { return false } + + return decl.matchType(p.typ) +} + +// parseDeclaration parses and returns a variable declaration parsed from decl. +func parseDeclaration(decl string) declaration { + if strings.HasPrefix(decl, "const ") { + return declaration{ + name: strings.TrimPrefix(decl, "const "), + constant: true, + } + } + + parts := strings.SplitN(decl, " ", 2) + + return declaration{ + name: parts[0], + typ: parts[1], + } } -// isType returns true when p is of type typeName. -func (p parameter) isType(typeName string) bool { - sel, ok := p.field.Type.(*ast.SelectorExpr) - if !ok { - return false - } - return isType(sel, typeName) +// matchType returns true if typ matches d.typ. +func (d declaration) matchType(typ string) bool { + return d.typ == typ } -// isPointerType returns true when p is a pointer type of type typeName. -func (p parameter) isPointerType(typeName string) bool { - star, ok := p.field.Type.(*ast.StarExpr) - if !ok { - return false +// identAssignExpr returns the expression that is assigned to ident. +// +// TODO: This currently only works for simple one-to-one assignments without the use of multi-values. +func identAssignExpr(_ *ast.Ident, assign *ast.AssignStmt) ast.Expr { + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + return nil } - sel, ok := star.X.(*ast.SelectorExpr) - if !ok { - return false - } - return isType(sel, typeName) + + return assign.Rhs[0] } -// isType returns true when sel is a selector for type typeName. -func isType(sel *ast.SelectorExpr, typeName string) bool { - ident, ok := sel.X.(*ast.Ident) - if !ok { - return false +// shortTypeName returns the short name of typ, with respect to imports. +// For example, if package github.com/matryer/is is imported with alias "x", +// and typ represents []*github.com/matryer/is.I, shortTypeName will return "[]*x.I". +// For imports without aliases, the package's default name will be used. +func shortTypeName(typ types.Type, imports []importDeclaration) string { + if typ == nil { + return "" } - return typeName == ident.Name+"."+sel.Sel.Name + + typStr := typ.String() + + for _, imp := range imports { + prefix := imp.path + "." 
+ + if imp.self { + typStr = strings.ReplaceAll(typStr, prefix, "") + continue + } + + typStr = strings.ReplaceAll(typStr, prefix, imp.name+".") + } + + return typStr } diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go index 17c36d4cd..2e1e89934 100644 --- a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go +++ b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go @@ -2,21 +2,146 @@ package bidichk import ( "bytes" + "flag" + "fmt" "go/token" "os" + "sort" "strings" "unicode/utf8" "golang.org/x/tools/go/analysis" ) -var Analyzer = &analysis.Analyzer{ - Name: "bidichk", - Doc: "Checks for dangerous unicode character sequences", - Run: run, +const ( + doc = "bidichk detects dangerous unicode character sequences" + disallowedDoc = `coma separated list of disallowed runes (full name or short name) + +Supported runes + +LEFT-TO-RIGHT-EMBEDDING, LRE (u+202A) +RIGHT-TO-LEFT-EMBEDDING, RLE (u+202B) +POP-DIRECTIONAL-FORMATTING, PDF (u+202C) +LEFT-TO-RIGHT-OVERRIDE, LRO (u+202D) +RIGHT-TO-LEFT-OVERRIDE, RLO (u+202E) +LEFT-TO-RIGHT-ISOLATE, LRI (u+2066) +RIGHT-TO-LEFT-ISOLATE, RLI (u+2067) +FIRST-STRONG-ISOLATE, FSI (u+2068) +POP-DIRECTIONAL-ISOLATE, PDI (u+2069) +` +) + +type disallowedRunes map[string]rune + +func (m disallowedRunes) String() string { + ss := make([]string, 0, len(m)) + for s := range m { + ss = append(ss, s) + } + sort.Strings(ss) + return strings.Join(ss, ",") } -func run(pass *analysis.Pass) (interface{}, error) { +func (m disallowedRunes) Set(s string) error { + ss := strings.FieldsFunc(s, func(c rune) bool { return c == ',' }) + if len(ss) == 0 { + return nil + } + + for k := range m { + delete(m, k) + } + + for _, v := range ss { + switch v { + case runeShortNameLRE, runeShortNameRLE, runeShortNamePDF, + runeShortNameLRO, runeShortNameRLO, runeShortNameLRI, + runeShortNameRLI, runeShortNameFSI, runeShortNamePDI: + v = shortNameLookup[v] + fallthrough + case runeNameLRE, runeNameRLE, runeNamePDF, + runeNameLRO, runeNameRLO, runeNameLRI, + runeNameRLI, runeNameFSI, runeNamePDI: + m[v] = runeLookup[v] + default: + return fmt.Errorf("unknown check name %q (see help for full list)", v) + } + } + return nil +} + +const ( + runeNameLRE = "LEFT-TO-RIGHT-EMBEDDING" + runeNameRLE = "RIGHT-TO-LEFT-EMBEDDING" + runeNamePDF = "POP-DIRECTIONAL-FORMATTING" + runeNameLRO = "LEFT-TO-RIGHT-OVERRIDE" + runeNameRLO = "RIGHT-TO-LEFT-OVERRIDE" + runeNameLRI = "LEFT-TO-RIGHT-ISOLATE" + runeNameRLI = "RIGHT-TO-LEFT-ISOLATE" + runeNameFSI = "FIRST-STRONG-ISOLATE" + runeNamePDI = "POP-DIRECTIONAL-ISOLATE" + + runeShortNameLRE = "LRE" // LEFT-TO-RIGHT-EMBEDDING + runeShortNameRLE = "RLE" // RIGHT-TO-LEFT-EMBEDDING + runeShortNamePDF = "PDF" // POP-DIRECTIONAL-FORMATTING + runeShortNameLRO = "LRO" // LEFT-TO-RIGHT-OVERRIDE + runeShortNameRLO = "RLO" // RIGHT-TO-LEFT-OVERRIDE + runeShortNameLRI = "LRI" // LEFT-TO-RIGHT-ISOLATE + runeShortNameRLI = "RLI" // RIGHT-TO-LEFT-ISOLATE + runeShortNameFSI = "FSI" // FIRST-STRONG-ISOLATE + runeShortNamePDI = "PDI" // POP-DIRECTIONAL-ISOLATE +) + +var runeLookup = map[string]rune{ + runeNameLRE: '\u202A', // LEFT-TO-RIGHT-EMBEDDING + runeNameRLE: '\u202B', // RIGHT-TO-LEFT-EMBEDDING + runeNamePDF: '\u202C', // POP-DIRECTIONAL-FORMATTING + runeNameLRO: '\u202D', // LEFT-TO-RIGHT-OVERRIDE + runeNameRLO: '\u202E', // RIGHT-TO-LEFT-OVERRIDE + runeNameLRI: '\u2066', // LEFT-TO-RIGHT-ISOLATE + runeNameRLI: '\u2067', // RIGHT-TO-LEFT-ISOLATE + runeNameFSI: '\u2068', // FIRST-STRONG-ISOLATE 
+ runeNamePDI: '\u2069', // POP-DIRECTIONAL-ISOLATE +} + +var shortNameLookup = map[string]string{ + runeShortNameLRE: runeNameLRE, + runeShortNameRLE: runeNameRLE, + runeShortNamePDF: runeNamePDF, + runeShortNameLRO: runeNameLRO, + runeShortNameRLO: runeNameRLO, + runeShortNameLRI: runeNameLRI, + runeShortNameRLI: runeNameRLI, + runeShortNameFSI: runeNameFSI, + runeShortNamePDI: runeNamePDI, +} + +type bidichk struct { + disallowedRunes disallowedRunes +} + +// NewAnalyzer return a new bidichk analyzer. +func NewAnalyzer() *analysis.Analyzer { + bidichk := bidichk{} + bidichk.disallowedRunes = make(map[string]rune, len(runeLookup)) + for k, v := range runeLookup { + bidichk.disallowedRunes[k] = v + } + + a := &analysis.Analyzer{ + Name: "bidichk", + Doc: doc, + Run: bidichk.run, + } + + a.Flags.Init("bidichk", flag.ExitOnError) + a.Flags.Var(&bidichk.disallowedRunes, "disallowed-runes", disallowedDoc) + a.Flags.Var(versionFlag{}, "V", "print version and exit") + + return a +} + +func (b bidichk) run(pass *analysis.Pass) (interface{}, error) { var err error pass.Fset.Iterate(func(f *token.File) bool { @@ -24,31 +149,19 @@ func run(pass *analysis.Pass) (interface{}, error) { return true } - return check(f.Name(), f.Pos(0), pass) == nil + return b.check(f.Name(), f.Pos(0), pass) == nil }) return nil, err } -var disallowedRunes = map[string]rune{ - "LEFT-TO-RIGHT-EMBEDDING": '\u202A', - "RIGHT-TO-LEFT-EMBEDDING": '\u202B', - "POP-DIRECTIONAL-FORMATTING": '\u202C', - "LEFT-TO-RIGHT-OVERRIDE": '\u202D', - "RIGHT-TO-LEFT-OVERRIDE": '\u202E', - "LEFT-TO-RIGHT-ISOLATE": '\u2066', - "RIGHT-TO-LEFT-ISOLATE": '\u2067', - "FIRST-STRONG-ISOLATE": '\u2068', - "POP-DIRECTIONAL-ISOLATE": '\u2069', -} - -func check(filename string, pos token.Pos, pass *analysis.Pass) error { +func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) error { body, err := os.ReadFile(filename) if err != nil { return err } - for name, r := range disallowedRunes { + for name, r := range b.disallowedRunes { start := 0 for { idx := bytes.IndexRune(body[start:], r) diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/version.go b/vendor/github.com/breml/bidichk/pkg/bidichk/version.go new file mode 100644 index 000000000..4cfc57dd1 --- /dev/null +++ b/vendor/github.com/breml/bidichk/pkg/bidichk/version.go @@ -0,0 +1,19 @@ +package bidichk + +import ( + "fmt" + "os" +) + +var Version = "bidichk version dev" + +type versionFlag struct{} + +func (versionFlag) IsBoolFlag() bool { return true } +func (versionFlag) Get() interface{} { return nil } +func (versionFlag) String() string { return "" } +func (versionFlag) Set(s string) error { + fmt.Println(Version) + os.Exit(0) + return nil +} diff --git a/vendor/github.com/breml/errchkjson/.gitignore b/vendor/github.com/breml/errchkjson/.gitignore new file mode 100644 index 000000000..0362de301 --- /dev/null +++ b/vendor/github.com/breml/errchkjson/.gitignore @@ -0,0 +1,29 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +/errchkjson +/cmd/errchkjson/errchkjson + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out +coverage.html + +# Log files +*.log + +# Env files +.env + +# Exclude todo +TODO.md + +# Exclude IDE settings +.idea/ +*.iml +.vscode/ diff --git a/vendor/github.com/breml/errchkjson/.goreleaser.yml b/vendor/github.com/breml/errchkjson/.goreleaser.yml new file mode 100644 index 000000000..5f23690f1 --- /dev/null +++ 
b/vendor/github.com/breml/errchkjson/.goreleaser.yml @@ -0,0 +1,33 @@ +# This is an example .goreleaser.yml file with some sane defaults. +# Make sure to check the documentation at http://goreleaser.com +before: + hooks: + # You may remove this if you don't use go modules. + - go mod tidy +builds: + - main: ./cmd/errchkjson + binary: errchkjson + env: + - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin +archives: + - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + replacements: + darwin: Darwin + linux: Linux + windows: Windows + 386: i386 + amd64: x86_64 +snapshot: + name_template: "{{ .Tag }}-next" +changelog: + skip: true +release: + github: + owner: breml + name: errchkjson +gomod: + proxy: true diff --git a/vendor/github.com/breml/errchkjson/LICENSE b/vendor/github.com/breml/errchkjson/LICENSE new file mode 100644 index 000000000..08db5cb6f --- /dev/null +++ b/vendor/github.com/breml/errchkjson/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Lucas Bremgartner + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/breml/errchkjson/README.md b/vendor/github.com/breml/errchkjson/README.md new file mode 100644 index 000000000..fe2461c1d --- /dev/null +++ b/vendor/github.com/breml/errchkjson/README.md @@ -0,0 +1,125 @@ +# errchkjson + +[![Test Status](https://github.com/breml/errchkjson/actions/workflows/ci.yml/badge.svg)](https://github.com/breml/errchkjson/actions/workflows/ci.yml) [![Go Report Card](https://goreportcard.com/badge/github.com/breml/errchkjson)](https://goreportcard.com/report/github.com/breml/errchkjson) [![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE) + +Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omited. + +Consider this [http.Handler](https://pkg.go.dev/net/http#Handler): + +```Go +func JSONHelloWorld(w http.ResponseWriter, r *http.Request) { + response := struct { + Message string + Code int + }{ + Message: "Hello World", + Code: 200, + } + + body, err := json.Marshal(response) + if err != nil { + panic(err) // unreachable, because json encoding of a struct with just a string and an int will never return an error. 
+ } + + w.Write(body) +} +``` + +Because the `panic` is not possible to happen, one might refactor the code like this: + +```Go +func JSONHelloWorld(w http.ResponseWriter, r *http.Request) { + response := struct { + Message string + Code int + }{ + Message: "Hello World", + Code: 200, + } + + body, _ := json.Marshal(response) + + w.Write(body) +} +``` + +This is ok, as long as the struct is not altered in such a way, that could potentially lead +to `json.Marshal` returning an error. + +`errchkjson` allows you to lint your code such that the above error returned from `json.Marshal` +can be omitted while still staying safe, because as soon as an unsafe type is added to the +response type, the linter will warn you. + +## Installation + +Download `errchkjson` from the [releases](https://github.com/breml/errchkjson/releases) or get the latest version from source with: + +```shell +go get github.com/breml/errchkjson/cmd/errchkjson +``` + +## Usage + +### Shell + +Check everything: + +```shell +errchkjson ./... +``` + +`errchkjson` also recognizes the following command-line options: + +The `-omit-safe` flag disables checking for safe returns of errors from json.Marshal + +## Types + +### Safe + +The following types are safe to use with [json encoding functions](https://pkg.go.dev/encoding/json), that is, the encoding to JSON can not fail: + +Safe basic types: + +* `bool` +* `int`, `int8`, `int16`, `int32`, `int64`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `uintptr` +* `string` +* Pointer type of the above listed basic types + +Composed types (struct, map, slice, array) are safe, if the type of the value is +safe. For structs, only exported fields are relevant. For maps, the key needs to be either an integer type or a string. + +### Unsafe + +The following types are unsafe to use with [json encoding functions](https://pkg.go.dev/encoding/json), that is, the encoding to JSON can fail (return an error): + +Unsafe basic types: + +* `float32`, `float64` +* `interface{}` +* Pointer type of the above listed basic types + +Any composed types (struct, map, slice, array) containing an unsafe basic type. + +If a type implements the `json.Marshaler` or `encoding.TextMarshaler` interface (e.g. `json.Number`). + +### Forbidden + +Forbidden basic types: + +* `complex64`, `complex128` +* `chan` +* `func` +* `unsafe.Pointer` + +Any composed types (struct, map, slice, array) containing a forbidden basic type. Any map +using a key with a forbidden type (`bool`, `float32`, `float64`, `struct`). 
+ +## Bugs found during development + +During the development of `errcheckjson`, the following issues in package `encoding/json` of the Go standard library have been found and PR have been merged: + +* [Issue #34154: encoding/json: string option (struct tag) on string field with SetEscapeHTML(false) escapes anyway](https://github.com/golang/go/issues/34154) +* [PR #34127: encoding/json: fix and optimize marshal for quoted string](https://github.com/golang/go/pull/34127) +* [Issue #34268: encoding/json: wrong encoding for json.Number field with string option (struct tag)](https://github.com/golang/go/issues/34268) +* [PR #34269: encoding/json: make Number with the ,string option marshal with quotes](https://github.com/golang/go/pull/34269) +* [PR #34272: encoding/json: validate strings when decoding into Number](https://github.com/golang/go/pull/34272) diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go new file mode 100644 index 000000000..5a5b6d1a5 --- /dev/null +++ b/vendor/github.com/breml/errchkjson/errchkjson.go @@ -0,0 +1,310 @@ +// Package errchkjson defines an Analyzer that finds places, where it is +// safe to omit checking the error returned from json.Marshal. +package errchkjson + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/types/typeutil" +) + +type errchkjson struct { + omitSafe bool // -omit-safe flag + reportNoExported bool // -report-no-exported flag +} + +// NewAnalyzer returns a new errchkjson analyzer. +func NewAnalyzer() *analysis.Analyzer { + errchkjson := &errchkjson{} + + a := &analysis.Analyzer{ + Name: "errchkjson", + Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.", + Run: errchkjson.run, + } + + a.Flags.Init("errchkjson", flag.ExitOnError) + a.Flags.BoolVar(&errchkjson.omitSafe, "omit-safe", false, "if omit-safe is true, checking of safe returns is omitted") + a.Flags.BoolVar(&errchkjson.reportNoExported, "report-no-exported", false, "if report-no-exported is true, encoding a struct without exported fields is reported as issue") + a.Flags.Var(versionFlag{}, "V", "print version and exit") + + return a +} + +func (e *errchkjson) run(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + ast.Inspect(file, func(n ast.Node) bool { + if n == nil { + return true + } + + // if the error is returned, it is the caller's responsibility to check + // the return value. 
+ if _, ok := n.(*ast.ReturnStmt); ok { + return false + } + + ce, ok := n.(*ast.CallExpr) + if ok { + fn, _ := typeutil.Callee(pass.TypesInfo, ce).(*types.Func) + if fn == nil { + return true + } + + switch fn.FullName() { + case "encoding/json.Marshal", "encoding/json.MarshalIndent": + e.handleJSONMarshal(pass, ce, fn.FullName(), true) + case "(*encoding/json.Encoder).Encode": + e.handleJSONMarshal(pass, ce, fn.FullName(), true) + default: + return true + } + return false + } + + as, ok := n.(*ast.AssignStmt) + if !ok { + return true + } + + ce, ok = as.Rhs[0].(*ast.CallExpr) + if !ok { + return true + } + + fn, _ := typeutil.Callee(pass.TypesInfo, ce).(*types.Func) + if fn == nil { + return true + } + + switch fn.FullName() { + case "encoding/json.Marshal", "encoding/json.MarshalIndent": + e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier(as.Lhs[1])) + case "(*encoding/json.Encoder).Encode": + e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier(as.Lhs[0])) + default: + return true + } + return false + }) + } + + return nil, nil +} + +func blankIdentifier(n ast.Expr) bool { + if errIdent, ok := n.(*ast.Ident); ok { + if errIdent.Name == "_" { + return true + } + } + return false +} + +func (e *errchkjson) handleJSONMarshal(pass *analysis.Pass, ce *ast.CallExpr, fnName string, blankIdentifier bool) { + t := pass.TypesInfo.TypeOf(ce.Args[0]) + if t == nil { + // Not sure, if this is at all possible + if blankIdentifier { + pass.Reportf(ce.Pos(), "Type of argument to `%s` could not be evaluated and error return value is not checked", fnName) + } + return + } + + if _, ok := t.(*types.Pointer); ok { + t = t.(*types.Pointer).Elem() + } + + err := e.jsonSafe(t, 0, map[types.Type]struct{}{}) + if err != nil { + if _, ok := err.(unsupported); ok { + pass.Reportf(ce.Pos(), "`%s` for %v", fnName, err) + return + } + if _, ok := err.(noexported); ok { + pass.Reportf(ce.Pos(), "Error argument passed to `%s` does not contain any exported field", fnName) + } + // Only care about unsafe types if they are assigned to the blank identifier. 
+ if blankIdentifier { + pass.Reportf(ce.Pos(), "Error return value of `%s` is not checked: %v", fnName, err) + } + } + if err == nil && !blankIdentifier && !e.omitSafe { + pass.Reportf(ce.Pos(), "Error return value of `%s` is checked but passed argument is safe", fnName) + } + // Report an error, if err for json.Marshal is not checked and safe types are omitted + if err == nil && blankIdentifier && e.omitSafe { + pass.Reportf(ce.Pos(), "Error return value of `%s` is not checked", fnName) + } +} + +const ( + allowedBasicTypes = types.IsBoolean | types.IsInteger | types.IsString + allowedMapKeyBasicTypes = types.IsInteger | types.IsString + unsupportedBasicTypes = types.IsComplex +) + +func (e *errchkjson) jsonSafe(t types.Type, level int, seenTypes map[types.Type]struct{}) error { + if _, ok := seenTypes[t]; ok { + return nil + } + + if types.Implements(t, textMarshalerInterface()) || types.Implements(t, jsonMarshalerInterface()) { + return fmt.Errorf("unsafe type `%s` found", t.String()) + } + + switch ut := t.Underlying().(type) { + case *types.Basic: + if ut.Info()&allowedBasicTypes > 0 { // bool, int-family, string + if ut.Info()&types.IsString > 0 && t.String() == "encoding/json.Number" { + return fmt.Errorf("unsafe type `%s` found", t.String()) + } + return nil + } + if ut.Info()&unsupportedBasicTypes > 0 { // complex64, complex128 + return newUnsupportedError(fmt.Errorf("unsupported type `%s` found", ut.String())) + } + switch ut.Kind() { + case types.UntypedNil: + return nil + case types.UnsafePointer: + return newUnsupportedError(fmt.Errorf("unsupported type `%s` found", ut.String())) + default: + // E.g. float32, float64 + return fmt.Errorf("unsafe type `%s` found", ut.String()) + } + + case *types.Array: + err := e.jsonSafe(ut.Elem(), level+1, seenTypes) + if err != nil { + return err + } + return nil + + case *types.Slice: + err := e.jsonSafe(ut.Elem(), level+1, seenTypes) + if err != nil { + return err + } + return nil + + case *types.Struct: + seenTypes[t] = struct{}{} + exported := 0 + for i := 0; i < ut.NumFields(); i++ { + if !ut.Field(i).Exported() { + // Unexported fields can be ignored + continue + } + if tag, ok := reflect.StructTag(ut.Tag(i)).Lookup("json"); ok { + if tag == "-" { + // Fields omitted in json can be ignored + continue + } + } + err := e.jsonSafe(ut.Field(i).Type(), level+1, seenTypes) + if err != nil { + return err + } + exported++ + } + if e.reportNoExported && level == 0 && exported == 0 { + return newNoexportedError(fmt.Errorf("struct does not export any field")) + } + return nil + + case *types.Pointer: + err := e.jsonSafe(ut.Elem(), level+1, seenTypes) + if err != nil { + return err + } + return nil + + case *types.Map: + err := jsonSafeMapKey(ut.Key()) + if err != nil { + return err + } + err = e.jsonSafe(ut.Elem(), level+1, seenTypes) + if err != nil { + return err + } + return nil + + case *types.Chan, *types.Signature: + // Types that are not supported for encoding to json: + return newUnsupportedError(fmt.Errorf("unsupported type `%s` found", ut.String())) + + default: + // Types that are not supported for encoding to json or are not completely safe, like: interfaces + return fmt.Errorf("unsafe type `%s` found", t.String()) + } +} + +func jsonSafeMapKey(t types.Type) error { + if types.Implements(t, textMarshalerInterface()) || types.Implements(t, jsonMarshalerInterface()) { + return fmt.Errorf("unsafe type `%s` as map key found", t.String()) + } + switch ut := t.Underlying().(type) { + case *types.Basic: + if ut.Info()&types.IsString > 0 && 
t.String() == "encoding/json.Number" { + return fmt.Errorf("unsafe type `%s` as map key found", t.String()) + } + if ut.Info()&allowedMapKeyBasicTypes > 0 { // bool, int-family, string + return nil + } + // E.g. bool, float32, float64, complex64, complex128 + return newUnsupportedError(fmt.Errorf("unsupported type `%s` as map key found", t.String())) + case *types.Interface: + return fmt.Errorf("unsafe type `%s` as map key found", t.String()) + default: + // E.g. struct composed solely of basic types, that are comparable + return newUnsupportedError(fmt.Errorf("unsupported type `%s` as map key found", t.String())) + } +} + +// Construct *types.Interface for interface encoding.TextMarshaler +// type TextMarshaler interface { +// MarshalText() (text []byte, err error) +// } +// +func textMarshalerInterface() *types.Interface { + textMarshalerInterface := types.NewInterfaceType([]*types.Func{ + types.NewFunc(token.NoPos, nil, "MarshalText", types.NewSignature( + nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "text", + types.NewSlice( + types.Universe.Lookup("byte").Type())), + types.NewVar(token.NoPos, nil, "err", types.Universe.Lookup("error").Type())), + false)), + }, nil) + textMarshalerInterface.Complete() + + return textMarshalerInterface +} + +// Construct *types.Interface for interface json.Marshaler +// type Marshaler interface { +// MarshalJSON() ([]byte, error) +// } +// +func jsonMarshalerInterface() *types.Interface { + textMarshalerInterface := types.NewInterfaceType([]*types.Func{ + types.NewFunc(token.NoPos, nil, "MarshalJSON", types.NewSignature( + nil, nil, types.NewTuple( + types.NewVar(token.NoPos, nil, "", + types.NewSlice( + types.Universe.Lookup("byte").Type())), + types.NewVar(token.NoPos, nil, "", types.Universe.Lookup("error").Type())), + false)), + }, nil) + textMarshalerInterface.Complete() + + return textMarshalerInterface +} diff --git a/vendor/github.com/breml/errchkjson/go.mod b/vendor/github.com/breml/errchkjson/go.mod new file mode 100644 index 000000000..29452ea1f --- /dev/null +++ b/vendor/github.com/breml/errchkjson/go.mod @@ -0,0 +1,5 @@ +module github.com/breml/errchkjson + +go 1.16 + +require golang.org/x/tools v0.1.7 diff --git a/vendor/github.com/breml/errchkjson/go.sum b/vendor/github.com/breml/errchkjson/go.sum new file mode 100644 index 000000000..13ba81505 --- /dev/null +++ b/vendor/github.com/breml/errchkjson/go.sum @@ -0,0 +1,27 @@ +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/breml/errchkjson/noexported_error.go b/vendor/github.com/breml/errchkjson/noexported_error.go new file mode 100644 index 000000000..07b7a07d2 --- /dev/null +++ b/vendor/github.com/breml/errchkjson/noexported_error.go @@ -0,0 +1,23 @@ +package errchkjson + +type noexported interface { + noexported() +} + +var _ noexported = noexportedError{} + +type noexportedError struct { + err error +} + +func newNoexportedError(err error) error { + return noexportedError{ + err: err, + } +} + +func (u noexportedError) noexported() {} + +func (u noexportedError) Error() string { + return u.err.Error() +} diff --git a/vendor/github.com/breml/errchkjson/unsupported_error.go b/vendor/github.com/breml/errchkjson/unsupported_error.go new file mode 100644 index 000000000..1a38c3f53 --- /dev/null +++ b/vendor/github.com/breml/errchkjson/unsupported_error.go @@ -0,0 +1,23 @@ +package errchkjson + +type unsupported interface { + unsupported() +} + +var _ unsupported = unsupportedError{} + +type unsupportedError struct { + err error +} + +func newUnsupportedError(err error) error { + return unsupportedError{ + err: err, + } +} + +func (u unsupportedError) unsupported() {} + +func (u unsupportedError) Error() string { + return u.err.Error() +} diff --git a/vendor/github.com/breml/errchkjson/version.go b/vendor/github.com/breml/errchkjson/version.go new file mode 100644 index 000000000..77d8ef8bb --- /dev/null +++ b/vendor/github.com/breml/errchkjson/version.go @@ -0,0 +1,19 @@ +package errchkjson + +import ( + "fmt" + "os" +) + +var Version = "errchkjson version dev" + +type versionFlag struct{} + +func (versionFlag) IsBoolFlag() bool { return true } +func (versionFlag) Get() interface{} { return nil } +func (versionFlag) String() string { return "" } +func (versionFlag) Set(s string) error { + fmt.Println(Version) + os.Exit(0) + return nil +} diff --git 
a/vendor/github.com/daixiang0/gci/pkg/analyzer/analyzer.go b/vendor/github.com/daixiang0/gci/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..10818a791 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/analyzer/analyzer.go @@ -0,0 +1,131 @@ +package analyzer + +import ( + "bytes" + "fmt" + "go/token" + "strings" + + "github.com/daixiang0/gci/pkg/configuration" + "github.com/daixiang0/gci/pkg/gci" + "github.com/daixiang0/gci/pkg/io" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" +) + +const ( + NoInlineCommentsFlag = "noInlineComments" + NoPrefixCommentsFlag = "noPrefixComments" + SectionsFlag = "Sections" + SectionSeparatorsFlag = "SectionSeparators" + SectionDelimiter = ";" +) + +var ( + noInlineComments bool + noPrefixComments bool + sectionsStr string + sectionSeparatorsStr string +) + +func init() { + Analyzer.Flags.BoolVar(&noInlineComments, NoInlineCommentsFlag, false, "If comments in the same line as the input should be present") + Analyzer.Flags.BoolVar(&noPrefixComments, NoPrefixCommentsFlag, false, "If comments above an input should be present") + Analyzer.Flags.StringVar(§ionsStr, SectionsFlag, "", "Specify the Sections format that should be used to check the file formatting") + Analyzer.Flags.StringVar(§ionSeparatorsStr, SectionSeparatorsFlag, "", "Specify the Sections that are inserted as Separators between Sections") +} + +var Analyzer = &analysis.Analyzer{ + Name: "gci", + Doc: "A tool that control golang package import order and make it always deterministic.", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: runAnalysis, +} + +func runAnalysis(pass *analysis.Pass) (interface{}, error) { + // TODO input validation + + var fileReferences []*token.File + // extract file references for all files in the analyzer pass + for _, pkgFile := range pass.Files { + fileForPos := pass.Fset.File(pkgFile.Package) + if fileForPos != nil { + fileReferences = append(fileReferences, fileForPos) + } + } + expectedNumFiles := len(pass.Files) + foundNumFiles := len(fileReferences) + if expectedNumFiles != foundNumFiles { + return nil, InvalidNumberOfFilesInAnalysis{expectedNumFiles, foundNumFiles} + } + + // read configuration options + gciCfg, err := parseGciConfiguration() + if err != nil { + return nil, err + } + + for _, file := range fileReferences { + filePath := file.Name() + unmodifiedFile, formattedFile, err := gci.LoadFormatGoFile(io.File{filePath}, *gciCfg) + if err != nil { + return nil, err + } + // search for a difference + fileRunes := bytes.Runes(unmodifiedFile) + formattedRunes := bytes.Runes(formattedFile) + diffIdx := compareRunes(fileRunes, formattedRunes) + switch diffIdx { + case -1: + // no difference + default: + diffPos := file.Position(file.Pos(diffIdx)) + // prevent invalid access to array + fileRune := "nil" + formattedRune := "nil" + if len(fileRunes)-1 >= diffIdx { + fileRune = fmt.Sprintf("%q", fileRunes[diffIdx]) + } + if len(formattedRunes)-1 >= diffIdx { + formattedRune = fmt.Sprintf("%q", formattedRunes[diffIdx]) + } + pass.Reportf(file.Pos(diffIdx), "Expected %s, Found %s at %s[line %d,col %d]", formattedRune, fileRune, filePath, diffPos.Line, diffPos.Column) + } + } + return nil, nil +} + +func compareRunes(a, b []rune) (differencePos int) { + // check shorter rune slice first to prevent invalid array access + shorterRune := a + if len(b) < len(a) { + shorterRune = b + } + // check for differences up to where the length is identical + for idx := 0; idx < len(shorterRune); idx++ { + if a[idx] 
!= b[idx] { + return idx + } + } + // check that we have compared two equally long rune arrays + if len(a) != len(b) { + return len(shorterRune) + 1 + } + return -1 +} + +func parseGciConfiguration() (*gci.GciConfiguration, error) { + fmtCfg := configuration.FormatterConfiguration{noInlineComments, noPrefixComments, false} + + var sectionStrings []string + if sectionsStr != "" { + sectionStrings = strings.Split(sectionsStr, SectionDelimiter) + } + + var sectionSeparatorStrings []string + if sectionSeparatorsStr != "" { + sectionSeparatorStrings = strings.Split(sectionSeparatorsStr, SectionDelimiter) + } + return gci.GciStringConfiguration{fmtCfg, sectionStrings, sectionSeparatorStrings}.Parse() +} diff --git a/vendor/github.com/daixiang0/gci/pkg/analyzer/errors.go b/vendor/github.com/daixiang0/gci/pkg/analyzer/errors.go new file mode 100644 index 000000000..83d78309f --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/analyzer/errors.go @@ -0,0 +1,16 @@ +package analyzer + +import "fmt" + +type InvalidNumberOfFilesInAnalysis struct { + expectedNumFiles, foundNumFiles int +} + +func (i InvalidNumberOfFilesInAnalysis) Error() string { + return fmt.Sprintf("Expected %d files in Analyzer input, Found %d", i.expectedNumFiles, i.foundNumFiles) +} + +func (i InvalidNumberOfFilesInAnalysis) Is(err error) bool { + _, ok := err.(InvalidNumberOfFilesInAnalysis) + return ok +} diff --git a/vendor/github.com/daixiang0/gci/pkg/configuration/formatter.go b/vendor/github.com/daixiang0/gci/pkg/configuration/formatter.go new file mode 100644 index 000000000..6ac048477 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/configuration/formatter.go @@ -0,0 +1,7 @@ +package configuration + +type FormatterConfiguration struct { + NoInlineComments bool `yaml:"no-inlineComments"` + NoPrefixComments bool `yaml:"no-prefixComments"` + Debug bool `yaml:"-"` +} diff --git a/vendor/github.com/daixiang0/gci/pkg/constants/sequences.go b/vendor/github.com/daixiang0/gci/pkg/constants/sequences.go new file mode 100644 index 000000000..512d6d155 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/constants/sequences.go @@ -0,0 +1,16 @@ +package constants + +const ( + CommentFlag = "//" + ImportStartFlag = "\nimport (\n" + ImportEndFlag = "\n)" + + Blank = " " + Indent = "\t" + Linebreak = "\n" + + SectionSeparator = ":" + + ParameterOpeningBrackets = "(" + ParameterClosingBrackets = ")" +) diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/configuration.go b/vendor/github.com/daixiang0/gci/pkg/gci/configuration.go new file mode 100644 index 000000000..f1ef7ed52 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/configuration.go @@ -0,0 +1,58 @@ +package gci + +import ( + "io/ioutil" + + "github.com/daixiang0/gci/pkg/configuration" + sectionsPkg "github.com/daixiang0/gci/pkg/gci/sections" + + "gopkg.in/yaml.v3" +) + +type GciConfiguration struct { + configuration.FormatterConfiguration + Sections SectionList + SectionSeparators SectionList +} + +type GciStringConfiguration struct { + Cfg configuration.FormatterConfiguration `yaml:",inline"` + SectionStrings []string `yaml:"sections"` + SectionSeparatorStrings []string `yaml:"sectionseparators"` +} + +func (g GciStringConfiguration) Parse() (*GciConfiguration, error) { + sections := DefaultSections() + var err error + if len(g.SectionStrings) > 0 { + sections, err = sectionsPkg.SectionParserInst.ParseSectionStrings(g.SectionStrings, true, true) + if err != nil { + return nil, err + } + } + sectionSeparators := DefaultSectionSeparators() + if 
len(g.SectionSeparatorStrings) > 0 { + sectionSeparators, err = sectionsPkg.SectionParserInst.ParseSectionStrings(g.SectionSeparatorStrings, false, false) + if err != nil { + return nil, err + } + } + return &GciConfiguration{g.Cfg, sections, sectionSeparators}, nil +} + +func initializeGciConfigFromYAML(filePath string) (*GciConfiguration, error) { + yamlCfg := GciStringConfiguration{} + yamlData, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, err + } + err = yaml.Unmarshal(yamlData, &yamlCfg) + if err != nil { + return nil, err + } + gciCfg, err := yamlCfg.Parse() + if err != nil { + return nil, err + } + return gciCfg, nil +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/errors.go b/vendor/github.com/daixiang0/gci/pkg/gci/errors.go new file mode 100644 index 000000000..6fde84f37 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/errors.go @@ -0,0 +1,78 @@ +package gci + +import ( + "errors" + "fmt" + + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + sectionsPkg "github.com/daixiang0/gci/pkg/gci/sections" +) + +type EqualSpecificityMatchError struct { + importDef importPkg.ImportDef + sectionA, sectionB sectionsPkg.Section +} + +func (e EqualSpecificityMatchError) Error() string { + return fmt.Sprintf("Import %s matched section %s and %s equally", e.importDef, e.sectionA, e.sectionB) +} + +func (e EqualSpecificityMatchError) Is(err error) bool { + _, ok := err.(EqualSpecificityMatchError) + return ok +} + +type NoMatchingSectionForImportError struct { + importDef importPkg.ImportDef +} + +func (n NoMatchingSectionForImportError) Error() string { + return fmt.Sprintf("No section found for Import: %v", n.importDef) +} + +func (n NoMatchingSectionForImportError) Is(err error) bool { + _, ok := err.(NoMatchingSectionForImportError) + return ok +} + +type InvalidImportSplitError struct { + segments []string +} + +func (i InvalidImportSplitError) Error() string { + return fmt.Sprintf("seperating the inline comment from the import yielded an invalid number of segments: %v", i.segments) +} + +func (i InvalidImportSplitError) Is(err error) bool { + _, ok := err.(InvalidImportSplitError) + return ok +} + +type InvalidAliasSplitError struct { + segments []string +} + +func (i InvalidAliasSplitError) Error() string { + return fmt.Sprintf("seperating the alias from the path yielded an invalid number of segments: %v", i.segments) +} + +func (i InvalidAliasSplitError) Is(err error) bool { + _, ok := err.(InvalidAliasSplitError) + return ok +} + +var MissingImportStatementError = FileParsingError{errors.New("no import statement present in File")} +var ImportStatementNotClosedError = FileParsingError{errors.New("import statement not closed")} + +type FileParsingError struct { + error +} + +func (f FileParsingError) Unwrap() error { + return f.error +} + +func (f FileParsingError) Is(err error) bool { + _, ok := err.(FileParsingError) + return ok +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/format.go b/vendor/github.com/daixiang0/gci/pkg/gci/format.go new file mode 100644 index 000000000..78573da14 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/format.go @@ -0,0 +1,98 @@ +package gci + +import ( + "bytes" + "fmt" + "log" + "strings" + + "github.com/daixiang0/gci/pkg/constants" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + sectionsPkg "github.com/daixiang0/gci/pkg/gci/sections" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +// Formats the import section of a Go file +func formatGoFile(input []byte, cfg GciConfiguration) 
([]byte, error) { + startIndex := bytes.Index(input, []byte(constants.ImportStartFlag)) + if startIndex < 0 { + return nil, MissingImportStatementError + } + endIndexFromStart := bytes.Index(input[startIndex:], []byte(constants.ImportEndFlag)) + if endIndexFromStart < 0 { + return nil, ImportStatementNotClosedError + } + endIndex := startIndex + endIndexFromStart + + unformattedImports := input[startIndex+len(constants.ImportStartFlag) : endIndex] + formattedImports, err := formatImportBlock(unformattedImports, cfg) + if err != nil { + return nil, err + } + + var output []byte + output = append(output, input[:startIndex+len(constants.ImportStartFlag)]...) + output = append(output, formattedImports...) + output = append(output, input[endIndex+1:]...) + return output, nil +} + +// Takes unsorted imports as byte array and formats them according to the specified sections +func formatImportBlock(input []byte, cfg GciConfiguration) ([]byte, error) { + //strings.ReplaceAll(input, "\r\n", linebreak) + lines := strings.Split(string(input), constants.Linebreak) + imports, err := parseToImportDefinitions(lines) + if err != nil { + return nil, fmt.Errorf("an error occured while trying to parse imports: %w", err) + } + if cfg.Debug { + log.Println("Parsed imports in file:", imports) + } + // create mapping between sections and imports + sectionMap := make(map[sectionsPkg.Section][]importPkg.ImportDef, len(cfg.Sections)) + // find matching section for every importSpec + for _, i := range imports { + // determine match specificity for every available section + var bestSection sectionsPkg.Section + var bestSectionSpecificity specificity.MatchSpecificity = specificity.MisMatch{} + for _, section := range cfg.Sections { + sectionSpecificity := section.MatchSpecificity(i) + if sectionSpecificity.IsMoreSpecific(specificity.MisMatch{}) && sectionSpecificity.Equal(bestSectionSpecificity) { + // specificity is identical + return nil, EqualSpecificityMatchError{i, bestSection, section} + } + if sectionSpecificity.IsMoreSpecific(bestSectionSpecificity) { + // better match found + bestSectionSpecificity = sectionSpecificity + bestSection = section + } + } + if bestSection == nil { + return nil, NoMatchingSectionForImportError{i} + } + if cfg.Debug { + log.Printf("Matched import %s to section %s", i, bestSection) + } + sectionMap[bestSection] = append(sectionMap[bestSection], i) + } + // format every section to a str + var sectionStrings []string + for _, section := range cfg.Sections { + sectionStr := section.Format(sectionMap[section], cfg.FormatterConfiguration) + // prevent adding an empty section which would cause a separator to be inserted + if sectionStr != "" { + if cfg.Debug { + log.Printf("Formatting section %s with imports: %v", section, sectionMap[section]) + } + sectionStrings = append(sectionStrings, sectionStr) + } + } + // format sectionSeparators + var sectionSeparatorStr string + for _, sectionSep := range cfg.SectionSeparators { + sectionSeparatorStr += sectionSep.Format([]importPkg.ImportDef{}, cfg.FormatterConfiguration) + } + // generate output by joining the sections + output := strings.Join(sectionStrings, sectionSeparatorStr) + return []byte(output), nil +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go index 7efa576ca..68c839fc9 100644 --- a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go +++ b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go @@ -2,382 +2,126 @@ package gci import ( "bytes" + "errors" "fmt" - "io" - "io/ioutil" + 
"log" "os" - "os/exec" - "path/filepath" - "sort" - "strings" + + sectionsPkg "github.com/daixiang0/gci/pkg/gci/sections" + "github.com/daixiang0/gci/pkg/io" + + "github.com/hexops/gotextdiff" + "github.com/hexops/gotextdiff/myers" + "github.com/hexops/gotextdiff/span" + "golang.org/x/sync/errgroup" ) -const ( - // pkg type: standard, remote, local - standard int = iota - // 3rd-party packages - remote - local +type SectionList []sectionsPkg.Section - commentFlag = "//" -) - -var ( - importStartFlag = []byte(` -import ( -`) - importEndFlag = []byte(` -) -`) -) - -type FlagSet struct { - LocalFlag []string - DoWrite, DoDiff *bool +func (list SectionList) String() []string { + var output []string + for _, section := range list { + output = append(output, section.String()) + } + return output } -type pkg struct { - list map[int][]string - comment map[string]string - alias map[string]string +func DefaultSections() SectionList { + return SectionList{sectionsPkg.StandardPackage{}, sectionsPkg.DefaultSection{nil, nil}} } -// ParseLocalFlag takes a comma-separated list of -// package-name-prefixes (as passed to the "-local" flag), and splits -// it in to a list. This is different than strings.Split in that it -// handles the empty string and empty entries in the list. -func ParseLocalFlag(str string) []string { - return strings.FieldsFunc(str, func(c rune) bool { return c == ',' }) +func DefaultSectionSeparators() SectionList { + return SectionList{sectionsPkg.NewLine{}} +} +func LocalFlagsToSections(localFlags []string) SectionList { + sections := DefaultSections() + // Add all local arguments as ImportPrefix sections + for _, prefix := range localFlags { + sections = append(sections, sectionsPkg.Prefix{prefix, nil, nil}) + } + return sections } -func newPkg(data [][]byte, localFlag []string) *pkg { - listMap := make(map[int][]string) - commentMap := make(map[string]string) - aliasMap := make(map[string]string) - p := &pkg{ - list: listMap, - comment: commentMap, - alias: aliasMap, - } - - formatData := make([]string, 0) - // remove all empty lines - for _, v := range data { - if len(v) > 0 { - formatData = append(formatData, strings.TrimSpace(string(v))) - } - } - - n := len(formatData) - for i := n - 1; i >= 0; i-- { - line := formatData[i] - - // check commentFlag: - // 1. one line commentFlag - // 2. 
commentFlag after import path - commentIndex := strings.Index(line, commentFlag) - if commentIndex == 0 { - // comment in the last line is useless, ignore it - if i+1 >= n { - continue - } - pkg, _, _ := getPkgInfo(formatData[i+1], strings.Index(formatData[i+1], commentFlag) >= 0) - p.comment[pkg] = line - continue - } else if commentIndex > 0 { - pkg, alias, comment := getPkgInfo(line, true) - if alias != "" { - p.alias[pkg] = alias - } - - p.comment[pkg] = comment - pkgType := getPkgType(pkg, localFlag) - p.list[pkgType] = append(p.list[pkgType], pkg) - continue - } - - pkg, alias, _ := getPkgInfo(line, false) - - if alias != "" { - p.alias[pkg] = alias - } - - pkgType := getPkgType(pkg, localFlag) - p.list[pkgType] = append(p.list[pkgType], pkg) - } - - return p -} - -// fmt format import pkgs as expected -func (p *pkg) fmt() []byte { - ret := make([]string, 0, 100) - - for pkgType := range []int{standard, remote, local} { - sort.Strings(p.list[pkgType]) - for _, s := range p.list[pkgType] { - if p.comment[s] != "" { - l := fmt.Sprintf("%s%s%s%s", linebreak, indent, p.comment[s], linebreak) - ret = append(ret, l) - } - - if p.alias[s] != "" { - s = fmt.Sprintf("%s%s%s%s%s", indent, p.alias[s], blank, s, linebreak) - } else { - s = fmt.Sprintf("%s%s%s", indent, s, linebreak) - } - - ret = append(ret, s) - } - - if len(p.list[pkgType]) > 0 { - ret = append(ret, linebreak) - } - } - if len(ret) > 0 && ret[len(ret)-1] == linebreak { - ret = ret[:len(ret)-1] - } - - // remove duplicate empty lines - s1 := fmt.Sprintf("%s%s%s%s", linebreak, linebreak, linebreak, indent) - s2 := fmt.Sprintf("%s%s%s", linebreak, linebreak, indent) - return []byte(strings.ReplaceAll(strings.Join(ret, ""), s1, s2)) -} - -// getPkgInfo assume line is a import path, and return (path, alias, comment) -func getPkgInfo(line string, comment bool) (string, string, string) { - if comment { - s := strings.Split(line, commentFlag) - pkgArray := strings.Split(s[0], blank) - if len(pkgArray) > 1 { - return pkgArray[1], pkgArray[0], fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1])) - } else { - return strings.TrimSpace(pkgArray[0]), "", fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1])) - } - } else { - pkgArray := strings.Split(line, blank) - if len(pkgArray) > 1 { - return pkgArray[1], pkgArray[0], "" - } else { - return pkgArray[0], "", "" - } - } -} - -func getPkgType(line string, localFlag []string) int { - pkgName := strings.Trim(line, "\"\\`") - - for _, localPkg := range localFlag { - if strings.HasPrefix(pkgName, localPkg) { - return local - } - } - - if isStandardPackage(pkgName) { - return standard - } - - return remote -} - -const ( - blank = " " - indent = "\t" - linebreak = "\n" -) - -func diff(b1, b2 []byte, filename string) (data []byte, err error) { - f1, err := writeTempFile("", "gci", b1) - if err != nil { - return - } - defer os.Remove(f1) - - f2, err := writeTempFile("", "gci", b2) - if err != nil { - return - } - defer os.Remove(f2) - - cmd := "diff" - - data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() - if len(data) > 0 { - // diff exits with a non-zero status when the files don't match. - // Ignore that failure as long as we get output. 
- return replaceTempFilename(data, filename) - } - return -} - -func writeTempFile(dir, prefix string, data []byte) (string, error) { - file, err := ioutil.TempFile(dir, prefix) - if err != nil { - return "", err - } - _, err = file.Write(data) - if err1 := file.Close(); err == nil { - err = err1 - } - if err != nil { - os.Remove(file.Name()) - return "", err - } - return file.Name(), nil -} - -// replaceTempFilename replaces temporary filenames in diff with actual one. -// -// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 -// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 -// ... -// -> -// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 -// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 -// ... -func replaceTempFilename(diff []byte, filename string) ([]byte, error) { - bs := bytes.SplitN(diff, []byte{'\n'}, 3) - if len(bs) < 3 { - return nil, fmt.Errorf("got unexpected diff for %s", filename) - } - // Preserve timestamps. - var t0, t1 []byte - if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { - t0 = bs[0][i:] - } - if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { - t1 = bs[1][i:] - } - // Always print filepath with slash separator. - f := filepath.ToSlash(filename) - bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) - bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) - return bytes.Join(bs, []byte{'\n'}), nil -} - -func visitFile(set *FlagSet) filepath.WalkFunc { - return func(path string, f os.FileInfo, err error) error { - if err == nil && isGoFile(f) { - err = processFile(path, os.Stdout, set) - } - return err - } -} - -func WalkDir(path string, set *FlagSet) error { - return filepath.Walk(path, visitFile(set)) -} - -func isGoFile(f os.FileInfo) bool { - // ignore non-Go files - name := f.Name() - return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") -} - -func ProcessFile(filename string, out io.Writer, set *FlagSet) error { - return processFile(filename, out, set) -} - -func processFile(filename string, out io.Writer, set *FlagSet) error { - var err error - - f, err := os.Open(filename) - if err != nil { - return err - } - defer f.Close() - - src, err := ioutil.ReadAll(f) - if err != nil { - return err - } - - ori := make([]byte, len(src)) - copy(ori, src) - start := bytes.Index(src, importStartFlag) - // in case no importStartFlag or importStartFlag exist in the commentFlag - if start < 0 { - fmt.Printf("skip file %s since no import\n", filename) +func PrintFormattedFiles(paths []string, cfg GciConfiguration) error { + return processStdInAndGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { + fmt.Print(string(formattedFile)) return nil - } - end := bytes.Index(src[start:], importEndFlag) + start - - ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak)) - - p := newPkg(ret, set.LocalFlag) - - res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...) - - if !bytes.Equal(ori, res) { - if *set.DoWrite { - // On Windows, we need to re-set the permissions from the file. See golang/go#38225. 
- var perms os.FileMode - if fi, err := os.Stat(filename); err == nil { - perms = fi.Mode() & os.ModePerm - } - err = ioutil.WriteFile(filename, res, perms) - if err != nil { - return err - } - } - if *set.DoDiff { - data, err := diff(ori, res, filename) - if err != nil { - return fmt.Errorf("failed to diff: %v", err) - } - fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) - if _, err := out.Write(data); err != nil { - return fmt.Errorf("failed to write: %v", err) - } - } - } - if !*set.DoWrite && !*set.DoDiff { - if _, err = out.Write(res); err != nil { - return fmt.Errorf("failed to write: %v", err) - } - } - - return err + }) } -// Run return source and result in []byte if succeed -func Run(filename string, set *FlagSet) ([]byte, []byte, error) { - var err error +func WriteFormattedFiles(paths []string, cfg GciConfiguration) error { + return processGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { + if bytes.Equal(unmodifiedFile, formattedFile) { + log.Printf("Skipping correctly formatted File: %s", filePath) + return nil + } + log.Printf("Writing formatted File: %s", filePath) + return os.WriteFile(filePath, formattedFile, 0644) + }) +} - f, err := os.Open(filename) +func DiffFormattedFiles(paths []string, cfg GciConfiguration) error { + return processStdInAndGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { + fileURI := span.URIFromPath(filePath) + edits := myers.ComputeEdits(fileURI, string(unmodifiedFile), string(formattedFile)) + unifiedEdits := gotextdiff.ToUnified(filePath, filePath, string(unmodifiedFile), edits) + fmt.Printf("%v", unifiedEdits) + return nil + }) +} + +type fileFormattingFunc func(filePath string, unmodifiedFile, formattedFile []byte) error + +func processStdInAndGoFilesInPaths(paths []string, cfg GciConfiguration, fileFunc fileFormattingFunc) error { + return processFiles(io.StdInGenerator.Combine(io.GoFilesInPathsGenerator(paths)), cfg, fileFunc) +} + +func processGoFilesInPaths(paths []string, cfg GciConfiguration, fileFunc fileFormattingFunc) error { + return processFiles(io.GoFilesInPathsGenerator(paths), cfg, fileFunc) +} + +func processFiles(fileGenerator io.FileGeneratorFunc, cfg GciConfiguration, fileFunc fileFormattingFunc) error { + var taskGroup errgroup.Group + files, err := fileGenerator() if err != nil { - return nil, nil, err + return err } - defer f.Close() + for _, file := range files { + // run file processing in parallel + taskGroup.Go(processingFunc(file, cfg, fileFunc)) + } + return taskGroup.Wait() +} - src, err := ioutil.ReadAll(f) +func processingFunc(file io.FileObj, cfg GciConfiguration, formattingFunc fileFormattingFunc) func() error { + return func() error { + unmodifiedFile, formattedFile, err := LoadFormatGoFile(file, cfg) + if err != nil { + if errors.Is(err, FileParsingError{}) { + // do not process files that are improperly formatted + return nil + } + return err + } + return formattingFunc(file.Path(), unmodifiedFile, formattedFile) + } +} + +func LoadFormatGoFile(file io.FileObj, cfg GciConfiguration) (unmodifiedFile, formattedFile []byte, err error) { + unmodifiedFile, err = file.Load() + log.Printf("Loaded File: %s", file.Path()) if err != nil { return nil, nil, err } - ori := make([]byte, len(src)) - copy(ori, src) - start := bytes.Index(src, importStartFlag) - // in case no importStartFlag or importStartFlag exist in the commentFlag - if start < 0 { - return nil, nil, nil + formattedFile, 
err = formatGoFile(unmodifiedFile, cfg) + if err != nil { + // ignore missing import statements + if !errors.Is(err, MissingImportStatementError) { + return unmodifiedFile, nil, err + } + log.Printf("File does not contain an import statement: %s", file.Path()) + formattedFile = unmodifiedFile } - end := bytes.Index(src[start:], importEndFlag) + start - - // in case import flags are part of a codegen template, or otherwise "wrong" - if start+len(importStartFlag) > end { - return nil, nil, nil - } - - ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak)) - - p := newPkg(ret, set.LocalFlag) - - res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...) - - if bytes.Equal(ori, res) { - return ori, nil, nil - } - - return ori, res, nil + return unmodifiedFile, formattedFile, nil } diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/imports/errors.go b/vendor/github.com/daixiang0/gci/pkg/gci/imports/errors.go new file mode 100644 index 000000000..49baa6d39 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/imports/errors.go @@ -0,0 +1,37 @@ +package imports + +import ( + "errors" + "fmt" +) + +type ValidationError struct { + error +} + +func (v ValidationError) Unwrap() error { + return v.error +} + +func (v ValidationError) Is(err error) bool { + _, ok := err.(ValidationError) + return ok +} + +var MissingOpeningQuotesError = ValidationError{errors.New("path is missing starting quotes")} + +var MissingClosingQuotesError = ValidationError{errors.New("path is missing closing quotes")} + +type InvalidCharacterError struct { + char rune + alias string +} + +func (i InvalidCharacterError) Error() string { + return fmt.Sprintf("Found non-letter character %q in Alias: %s", i.char, i.alias) +} + +func (i InvalidCharacterError) Is(err error) bool { + _, ok := err.(InvalidCharacterError) + return ok +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/imports/import.go b/vendor/github.com/daixiang0/gci/pkg/gci/imports/import.go new file mode 100644 index 000000000..f9229cbe4 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/imports/import.go @@ -0,0 +1,86 @@ +package imports + +import ( + "sort" + "strings" + "unicode" + + "github.com/daixiang0/gci/pkg/configuration" + "github.com/daixiang0/gci/pkg/constants" +) + +type ImportDef struct { + Alias string + QuotedPath string + PrefixComment []string + InlineComment string +} + +func (i ImportDef) Path() string { + return strings.TrimSuffix(strings.TrimPrefix(i.QuotedPath, string('"')), string('"')) +} + +// Validate checks whether the contents are valid for an import +func (i ImportDef) Validate() error { + err := checkAlias(i.Alias) + if err != nil { + return ValidationError{err} + } + if !strings.HasPrefix(i.QuotedPath, string('"')) { + return MissingOpeningQuotesError + } + if !strings.HasSuffix(i.QuotedPath, string('"')) { + return MissingClosingQuotesError + } + return nil +} + +func checkAlias(alias string) error { + for idx, r := range alias { + if !unicode.IsLetter(r) { + if r != '_' && r != '.' 
{ + if idx == 0 || !unicode.IsDigit(r) { + // aliases may not start with a digit + return InvalidCharacterError{r, alias} + } + } + } + } + return nil +} + +func (i ImportDef) String() string { + return i.QuotedPath +} + +func (i ImportDef) Format(cfg configuration.FormatterConfiguration) string { + linePrefix := constants.Indent + var output string + if cfg.NoPrefixComments == false { + for _, prefixComment := range i.PrefixComment { + output += linePrefix + prefixComment + constants.Linebreak + } + } + output += linePrefix + if i.Alias != "" { + output += i.Alias + constants.Blank + } + output += i.QuotedPath + if cfg.NoInlineComments == false { + if i.InlineComment != "" { + output += constants.Blank + i.InlineComment + } + } + output += constants.Linebreak + return output +} + +func SortImportsByPath(imports []ImportDef) []ImportDef { + sort.Slice( + imports, + func(i, j int) bool { + return sort.StringsAreSorted([]string{imports[i].Path(), imports[j].Path()}) + }, + ) + return imports +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/parse.go b/vendor/github.com/daixiang0/gci/pkg/gci/parse.go new file mode 100644 index 000000000..2ab183d63 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/parse.go @@ -0,0 +1,56 @@ +package gci + +import ( + "strings" + + "github.com/daixiang0/gci/pkg/constants" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" +) + +// Recursively parses import lines into a list of ImportDefs +func parseToImportDefinitions(unformattedLines []string) ([]importPkg.ImportDef, error) { + newImport := importPkg.ImportDef{} + for index, unformattedLine := range unformattedLines { + line := strings.TrimSpace(unformattedLine) + if line == "" { + //empty line --> starts a new import + return parseToImportDefinitions(unformattedLines[index+1:]) + } + if strings.HasPrefix(line, constants.CommentFlag) { + // comment line + newImport.PrefixComment = append(newImport.PrefixComment, line) + continue + } + // split inline comment from import + importSegments := strings.SplitN(line, constants.CommentFlag, 2) + switch len(importSegments) { + case 1: + // no inline comment + case 2: + // inline comment present + newImport.InlineComment = constants.CommentFlag + importSegments[1] + default: + return nil, InvalidImportSplitError{importSegments} + } + // split alias from path + pkgArray := strings.Fields(importSegments[0]) + switch len(pkgArray) { + case 1: + // only a path + newImport.QuotedPath = pkgArray[0] + case 2: + // alias + path + newImport.Alias = pkgArray[0] + newImport.QuotedPath = pkgArray[1] + default: + return nil, InvalidAliasSplitError{pkgArray} + } + err := newImport.Validate() + if err != nil { + return nil, err + } + followingImports, err := parseToImportDefinitions(unformattedLines[index+1:]) + return append([]importPkg.ImportDef{newImport}, followingImports...), err + } + return nil, nil +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/commentline.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/commentline.go new file mode 100644 index 000000000..428644b19 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/commentline.go @@ -0,0 +1,51 @@ +package sections + +import ( + "fmt" + "strings" + + "github.com/daixiang0/gci/pkg/configuration" + "github.com/daixiang0/gci/pkg/constants" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +func init() { + commentLineType := SectionType{ + generatorFun: func(parameter string, sectionPrefix, sectionSuffix Section) 
(Section, error) { + return CommentLine{parameter}, nil + }, + aliases: []string{"Comment", "CommentLine"}, + parameterHelp: "your text here", + description: "Prints the specified indented comment", + }.StandAloneSection() + SectionParserInst.registerSectionWithoutErr(&commentLineType) +} + +type CommentLine struct { + Comment string +} + +func (c CommentLine) MatchSpecificity(spec importPkg.ImportDef) specificity.MatchSpecificity { + return specificity.MisMatch{} +} + +func (c CommentLine) Format(imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string { + comment := constants.Indent + "//" + c.Comment + if !strings.HasSuffix(comment, constants.Linebreak) { + comment += constants.Linebreak + } + return comment +} + +func (c CommentLine) sectionPrefix() Section { + return nil +} + +func (c CommentLine) sectionSuffix() Section { + return nil +} + +func (c CommentLine) String() string { + return fmt.Sprintf("CommentLine(%s)", c.Comment) +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/default.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/default.go new file mode 100644 index 000000000..aca04f4e3 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/default.go @@ -0,0 +1,43 @@ +package sections + +import ( + "github.com/daixiang0/gci/pkg/configuration" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +func init() { + defaultSectionType := SectionType{ + generatorFun: func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) { + return DefaultSection{sectionPrefix, sectionSuffix}, nil + }, + aliases: []string{"Def", "Default"}, + description: "Contains all imports that could not be matched to another section type", + }.WithoutParameter() + SectionParserInst.registerSectionWithoutErr(&defaultSectionType) +} + +type DefaultSection struct { + Prefix Section + Suffix Section +} + +func (d DefaultSection) sectionPrefix() Section { + return d.Prefix +} + +func (d DefaultSection) sectionSuffix() Section { + return d.Suffix +} + +func (d DefaultSection) MatchSpecificity(spec importPkg.ImportDef) specificity.MatchSpecificity { + return specificity.Default{} +} + +func (d DefaultSection) Format(imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string { + return inorderSectionFormat(d, imports, cfg) +} + +func (d DefaultSection) String() string { + return sectionStringWithPrefixSuffix("Default", d) +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/errors.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/errors.go new file mode 100644 index 000000000..3cc720d57 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/errors.go @@ -0,0 +1,68 @@ +package sections + +import ( + "errors" + "fmt" + + "github.com/daixiang0/gci/pkg/constants" +) + +type SectionParsingError struct { + error +} + +func (s SectionParsingError) Unwrap() error { + return s.error +} + +func (s SectionParsingError) Wrap(sectionStr string) error { + return fmt.Errorf("failed to parse section %q: %w", sectionStr, s) +} + +func (s SectionParsingError) Is(err error) bool { + _, ok := err.(SectionParsingError) + return ok +} + +type TypeAlreadyRegisteredError struct { + duplicateAlias string + newType, existingType SectionType +} + +func (t TypeAlreadyRegisteredError) Error() string { + return fmt.Sprintf("New type %q could not be registered because alias %q was already defined in %q", t.newType, t.duplicateAlias, t.existingType) +} + +func (t 
TypeAlreadyRegisteredError) Is(err error) bool { + _, ok := err.(TypeAlreadyRegisteredError) + return ok +} + +var PrefixNotAllowedError = errors.New("section may not contain a Prefix") + +var SuffixNotAllowedError = errors.New("section may not contain a Suffix") + +var SectionFormatInvalidError = errors.New("section Definition does not match format [FormattingSection:]Section[:FormattingSection]") + +type SectionAliasNotRegisteredWithParser struct { + missingAlias string +} + +func (s SectionAliasNotRegisteredWithParser) Error() string { + return fmt.Sprintf("section alias %q not registered with parser", s.missingAlias) +} + +func (s SectionAliasNotRegisteredWithParser) Is(err error) bool { + _, ok := err.(SectionAliasNotRegisteredWithParser) + return ok +} + +var MissingParameterClosingBracketsError = fmt.Errorf("section parameter is missing closing %q", constants.ParameterClosingBrackets) + +var MoreThanOneOpeningQuotesError = fmt.Errorf("found more than one %q parameter start sequences", constants.ParameterClosingBrackets) + +var SectionTypeDoesNotAcceptParametersError = errors.New("section type does not accept a parameter") + +var SectionTypeDoesNotAcceptPrefixError = errors.New("section may not contain a Prefix") + +var SectionTypeDoesNotAcceptSuffixError = errors.New("section may not contain a Suffix") diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/newline.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/newline.go new file mode 100644 index 000000000..b54788b65 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/newline.go @@ -0,0 +1,41 @@ +package sections + +import ( + "github.com/daixiang0/gci/pkg/configuration" + "github.com/daixiang0/gci/pkg/constants" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +func init() { + newLineType := SectionType{ + generatorFun: func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) { + return NewLine{}, nil + }, + aliases: []string{"NL", "NewLine"}, + description: "Prints an empty line", + }.StandAloneSection().WithoutParameter() + SectionParserInst.registerSectionWithoutErr(&newLineType) +} + +type NewLine struct{} + +func (n NewLine) sectionPrefix() Section { + return nil +} + +func (n NewLine) sectionSuffix() Section { + return nil +} + +func (n NewLine) MatchSpecificity(spec importPkg.ImportDef) specificity.MatchSpecificity { + return specificity.MisMatch{} +} + +func (n NewLine) Format(imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string { + return constants.Linebreak +} + +func (n NewLine) String() string { + return "NewLine" +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/prefix.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/prefix.go new file mode 100644 index 000000000..49b30b243 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/prefix.go @@ -0,0 +1,51 @@ +package sections + +import ( + "fmt" + "strings" + + "github.com/daixiang0/gci/pkg/configuration" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +func init() { + prefixType := &SectionType{ + generatorFun: func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) { + return Prefix{parameter, sectionPrefix, sectionSuffix}, nil + }, + aliases: []string{"Prefix", "pkgPrefix"}, + parameterHelp: "gitlab.com/myorg", + description: "Groups all imports with the specified Prefix. 
Imports will be matched to the longest Prefix.", + } + SectionParserInst.registerSectionWithoutErr(prefixType) +} + +type Prefix struct { + ImportPrefix string + Prefix Section + Suffix Section +} + +func (p Prefix) sectionPrefix() Section { + return p.Prefix +} + +func (p Prefix) sectionSuffix() Section { + return p.Suffix +} + +func (p Prefix) MatchSpecificity(spec importPkg.ImportDef) specificity.MatchSpecificity { + if len(p.ImportPrefix) > 0 && strings.HasPrefix(spec.Path(), p.ImportPrefix) { + return specificity.Match{len(p.ImportPrefix)} + } + return specificity.MisMatch{} +} + +func (p Prefix) Format(imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string { + return inorderSectionFormat(p, imports, cfg) +} + +func (p Prefix) String() string { + return sectionStringWithPrefixSuffix(fmt.Sprintf("Prefix(%s)", p.ImportPrefix), p) +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/section.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/section.go new file mode 100644 index 000000000..cbfd46748 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/section.go @@ -0,0 +1,58 @@ +package sections + +import ( + "fmt" + + "github.com/daixiang0/gci/pkg/configuration" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +// Section defines a part of the formatted output. +type Section interface { + // MatchSpecificity returns how well an Import matches to this Section + MatchSpecificity(spec importPkg.ImportDef) specificity.MatchSpecificity + // Format receives the array of imports that have matched this section and formats them according to it´s rules + Format(imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string + // Returns the Section that will be prefixed if this section is rendered + sectionPrefix() Section + // Returns the Section that will be suffixed if this section is rendered + sectionSuffix() Section + // String Implements the stringer interface + String() string +} + +//Unbound methods that are required until interface methods are supported + +// Default method for formatting a section +func inorderSectionFormat(section Section, imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string { + imports = importPkg.SortImportsByPath(imports) + var output string + if len(imports) > 0 && section.sectionPrefix() != nil { + // imports are not passed to a prefix section to prevent rendering them twice + output += section.sectionPrefix().Format([]importPkg.ImportDef{}, cfg) + } + for _, importDef := range imports { + output += importDef.Format(cfg) + } + if len(imports) > 0 && section.sectionSuffix() != nil { + // imports are not passed to a suffix section to prevent rendering them twice + output += section.sectionSuffix().Format([]importPkg.ImportDef{}, cfg) + } + return output +} + +// Default method for converting a section to a String representation +func sectionStringWithPrefixSuffix(mainSectionStr string, section Section) (output string) { + if section.sectionPrefix() != nil { + output += fmt.Sprintf("%v:", section.sectionPrefix()) + } else if section.sectionSuffix() != nil { + // insert empty prefix to make suffix distinguishable from prefix + output += ":" + } + output += mainSectionStr + if section.sectionSuffix() != nil { + output += fmt.Sprintf(":%v", section.sectionSuffix()) + } + return output +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/sectionparser.go 
b/vendor/github.com/daixiang0/gci/pkg/gci/sections/sectionparser.go new file mode 100644 index 000000000..b0b89a143 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/sectionparser.go @@ -0,0 +1,139 @@ +package sections + +import ( + "fmt" + "strings" + + "github.com/daixiang0/gci/pkg/constants" +) + +var SectionParserInst = SectionParser{} + +type SectionParser struct { + sectionTypes []SectionType +} + +func (s *SectionParser) RegisterSection(newSectionType *SectionType) error { + for _, existingSectionType := range s.sectionTypes { + for _, alias := range existingSectionType.aliases { + for _, newAlias := range newSectionType.aliases { + if alias == newAlias { + return TypeAlreadyRegisteredError{alias, *newSectionType, existingSectionType} + } + } + } + } + s.sectionTypes = append(s.sectionTypes, *newSectionType) + return nil +} + +func (s *SectionParser) registerSectionWithoutErr(newSectionType *SectionType) { + err := s.RegisterSection(newSectionType) + if err != nil { + panic(err) + } +} + +func (s *SectionParser) ParseSectionStrings(sectionStrings []string, withSuffix, withPrefix bool) ([]Section, error) { + var parsedSections []Section + for _, sectionStr := range sectionStrings { + section, err := s.parseSectionString(sectionStr, withSuffix, withPrefix) + if err != nil { + return nil, SectionParsingError{err}.Wrap(sectionStr) + } + parsedSections = append(parsedSections, section) + } + return parsedSections, nil +} + +func (s *SectionParser) parseSectionString(sectionStr string, withSuffix, withPrefix bool) (Section, error) { + trimmedSection := strings.TrimSpace(sectionStr) + sectionSegments := strings.Split(trimmedSection, constants.SectionSeparator) + switch len(sectionSegments) { + case 1: + // section + return s.parseSectionStringComponents("", sectionSegments[0], "") + case 2: + // prefix + section + if !withPrefix { + return nil, PrefixNotAllowedError + } + return s.parseSectionStringComponents(sectionSegments[0], sectionSegments[1], "") + case 3: + // prefix + section + suffix + if !withPrefix { + return nil, PrefixNotAllowedError + } + if !withSuffix { + return nil, SuffixNotAllowedError + } + return s.parseSectionStringComponents(sectionSegments[0], sectionSegments[1], sectionSegments[2]) + } + return nil, SectionFormatInvalidError +} + +func (s *SectionParser) parseSectionStringComponents(sectionPrefixStr string, sectionStr string, sectionSuffixStr string) (Section, error) { + var sectionPrefix, sectionSuffix Section + var err error + if len(sectionPrefixStr) > 0 { + sectionPrefix, err = s.createSectionFromString(sectionPrefixStr, nil, nil) + if err != nil { + return nil, fmt.Errorf("section prefix %q could not be parsed: %w", sectionPrefixStr, err) + } + } + if len(sectionSuffixStr) > 0 { + sectionSuffix, err = s.createSectionFromString(sectionSuffixStr, nil, nil) + if err != nil { + return nil, fmt.Errorf("section suffix %q could not be parsed: %w", sectionSuffixStr, err) + } + } + section, err := s.createSectionFromString(sectionStr, sectionPrefix, sectionSuffix) + if err != nil { + return nil, err + } + return section, nil +} + +func (s *SectionParser) createSectionFromString(sectionStr string, prefixSection, suffixSection Section) (Section, error) { + // create map of all aliases + aliasMap := map[string]SectionType{} + for _, sectionType := range s.sectionTypes { + for _, alias := range sectionType.aliases { + aliasMap[strings.ToLower(alias)] = sectionType + } + } + // parse everything before the parameter brackets + sectionComponents := 
strings.Split(sectionStr, constants.ParameterOpeningBrackets) + alias := sectionComponents[0] + sectionType, exists := aliasMap[strings.ToLower(alias)] + if !exists { + return nil, SectionAliasNotRegisteredWithParser{alias} + } + switch len(sectionComponents) { + case 1: + return sectionType.generatorFun("", prefixSection, suffixSection) + case 2: + if strings.HasSuffix(sectionComponents[1], constants.ParameterClosingBrackets) { + return sectionType.generatorFun(strings.TrimSuffix(sectionComponents[1], constants.ParameterClosingBrackets), prefixSection, suffixSection) + } else { + return nil, MissingParameterClosingBracketsError + } + } + return nil, MoreThanOneOpeningQuotesError +} + +func (s *SectionParser) SectionHelpTexts() string { + help := "" + for _, sectionType := range s.sectionTypes { + var aliasesWithParameters []string + for _, alias := range sectionType.aliases { + parameterSuffix := "" + if sectionType.parameterHelp != "" { + parameterSuffix = "(" + sectionType.parameterHelp + ")" + } + aliasesWithParameters = append(aliasesWithParameters, alias+parameterSuffix) + } + help += fmt.Sprintf("%s - %s\n", strings.Join(aliasesWithParameters, " | "), sectionType.description) + } + return help +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/sectiontype.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/sectiontype.go new file mode 100644 index 000000000..30287678b --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/sectiontype.go @@ -0,0 +1,40 @@ +package sections + +import ( + "fmt" +) + +// A SectionType is used to dynamically register Sections with the parser +type SectionType struct { + generatorFun func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) + aliases []string + parameterHelp string + description string +} + +func (t SectionType) WithoutParameter() SectionType { + generatorFun := func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) { + if parameter != "" { + return nil, SectionTypeDoesNotAcceptParametersError + } + return t.generatorFun(parameter, sectionPrefix, sectionSuffix) + } + return SectionType{generatorFun, t.aliases, "", t.description} +} + +func (t SectionType) StandAloneSection() SectionType { + generatorFun := func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) { + if sectionPrefix != nil { + return nil, SectionTypeDoesNotAcceptPrefixError + } + if sectionSuffix != nil { + return nil, SectionTypeDoesNotAcceptSuffixError + } + return t.generatorFun(parameter, sectionPrefix, sectionSuffix) + } + return SectionType{generatorFun, t.aliases, t.parameterHelp, t.description} +} + +func (t SectionType) String() string { + return fmt.Sprintf("Sectiontype(aliases: %v,description: %s)", t.aliases, t.description) +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage.go new file mode 100644 index 000000000..b53475b08 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage.go @@ -0,0 +1,51 @@ +package sections + +import ( + "github.com/daixiang0/gci/pkg/configuration" + importPkg "github.com/daixiang0/gci/pkg/gci/imports" + "github.com/daixiang0/gci/pkg/gci/specificity" +) + +func init() { + standardPackageType := SectionType{ + generatorFun: func(parameter string, sectionPrefix, sectionSuffix Section) (Section, error) { + return StandardPackage{sectionPrefix, sectionSuffix}, nil + }, + aliases: []string{"Std", "Standard"}, + 
description: "Captures all standard packages if they do not match another section", + }.WithoutParameter() + SectionParserInst.registerSectionWithoutErr(&standardPackageType) +} + +type StandardPackage struct { + prefix Section + suffix Section +} + +func (s StandardPackage) sectionPrefix() Section { + return s.prefix +} + +func (s StandardPackage) sectionSuffix() Section { + return s.suffix +} + +func (s StandardPackage) MatchSpecificity(spec importPkg.ImportDef) specificity.MatchSpecificity { + if isStandardPackage(spec.Path()) { + return specificity.StandardPackageMatch{} + } + return specificity.MisMatch{} +} + +func (s StandardPackage) Format(imports []importPkg.ImportDef, cfg configuration.FormatterConfiguration) string { + return inorderSectionFormat(s, imports, cfg) +} + +func (s StandardPackage) String() string { + return sectionStringWithPrefixSuffix("Standard", s) +} + +func isStandardPackage(pkg string) bool { + _, ok := standardPackages[pkg] + return ok +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/std.go b/vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage_list.go similarity index 96% rename from vendor/github.com/daixiang0/gci/pkg/gci/std.go rename to vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage_list.go index ac96b55ab..a501f2915 100644 --- a/vendor/github.com/daixiang0/gci/pkg/gci/std.go +++ b/vendor/github.com/daixiang0/gci/pkg/gci/sections/standardpackage_list.go @@ -1,6 +1,6 @@ -package gci +package sections -// Code generated based on go1.16beta1. DO NOT EDIT. +// Code generated based on go1.17.5. DO NOT EDIT. var standardPackages = map[string]struct{}{ "archive/tar": {}, @@ -63,6 +63,7 @@ var standardPackages = map[string]struct{}{ "fmt": {}, "go/ast": {}, "go/build": {}, + "go/build/constraint": {}, "go/constant": {}, "go/doc": {}, "go/format": {}, @@ -154,8 +155,3 @@ var standardPackages = map[string]struct{}{ "unicode/utf8": {}, "unsafe": {}, } - -func isStandardPackage(pkg string) bool { - _, ok := standardPackages[pkg] - return ok -} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/specificity/default.go b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/default.go new file mode 100644 index 000000000..2d91bd8b4 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/default.go @@ -0,0 +1,19 @@ +package specificity + +type Default struct { +} + +func (d Default) IsMoreSpecific(than MatchSpecificity) bool { + return isMoreSpecific(d, than) +} +func (d Default) Equal(to MatchSpecificity) bool { + return equalSpecificity(d, to) +} + +func (d Default) class() specificityClass { + return DefaultClass +} + +func (d Default) String() string { + return "Default" +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/specificity/match.go b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/match.go new file mode 100644 index 000000000..f08d2b66b --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/match.go @@ -0,0 +1,24 @@ +package specificity + +import "fmt" + +type Match struct { + Length int +} + +func (m Match) IsMoreSpecific(than MatchSpecificity) bool { + otherMatch, isMatch := than.(Match) + return isMoreSpecific(m, than) || (isMatch && m.Length > otherMatch.Length) +} + +func (m Match) Equal(to MatchSpecificity) bool { + return equalSpecificity(m, to) +} + +func (m Match) class() specificityClass { + return MatchClass +} + +func (m Match) String() string { + return fmt.Sprintf("Match(length: %d)", m.Length) +} diff --git 
a/vendor/github.com/daixiang0/gci/pkg/gci/specificity/mismatch.go b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/mismatch.go new file mode 100644 index 000000000..78013e343 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/mismatch.go @@ -0,0 +1,21 @@ +package specificity + +type MisMatch struct { +} + +func (m MisMatch) IsMoreSpecific(than MatchSpecificity) bool { + return isMoreSpecific(m, than) +} + +func (m MisMatch) Equal(to MatchSpecificity) bool { + return equalSpecificity(m, to) +} + +func (m MisMatch) class() specificityClass { + return MisMatchClass +} + +func (m MisMatch) String() string { + return "Mismatch" + +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/specificity/specificity.go b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/specificity.go new file mode 100644 index 000000000..32b6e2802 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/specificity.go @@ -0,0 +1,28 @@ +package specificity + +type specificityClass int + +const ( + MisMatchClass = 0 + DefaultClass = 10 + StandardPackageClass = 20 + MatchClass = 30 +) + +// MatchSpecificity is used to determine which section matches an import best +type MatchSpecificity interface { + IsMoreSpecific(than MatchSpecificity) bool + Equal(to MatchSpecificity) bool + class() specificityClass +} + +//Unbound methods that are required until interface methods are supported + +func isMoreSpecific(this, than MatchSpecificity) bool { + return this.class() > than.class() +} + +func equalSpecificity(base, to MatchSpecificity) bool { + // m.class() == to.class() would not work for Match + return !base.IsMoreSpecific(to) && !to.IsMoreSpecific(base) +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/specificity/standard.go b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/standard.go new file mode 100644 index 000000000..30e8f8f97 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/specificity/standard.go @@ -0,0 +1,20 @@ +package specificity + +type StandardPackageMatch struct { +} + +func (s StandardPackageMatch) IsMoreSpecific(than MatchSpecificity) bool { + return isMoreSpecific(s, than) +} + +func (s StandardPackageMatch) Equal(to MatchSpecificity) bool { + return equalSpecificity(s, to) +} + +func (s StandardPackageMatch) class() specificityClass { + return StandardPackageClass +} + +func (s StandardPackageMatch) String() string { + return "Standard" +} diff --git a/vendor/github.com/daixiang0/gci/pkg/io/file.go b/vendor/github.com/daixiang0/gci/pkg/io/file.go new file mode 100644 index 000000000..f92d16e14 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/io/file.go @@ -0,0 +1,59 @@ +package io + +import "io/ioutil" + +// FileObj allows mocking the access to files +type FileObj interface { + Load() ([]byte, error) + Path() string +} + +// File represents a file that can be loaded from the file system +type File struct { + FilePath string +} + +func (f File) Path() string { + return f.FilePath +} + +func (f File) Load() ([]byte, error) { + return ioutil.ReadFile(f.FilePath) +} + +// FileGeneratorFunc returns a list of files that can be loaded and processed +type FileGeneratorFunc func() ([]FileObj, error) + +func (a FileGeneratorFunc) Combine(b FileGeneratorFunc) FileGeneratorFunc { + return func() ([]FileObj, error) { + files, err := a() + if err != nil { + return nil, err + } + additionalFiles, err := b() + if err != nil { + return nil, err + } + files = append(files, additionalFiles...) 
+ return files, err + } +} + +func GoFilesInPathsGenerator(paths []string) FileGeneratorFunc { + return FilesInPathsGenerator(paths, isGoFile) +} + +func FilesInPathsGenerator(paths []string, fileCheckFun fileCheckFunction) FileGeneratorFunc { + return func() (foundFiles []FileObj, err error) { + for _, path := range paths { + files, err := FindFilesForPath(path, fileCheckFun) + if err != nil { + return nil, err + } + for _, filePath := range files { + foundFiles = append(foundFiles, File{filePath}) + } + } + return foundFiles, nil + } +} diff --git a/vendor/github.com/daixiang0/gci/pkg/io/search.go b/vendor/github.com/daixiang0/gci/pkg/io/search.go new file mode 100644 index 000000000..04f005876 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/io/search.go @@ -0,0 +1,47 @@ +package io + +import ( + "io/fs" + "os" + "path/filepath" +) + +type fileCheckFunction func(file os.FileInfo) bool + +func FindFilesForPath(path string, fileCheckFun fileCheckFunction) ([]string, error) { + switch entry, err := os.Stat(path); { + case err != nil: + return nil, err + case entry.IsDir(): + return findFilesForDirectory(path, fileCheckFun) + case fileCheckFun(entry): + return []string{filepath.Clean(path)}, nil + default: + return []string{}, nil + } +} + +func findFilesForDirectory(dirPath string, fileCheckFun fileCheckFunction) ([]string, error) { + var filePaths []string + err := filepath.WalkDir(dirPath, func(path string, entry fs.DirEntry, err error) error { + if err != nil { + return err + } + file, err := entry.Info() + if err != nil { + return err + } + if !entry.IsDir() && fileCheckFun(file) { + filePaths = append(filePaths, filepath.Clean(path)) + } + return nil + }) + if err != nil { + return nil, err + } + return filePaths, nil +} + +func isGoFile(file os.FileInfo) bool { + return !file.IsDir() && filepath.Ext(file.Name()) == ".go" +} diff --git a/vendor/github.com/daixiang0/gci/pkg/io/stdin.go b/vendor/github.com/daixiang0/gci/pkg/io/stdin.go new file mode 100644 index 000000000..5d92768db --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/io/stdin.go @@ -0,0 +1,28 @@ +package io + +import ( + "io/ioutil" + "os" +) + +type stdInFile struct { +} + +func (s stdInFile) Load() ([]byte, error) { + return ioutil.ReadAll(os.Stdin) +} + +func (s stdInFile) Path() string { + return "StdIn" +} + +var StdInGenerator FileGeneratorFunc = func() ([]FileObj, error) { + stat, err := os.Stdin.Stat() + if err != nil { + return nil, err + } + if (stat.Mode() & os.ModeCharDevice) == 0 { + return []FileObj{stdInFile{}}, nil + } + return []FileObj{}, nil +} diff --git a/vendor/github.com/docker/distribution/reference/normalize.go b/vendor/github.com/docker/distribution/reference/normalize.go index 2d71fc5e9..b3dfb7a6d 100644 --- a/vendor/github.com/docker/distribution/reference/normalize.go +++ b/vendor/github.com/docker/distribution/reference/normalize.go @@ -56,6 +56,35 @@ func ParseNormalizedNamed(s string) (Named, error) { return named, nil } +// ParseDockerRef normalizes the image reference following the docker convention. This is added +// mainly for backward compatibility. +// The reference returned can only be either tagged or digested. For reference contains both tag +// and digest, the function returns digested reference, e.g. docker.io/library/busybox:latest@ +// sha256:7cc4b5aefd1d0cadf8d97d4350462ba51c694ebca145b08d7d41b41acc8db5aa will be returned as +// docker.io/library/busybox@sha256:7cc4b5aefd1d0cadf8d97d4350462ba51c694ebca145b08d7d41b41acc8db5aa. 
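// Illustrative usage sketch for the ParseDockerRef helper added in this hunk; the
// import path is the vendored package path, and the sample references are assumptions
// chosen to mirror the doc comment above.
package main

import (
	"fmt"

	"github.com/docker/distribution/reference"
)

func main() {
	// A bare name gets normalized and tagged: docker.io/library/busybox:latest.
	named, err := reference.ParseDockerRef("busybox")
	if err != nil {
		panic(err)
	}
	fmt.Println(named.String())

	// A reference carrying both a tag and a digest keeps only the digest,
	// as the doc comment above describes.
	named, err = reference.ParseDockerRef(
		"busybox:latest@sha256:7cc4b5aefd1d0cadf8d97d4350462ba51c694ebca145b08d7d41b41acc8db5aa")
	if err != nil {
		panic(err)
	}
	fmt.Println(named.String()) // docker.io/library/busybox@sha256:7cc4b5ae...
}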
+func ParseDockerRef(ref string) (Named, error) { + named, err := ParseNormalizedNamed(ref) + if err != nil { + return nil, err + } + if _, ok := named.(NamedTagged); ok { + if canonical, ok := named.(Canonical); ok { + // The reference is both tagged and digested, only + // return digested. + newNamed, err := WithName(canonical.Name()) + if err != nil { + return nil, err + } + newCanonical, err := WithDigest(newNamed, canonical.Digest()) + if err != nil { + return nil, err + } + return newCanonical, nil + } + } + return TagNameOnly(named), nil +} + // splitDockerDomain splits a repository name to domain and remotename string. // If no valid domain is found, the default domain is used. Repository name // needs to be already validated before. diff --git a/vendor/github.com/docker/distribution/reference/reference.go b/vendor/github.com/docker/distribution/reference/reference.go index 2f66cca87..8c0c23b2f 100644 --- a/vendor/github.com/docker/distribution/reference/reference.go +++ b/vendor/github.com/docker/distribution/reference/reference.go @@ -205,7 +205,7 @@ func Parse(s string) (Reference, error) { var repo repository nameMatch := anchoredNameRegexp.FindStringSubmatch(matches[1]) - if nameMatch != nil && len(nameMatch) == 3 { + if len(nameMatch) == 3 { repo.domain = nameMatch[1] repo.path = nameMatch[2] } else { diff --git a/vendor/github.com/docker/distribution/registry/api/errcode/errors.go b/vendor/github.com/docker/distribution/registry/api/errcode/errors.go index 6d9bb4b62..4c35b879a 100644 --- a/vendor/github.com/docker/distribution/registry/api/errcode/errors.go +++ b/vendor/github.com/docker/distribution/registry/api/errcode/errors.go @@ -207,11 +207,11 @@ func (errs Errors) MarshalJSON() ([]byte, error) { for _, daErr := range errs { var err Error - switch daErr.(type) { + switch daErr := daErr.(type) { case ErrorCode: - err = daErr.(ErrorCode).WithDetail(nil) + err = daErr.WithDetail(nil) case Error: - err = daErr.(Error) + err = daErr default: err = ErrorCodeUnknown.WithDetail(daErr) diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go index 397a17424..b2d06881d 100644 --- a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go +++ b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go @@ -108,8 +108,15 @@ func (nom namedOccurrenceMap) checkStatement(stmt ast.Stmt, ifPos token.Pos) { for _, el := range v.Body.List { nom.checkStatement(el, v.If) } + if elseBlock, ok := v.Else.(*ast.BlockStmt); ok { + for _, el := range elseBlock.List { + nom.checkStatement(el, v.If) + } + } switch cond := v.Cond.(type) { + case *ast.UnaryExpr: + nom.checkExpression(cond.X, v.If) case *ast.BinaryExpr: nom.checkExpression(cond.X, v.If) nom.checkExpression(cond.Y, v.If) @@ -217,6 +224,8 @@ func (nom namedOccurrenceMap) checkExpression(candidate ast.Expr, ifPos token.Po case *ast.KeyValueExpr: nom.checkExpression(v.Key, ifPos) nom.checkExpression(v.Value, ifPos) + case *ast.SelectorExpr: + nom.checkExpression(v.X, ifPos) } } case *ast.FuncLit: diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go index 34224c93a..0d3793a57 100644 --- a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go +++ b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go @@ -187,14 +187,14 @@ func (nom namedOccurrenceMap) addFromCondition(stmt *ast.IfStmt) { case *ast.BinaryExpr: for _, v := range [2]ast.Expr{v.X, v.Y} { 
switch e := v.(type) { + case *ast.CallExpr: + nom.addFromCallExpr(stmt.If, e) case *ast.Ident: nom.addFromIdent(stmt.If, e) case *ast.SelectorExpr: nom.addFromIdent(stmt.If, e.X) } } - case *ast.Ident: - nom.addFromIdent(stmt.If, v) case *ast.CallExpr: for _, a := range v.Args { switch e := a.(type) { @@ -204,6 +204,15 @@ func (nom namedOccurrenceMap) addFromCondition(stmt *ast.IfStmt) { nom.addFromCallExpr(stmt.If, e) } } + case *ast.Ident: + nom.addFromIdent(stmt.If, v) + case *ast.UnaryExpr: + switch e := v.X.(type) { + case *ast.Ident: + nom.addFromIdent(stmt.If, e) + case *ast.SelectorExpr: + nom.addFromIdent(stmt.If, e.X) + } } } diff --git a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md index 3959a62a5..efbc9febe 100644 --- a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md +++ b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md @@ -4,7 +4,14 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [0.3.1] +## [0.4.0] - 2021-12-19 +### Added +- Support method receivers with type parameters introduced in Go 1.18 + +### Changed +- Use more efficient filepath.WalkDir instead of filepath.Walk + +## [0.3.1] - 2020-10-20 ### Added - Test coverage diff --git a/vendor/github.com/fzipp/gocyclo/README.md b/vendor/github.com/fzipp/gocyclo/README.md index f1056934c..3b009a9d1 100644 --- a/vendor/github.com/fzipp/gocyclo/README.md +++ b/vendor/github.com/fzipp/gocyclo/README.md @@ -1,6 +1,7 @@ # gocyclo [![PkgGoDev](https://pkg.go.dev/badge/github.com/fzipp/gocyclo)](https://pkg.go.dev/github.com/fzipp/gocyclo) +![Build Status](https://github.com/fzipp/gocyclo/workflows/build/badge.svg) [![Go Report Card](https://goreportcard.com/badge/github.com/fzipp/gocyclo)](https://goreportcard.com/report/github.com/fzipp/gocyclo) Gocyclo calculates @@ -31,7 +32,7 @@ to smaller functions. 
To install the `gocyclo` command, run ``` -$ go get github.com/fzipp/gocyclo/cmd/gocyclo +$ go install github.com/fzipp/gocyclo/cmd/gocyclo@latest ``` and put the resulting binary in one of your PATH directories if diff --git a/vendor/github.com/fzipp/gocyclo/analyze.go b/vendor/github.com/fzipp/gocyclo/analyze.go index c053e83e6..1f3479ab5 100644 --- a/vendor/github.com/fzipp/gocyclo/analyze.go +++ b/vendor/github.com/fzipp/gocyclo/analyze.go @@ -9,6 +9,7 @@ import ( "go/ast" "go/parser" "go/token" + "io/fs" "log" "os" "path/filepath" @@ -39,8 +40,8 @@ func Analyze(paths []string, ignore *regexp.Regexp) Stats { } func analyzeDir(dirname string, ignore *regexp.Regexp, stats Stats) Stats { - filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error { - if err == nil && isGoFile(info) { + filepath.WalkDir(dirname, func(path string, entry fs.DirEntry, err error) error { + if err == nil && isGoFile(entry) { stats = analyzeFile(path, ignore, stats) } return err @@ -48,8 +49,8 @@ func analyzeDir(dirname string, ignore *regexp.Regexp, stats Stats) Stats { return stats } -func isGoFile(f os.FileInfo) bool { - return !f.IsDir() && strings.HasSuffix(f.Name(), ".go") +func isGoFile(entry fs.DirEntry) bool { + return !entry.IsDir() && strings.HasSuffix(entry.Name(), ".go") } func analyzeFile(path string, ignore *regexp.Regexp, stats Stats) Stats { @@ -137,15 +138,3 @@ func funcName(fn *ast.FuncDecl) string { } return fn.Name.Name } - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). -func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - } - return "BADRECV" -} diff --git a/vendor/github.com/fzipp/gocyclo/go.mod b/vendor/github.com/fzipp/gocyclo/go.mod index c80982786..1266f2f0d 100644 --- a/vendor/github.com/fzipp/gocyclo/go.mod +++ b/vendor/github.com/fzipp/gocyclo/go.mod @@ -1,3 +1,3 @@ module github.com/fzipp/gocyclo -go 1.15 +go 1.18 diff --git a/vendor/github.com/fzipp/gocyclo/recv.go b/vendor/github.com/fzipp/gocyclo/recv.go new file mode 100644 index 000000000..a5c82fef5 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/recv.go @@ -0,0 +1,26 @@ +// Copyright 2021 Frederik Zipp. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.18 +// +build go1.18 + +package gocyclo + +import "go/ast" + +// recvString returns a string representation of recv of the +// form "T", "*T", or "BADRECV" (if not a proper receiver type). +func recvString(recv ast.Expr) string { + switch t := recv.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + return "*" + recvString(t.X) + case *ast.IndexExpr: + return recvString(t.X) + case *ast.IndexListExpr: + return recvString(t.X) + } + return "BADRECV" +} diff --git a/vendor/github.com/fzipp/gocyclo/recv_pre118.go b/vendor/github.com/fzipp/gocyclo/recv_pre118.go new file mode 100644 index 000000000..2fe2d0cdb --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/recv_pre118.go @@ -0,0 +1,24 @@ +// Copyright 2021 Frederik Zipp. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.18 +// +build !go1.18 + +package gocyclo + +import "go/ast" + +// recvString returns a string representation of recv of the +// form "T", "*T", or "BADRECV" (if not a proper receiver type). 
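// A minimal sketch (assumed sample source, Go 1.18+ toolchain) of why recvString
// gains the *ast.IndexExpr and *ast.IndexListExpr cases: a generic method receiver
// parses as an index expression, and ast.IndexListExpr only exists from Go 1.18 on,
// which is why the function is split across recv.go and recv_pre118.go by build tag.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `package p

type Box[T any] struct{ v T }

func (b *Box[T]) Get() T { return b.v }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	get := file.Decls[1].(*ast.FuncDecl)            // the Get method
	recv := get.Recv.List[0].Type.(*ast.StarExpr).X // receiver type without the '*'
	fmt.Printf("%T\n", recv)                        // *ast.IndexExpr, so recvString reports "*Box"
}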
+func recvString(recv ast.Expr) string { + switch t := recv.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + return "*" + recvString(t.X) + case *ast.IndexExpr: + return recvString(t.X) + } + return "BADRECV" +} diff --git a/vendor/github.com/gin-gonic/gin/.travis.yml b/vendor/github.com/gin-gonic/gin/.travis.yml index 8ebae7123..bcc21414d 100644 --- a/vendor/github.com/gin-gonic/gin/.travis.yml +++ b/vendor/github.com/gin-gonic/gin/.travis.yml @@ -3,8 +3,6 @@ language: go matrix: fast_finish: true include: - - go: 1.12.x - env: GO111MODULE=on - go: 1.13.x - go: 1.13.x env: diff --git a/vendor/github.com/gin-gonic/gin/CHANGELOG.md b/vendor/github.com/gin-gonic/gin/CHANGELOG.md index 308af74c3..4c806a5a7 100644 --- a/vendor/github.com/gin-gonic/gin/CHANGELOG.md +++ b/vendor/github.com/gin-gonic/gin/CHANGELOG.md @@ -1,5 +1,35 @@ # Gin ChangeLog +## Gin v1.7.7 + +### BUGFIXES + +* Fixed X-Forwarded-For unsafe handling of CVE-2020-28483 [#2844](https://github.com/gin-gonic/gin/pull/2844), closed issue [#2862](https://github.com/gin-gonic/gin/issues/2862). +* Tree: updated the code logic for `latestNode` [#2897](https://github.com/gin-gonic/gin/pull/2897), closed issue [#2894](https://github.com/gin-gonic/gin/issues/2894) [#2878](https://github.com/gin-gonic/gin/issues/2878). +* Tree: fixed the misplacement of adding slashes [#2847](https://github.com/gin-gonic/gin/pull/2847), closed issue [#2843](https://github.com/gin-gonic/gin/issues/2843). +* Tree: fixed tsr with mixed static and wildcard paths [#2924](https://github.com/gin-gonic/gin/pull/2924), closed issue [#2918](https://github.com/gin-gonic/gin/issues/2918). + +### ENHANCEMENTS + +* TrustedProxies: make it backward-compatible [#2887](https://github.com/gin-gonic/gin/pull/2887), closed issue [#2819](https://github.com/gin-gonic/gin/issues/2819). +* TrustedPlatform: provide custom options for another CDN services [#2906](https://github.com/gin-gonic/gin/pull/2906). + +### DOCS + +* NoMethod: added usage annotation ([#2832](https://github.com/gin-gonic/gin/pull/2832#issuecomment-929954463)). + +## Gin v1.7.6 + +### BUGFIXES + +* bump new release to fix v1.7.5 release error by using v1.7.4 codes. + +## Gin v1.7.4 + +### BUGFIXES + +* bump new release to fix checksum mismatch + ## Gin v1.7.3 ### BUGFIXES diff --git a/vendor/github.com/gin-gonic/gin/README.md b/vendor/github.com/gin-gonic/gin/README.md index d4772d764..9bf459b09 100644 --- a/vendor/github.com/gin-gonic/gin/README.md +++ b/vendor/github.com/gin-gonic/gin/README.md @@ -77,6 +77,7 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi - [http2 server push](#http2-server-push) - [Define format for the log of routes](#define-format-for-the-log-of-routes) - [Set and get a cookie](#set-and-get-a-cookie) + - [Don't trust all proxies](#don't-trust-all-proxies) - [Testing](#testing) - [Users](#users) @@ -84,7 +85,7 @@ Gin is a web framework written in Go (Golang). It features a martini-like API wi To install Gin package, you need to install Go and set your Go workspace first. -1. The first need [Go](https://golang.org/) installed (**version 1.12+ is required**), then you can use the below Go command to install Gin. +1. The first need [Go](https://golang.org/) installed (**version 1.13+ is required**), then you can use the below Go command to install Gin. 
```sh $ go get -u github.com/gin-gonic/gin @@ -2130,11 +2131,17 @@ Gin lets you specify which headers to hold the real client IP (if any), as well as specifying which proxies (or direct clients) you trust to specify one of these headers. -The `TrustedProxies` slice on your `gin.Engine` specifes network addresses or -network CIDRs from where clients which their request headers related to client +Use function `SetTrustedProxies()` on your `gin.Engine` to specify network addresses +or network CIDRs from where clients which their request headers related to client IP can be trusted. They can be IPv4 addresses, IPv4 CIDRs, IPv6 addresses or IPv6 CIDRs. +**Attention:** Gin trust all proxies by default if you don't specify a trusted +proxy using the function above, **this is NOT safe**. At the same time, if you don't +use any proxy, you can disable this feature by using `Engine.SetTrustedProxies(nil)`, +then `Context.ClientIP()` will return the remote address directly to avoid some +unnecessary computation. + ```go import ( "fmt" @@ -2145,7 +2152,7 @@ import ( func main() { router := gin.Default() - router.TrustedProxies = []string{"192.168.1.2"} + router.SetTrustedProxies([]string{"192.168.1.2"}) router.GET("/", func(c *gin.Context) { // If the client is 192.168.1.2, use the X-Forwarded-For @@ -2158,6 +2165,34 @@ func main() { } ``` +**Notice:** If you are using a CDN service, you can set the `Engine.TrustedPlatform` +to skip TrustedProxies check, it has a higher priority than TrustedProxies. +Look at the example below: +```go +import ( + "fmt" + + "github.com/gin-gonic/gin" +) + +func main() { + + router := gin.Default() + // Use predefined header gin.PlatformXXX + router.TrustedPlatform = gin.PlatformGoogleAppEngine + // Or set your own trusted request header for another trusted proxy service + // Don't set it to any suspect request header, it's unsafe + router.TrustedPlatform = "X-CDN-IP" + + router.GET("/", func(c *gin.Context) { + // If you set TrustedPlatform, ClientIP() will resolve the + // corresponding header and return IP directly + fmt.Printf("ClientIP: %s\n", c.ClientIP()) + }) + router.Run() +} +``` + ## Testing The `net/http/httptest` package is preferable way for HTTP testing. diff --git a/vendor/github.com/gin-gonic/gin/context.go b/vendor/github.com/gin-gonic/gin/context.go index dc03c358a..220d1bc7b 100644 --- a/vendor/github.com/gin-gonic/gin/context.go +++ b/vendor/github.com/gin-gonic/gin/context.go @@ -9,6 +9,7 @@ import ( "fmt" "io" "io/ioutil" + "log" "math" "mime/multipart" "net" @@ -53,8 +54,9 @@ type Context struct { index int8 fullPath string - engine *Engine - params *Params + engine *Engine + params *Params + skippedNodes *[]skippedNode // This mutex protect Keys map mu sync.RWMutex @@ -96,7 +98,8 @@ func (c *Context) reset() { c.Accepted = nil c.queryCache = nil c.formCache = nil - *c.params = (*c.params)[0:0] + *c.params = (*c.params)[:0] + *c.skippedNodes = (*c.skippedNodes)[:0] } // Copy returns a copy of the current context that can be safely used outside the request's scope. @@ -725,13 +728,23 @@ func (c *Context) ShouldBindBodyWith(obj interface{}, bb binding.BindingBody) (e return bb.BindBody(body, obj) } -// ClientIP implements a best effort algorithm to return the real client IP. +// ClientIP implements one best effort algorithm to return the real client IP. // It called c.RemoteIP() under the hood, to check if the remote IP is a trusted proxy or not. 
-// If it's it will then try to parse the headers defined in Engine.RemoteIPHeaders (defaulting to [X-Forwarded-For, X-Real-Ip]). -// If the headers are nots syntactically valid OR the remote IP does not correspong to a trusted proxy, +// If it is it will then try to parse the headers defined in Engine.RemoteIPHeaders (defaulting to [X-Forwarded-For, X-Real-Ip]). +// If the headers are not syntactically valid OR the remote IP does not correspond to a trusted proxy, // the remote IP (coming form Request.RemoteAddr) is returned. func (c *Context) ClientIP() string { + // Check if we're running on a trusted platform, continue running backwards if error + if c.engine.TrustedPlatform != "" { + // Developers can define their own header of Trusted Platform or use predefined constants + if addr := c.requestHeader(c.engine.TrustedPlatform); addr != "" { + return addr + } + } + + // Legacy "AppEngine" flag if c.engine.AppEngine { + log.Println(`The AppEngine flag is going to be deprecated. Please check issues #2723 and #2739 and use 'TrustedPlatform: gin.PlatformGoogleAppEngine' instead.`) if addr := c.requestHeader("X-Appengine-Remote-Addr"); addr != "" { return addr } @@ -744,7 +757,7 @@ func (c *Context) ClientIP() string { if trusted && c.engine.ForwardedByClientIP && c.engine.RemoteIPHeaders != nil { for _, headerName := range c.engine.RemoteIPHeaders { - ip, valid := validateHeader(c.requestHeader(headerName)) + ip, valid := c.engine.validateHeader(c.requestHeader(headerName)) if valid { return ip } @@ -753,10 +766,21 @@ func (c *Context) ClientIP() string { return remoteIP.String() } +func (e *Engine) isTrustedProxy(ip net.IP) bool { + if e.trustedCIDRs != nil { + for _, cidr := range e.trustedCIDRs { + if cidr.Contains(ip) { + return true + } + } + } + return false +} + // RemoteIP parses the IP from Request.RemoteAddr, normalizes and returns the IP (without the port). // It also checks if the remoteIP is a trusted proxy or not. 
// In order to perform this validation, it will see if the IP is contained within at least one of the CIDR blocks -// defined in Engine.TrustedProxies +// defined by Engine.SetTrustedProxies() func (c *Context) RemoteIP() (net.IP, bool) { ip, _, err := net.SplitHostPort(strings.TrimSpace(c.Request.RemoteAddr)) if err != nil { @@ -767,35 +791,25 @@ func (c *Context) RemoteIP() (net.IP, bool) { return nil, false } - if c.engine.trustedCIDRs != nil { - for _, cidr := range c.engine.trustedCIDRs { - if cidr.Contains(remoteIP) { - return remoteIP, true - } - } - } - - return remoteIP, false + return remoteIP, c.engine.isTrustedProxy(remoteIP) } -func validateHeader(header string) (clientIP string, valid bool) { +func (e *Engine) validateHeader(header string) (clientIP string, valid bool) { if header == "" { return "", false } items := strings.Split(header, ",") - for i, ipStr := range items { - ipStr = strings.TrimSpace(ipStr) + for i := len(items) - 1; i >= 0; i-- { + ipStr := strings.TrimSpace(items[i]) ip := net.ParseIP(ipStr) if ip == nil { return "", false } - // We need to return the first IP in the list, but, - // we should not early return since we need to validate that - // the rest of the header is syntactically valid - if i == 0 { - clientIP = ipStr - valid = true + // X-Forwarded-For is appended by proxy + // Check IPs in reverse order and stop when find untrusted proxy + if (i == 0) || (!e.isTrustedProxy(ip)) { + return ipStr, true } } return diff --git a/vendor/github.com/gin-gonic/gin/context_appengine.go b/vendor/github.com/gin-gonic/gin/context_appengine.go index d56584348..8bf938961 100644 --- a/vendor/github.com/gin-gonic/gin/context_appengine.go +++ b/vendor/github.com/gin-gonic/gin/context_appengine.go @@ -8,5 +8,5 @@ package gin func init() { - defaultAppEngine = true + defaultPlatform = PlatformGoogleAppEngine } diff --git a/vendor/github.com/gin-gonic/gin/debug.go b/vendor/github.com/gin-gonic/gin/debug.go index 4c7cd0c39..9bacc6857 100644 --- a/vendor/github.com/gin-gonic/gin/debug.go +++ b/vendor/github.com/gin-gonic/gin/debug.go @@ -12,7 +12,7 @@ import ( "strings" ) -const ginSupportMinGoVer = 12 +const ginSupportMinGoVer = 13 // IsDebugging returns true if the framework is running in debug mode. // Use SetMode(gin.ReleaseMode) to disable debug mode. @@ -67,7 +67,7 @@ func getMinVer(v string) (uint64, error) { func debugPrintWARNINGDefault() { if v, e := getMinVer(runtime.Version()); e == nil && v <= ginSupportMinGoVer { - debugPrint(`[WARNING] Now Gin requires Go 1.12+. + debugPrint(`[WARNING] Now Gin requires Go 1.13+. `) } diff --git a/vendor/github.com/gin-gonic/gin/gin.go b/vendor/github.com/gin-gonic/gin/gin.go index 03a0e127e..58e76f41f 100644 --- a/vendor/github.com/gin-gonic/gin/gin.go +++ b/vendor/github.com/gin-gonic/gin/gin.go @@ -11,6 +11,7 @@ import ( "net/http" "os" "path" + "reflect" "strings" "sync" @@ -25,7 +26,9 @@ var ( default405Body = []byte("405 method not allowed") ) -var defaultAppEngine bool +var defaultPlatform string + +var defaultTrustedCIDRs = []*net.IPNet{{IP: net.IP{0x0, 0x0, 0x0, 0x0}, Mask: net.IPMask{0x0, 0x0, 0x0, 0x0}}} // 0.0.0.0/0 // HandlerFunc defines the handler used by gin middleware as return value. type HandlerFunc func(*Context) @@ -52,6 +55,16 @@ type RouteInfo struct { // RoutesInfo defines a RouteInfo array. type RoutesInfo []RouteInfo +// Trusted platforms +const ( + // When running on Google App Engine. 
Trust X-Appengine-Remote-Addr + // for determining the client's IP + PlatformGoogleAppEngine = "X-Appengine-Remote-Addr" + // When using Cloudflare's CDN. Trust CF-Connecting-IP for determining + // the client's IP + PlatformCloudflare = "CF-Connecting-IP" +) + // Engine is the framework's instance, it contains the muxer, middleware and configuration settings. // Create an instance of Engine, by using New() or Default() type Engine struct { @@ -89,18 +102,7 @@ type Engine struct { // `(*gin.Context).Request.RemoteAddr`. ForwardedByClientIP bool - // List of headers used to obtain the client IP when - // `(*gin.Engine).ForwardedByClientIP` is `true` and - // `(*gin.Context).Request.RemoteAddr` is matched by at least one of the - // network origins of `(*gin.Engine).TrustedProxies`. - RemoteIPHeaders []string - - // List of network origins (IPv4 addresses, IPv4 CIDRs, IPv6 addresses or - // IPv6 CIDRs) from which to trust request's headers that contain - // alternative client IP when `(*gin.Engine).ForwardedByClientIP` is - // `true`. - TrustedProxies []string - + // DEPRECATED: USE `TrustedPlatform` WITH VALUE `gin.GoogleAppEngine` INSTEAD // #726 #755 If enabled, it will trust some headers starting with // 'X-AppEngine...' for better integration with that PaaS. AppEngine bool @@ -113,14 +115,24 @@ type Engine struct { // as url.Path gonna be used, which is already unescaped. UnescapePathValues bool - // Value of 'maxMemory' param that is given to http.Request's ParseMultipartForm - // method call. - MaxMultipartMemory int64 - // RemoveExtraSlash a parameter can be parsed from the URL even with extra slashes. // See the PR #1817 and issue #1644 RemoveExtraSlash bool + // List of headers used to obtain the client IP when + // `(*gin.Engine).ForwardedByClientIP` is `true` and + // `(*gin.Context).Request.RemoteAddr` is matched by at least one of the + // network origins of list defined by `(*gin.Engine).SetTrustedProxies()`. + RemoteIPHeaders []string + + // If set to a constant of value gin.Platform*, trusts the headers set by + // that platform, for example to determine the client IP + TrustedPlatform string + + // Value of 'maxMemory' param that is given to http.Request's ParseMultipartForm + // method call. + MaxMultipartMemory int64 + delims render.Delims secureJSONPrefix string HTMLRender render.HTMLRender @@ -132,6 +144,8 @@ type Engine struct { pool sync.Pool trees methodTrees maxParams uint16 + maxSections uint16 + trustedProxies []string trustedCIDRs []*net.IPNet } @@ -159,8 +173,7 @@ func New() *Engine { HandleMethodNotAllowed: false, ForwardedByClientIP: true, RemoteIPHeaders: []string{"X-Forwarded-For", "X-Real-IP"}, - TrustedProxies: []string{"0.0.0.0/0"}, - AppEngine: defaultAppEngine, + TrustedPlatform: defaultPlatform, UseRawPath: false, RemoveExtraSlash: false, UnescapePathValues: true, @@ -168,6 +181,8 @@ func New() *Engine { trees: make(methodTrees, 0, 9), delims: render.Delims{Left: "{{", Right: "}}"}, secureJSONPrefix: "while(1);", + trustedProxies: []string{"0.0.0.0/0"}, + trustedCIDRs: defaultTrustedCIDRs, } engine.RouterGroup.engine = engine engine.pool.New = func() interface{} { @@ -186,7 +201,8 @@ func Default() *Engine { func (engine *Engine) allocateContext() *Context { v := make(Params, 0, engine.maxParams) - return &Context{engine: engine, params: &v} + skippedNodes := make([]skippedNode, 0, engine.maxSections) + return &Context{engine: engine, params: &v, skippedNodes: &skippedNodes} } // Delims sets template left and right delims and returns a Engine instance. 
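// A minimal sketch (assumed addresses and port) of the reworked client-IP handling
// above: X-Forwarded-For entries are now evaluated right to left, and the first
// address that does not belong to a trusted proxy is returned by ClientIP().
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()

	// Only proxies in 10.0.0.0/8 may supply client-IP headers.
	_ = r.SetTrustedProxies([]string{"10.0.0.0/8"})
	// Alternatively, trust a CDN-set header directly, e.g. Cloudflare:
	// r.TrustedPlatform = gin.PlatformCloudflare

	r.GET("/ip", func(c *gin.Context) {
		// With RemoteAddr 10.0.0.1 and "X-Forwarded-For: 203.0.113.7, 10.0.0.2",
		// the right-to-left scan skips the trusted 10.0.0.2 and returns 203.0.113.7.
		c.String(http.StatusOK, c.ClientIP())
	})

	_ = r.Run(":8080")
}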
@@ -249,7 +265,7 @@ func (engine *Engine) NoRoute(handlers ...HandlerFunc) { engine.rebuild404Handlers() } -// NoMethod sets the handlers called when... TODO. +// NoMethod sets the handlers called when Engine.HandleMethodNotAllowed = true. func (engine *Engine) NoMethod(handlers ...HandlerFunc) { engine.noMethod = handlers engine.rebuild405Handlers() @@ -292,6 +308,10 @@ func (engine *Engine) addRoute(method, path string, handlers HandlersChain) { if paramsCount := countParams(path); paramsCount > engine.maxParams { engine.maxParams = paramsCount } + + if sectionsCount := countSections(path); sectionsCount > engine.maxSections { + engine.maxSections = sectionsCount + } } // Routes returns a slice of registered routes, including some useful information, such as: @@ -326,11 +346,11 @@ func iterate(path, method string, routes RoutesInfo, root *node) RoutesInfo { func (engine *Engine) Run(addr ...string) (err error) { defer func() { debugPrintError(err) }() - trustedCIDRs, err := engine.prepareTrustedCIDRs() - if err != nil { - return err + if engine.isUnsafeTrustedProxies() { + debugPrint("[WARNING] You trusted all proxies, this is NOT safe. We recommend you to set a value.\n" + + "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") } - engine.trustedCIDRs = trustedCIDRs + address := resolveAddress(addr) debugPrint("Listening and serving HTTP on %s\n", address) err = http.ListenAndServe(address, engine) @@ -338,12 +358,12 @@ func (engine *Engine) Run(addr ...string) (err error) { } func (engine *Engine) prepareTrustedCIDRs() ([]*net.IPNet, error) { - if engine.TrustedProxies == nil { + if engine.trustedProxies == nil { return nil, nil } - cidr := make([]*net.IPNet, 0, len(engine.TrustedProxies)) - for _, trustedProxy := range engine.TrustedProxies { + cidr := make([]*net.IPNet, 0, len(engine.trustedProxies)) + for _, trustedProxy := range engine.trustedProxies { if !strings.Contains(trustedProxy, "/") { ip := parseIP(trustedProxy) if ip == nil { @@ -366,6 +386,31 @@ func (engine *Engine) prepareTrustedCIDRs() ([]*net.IPNet, error) { return cidr, nil } +// SetTrustedProxies set a list of network origins (IPv4 addresses, +// IPv4 CIDRs, IPv6 addresses or IPv6 CIDRs) from which to trust +// request's headers that contain alternative client IP when +// `(*gin.Engine).ForwardedByClientIP` is `true`. `TrustedProxies` +// feature is enabled by default, and it also trusts all proxies +// by default. If you want to disable this feature, use +// Engine.SetTrustedProxies(nil), then Context.ClientIP() will +// return the remote address directly. +func (engine *Engine) SetTrustedProxies(trustedProxies []string) error { + engine.trustedProxies = trustedProxies + return engine.parseTrustedProxies() +} + +// isUnsafeTrustedProxies compares Engine.trustedCIDRs and defaultTrustedCIDRs, it's not safe if equal (returns true) +func (engine *Engine) isUnsafeTrustedProxies() bool { + return reflect.DeepEqual(engine.trustedCIDRs, defaultTrustedCIDRs) +} + +// parseTrustedProxies parse Engine.trustedProxies to Engine.trustedCIDRs +func (engine *Engine) parseTrustedProxies() error { + trustedCIDRs, err := engine.prepareTrustedCIDRs() + engine.trustedCIDRs = trustedCIDRs + return err +} + // parseIP parse a string representation of an IP and returns a net.IP with the // minimum byte representation or nil if input is invalid. 
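// A minimal sketch (assumed listen address) of disabling the feature entirely, as the
// SetTrustedProxies doc comment above and the README hunk describe: with nil, ClientIP()
// falls back to the request's RemoteAddr, and the new "[WARNING] You trusted all proxies"
// notice is not printed when the engine starts.
package main

import "github.com/gin-gonic/gin"

func main() {
	r := gin.New()
	_ = r.SetTrustedProxies(nil) // no proxy headers are consulted for the client IP
	_ = r.Run(":8080")
}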
func parseIP(ip string) net.IP { @@ -387,6 +432,11 @@ func (engine *Engine) RunTLS(addr, certFile, keyFile string) (err error) { debugPrint("Listening and serving HTTPS on %s\n", addr) defer func() { debugPrintError(err) }() + if engine.isUnsafeTrustedProxies() { + debugPrint("[WARNING] You trusted all proxies, this is NOT safe. We recommend you to set a value.\n" + + "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") + } + err = http.ListenAndServeTLS(addr, certFile, keyFile, engine) return } @@ -398,6 +448,11 @@ func (engine *Engine) RunUnix(file string) (err error) { debugPrint("Listening and serving HTTP on unix:/%s", file) defer func() { debugPrintError(err) }() + if engine.isUnsafeTrustedProxies() { + debugPrint("[WARNING] You trusted all proxies, this is NOT safe. We recommend you to set a value.\n" + + "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") + } + listener, err := net.Listen("unix", file) if err != nil { return @@ -416,6 +471,11 @@ func (engine *Engine) RunFd(fd int) (err error) { debugPrint("Listening and serving HTTP on fd@%d", fd) defer func() { debugPrintError(err) }() + if engine.isUnsafeTrustedProxies() { + debugPrint("[WARNING] You trusted all proxies, this is NOT safe. We recommend you to set a value.\n" + + "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") + } + f := os.NewFile(uintptr(fd), fmt.Sprintf("fd@%d", fd)) listener, err := net.FileListener(f) if err != nil { @@ -431,6 +491,12 @@ func (engine *Engine) RunFd(fd int) (err error) { func (engine *Engine) RunListener(listener net.Listener) (err error) { debugPrint("Listening and serving HTTP on listener what's bind with address@%s", listener.Addr()) defer func() { debugPrintError(err) }() + + if engine.isUnsafeTrustedProxies() { + debugPrint("[WARNING] You trusted all proxies, this is NOT safe. 
We recommend you to set a value.\n" + + "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") + } + err = http.Serve(listener, engine) return } @@ -479,7 +545,7 @@ func (engine *Engine) handleHTTPRequest(c *Context) { } root := t[i].root // Find route in tree - value := root.getValue(rPath, c.params, unescape) + value := root.getValue(rPath, c.params, c.skippedNodes, unescape) if value.params != nil { c.Params = *value.params } @@ -507,7 +573,7 @@ func (engine *Engine) handleHTTPRequest(c *Context) { if tree.method == httpMethod { continue } - if value := tree.root.getValue(rPath, nil, unescape); value.handlers != nil { + if value := tree.root.getValue(rPath, nil, c.skippedNodes, unescape); value.handlers != nil { c.handlers = engine.allNoMethod serveError(c, http.StatusMethodNotAllowed, default405Body) return diff --git a/vendor/github.com/gin-gonic/gin/go.mod b/vendor/github.com/gin-gonic/gin/go.mod index 884ff8517..33d546a19 100644 --- a/vendor/github.com/gin-gonic/gin/go.mod +++ b/vendor/github.com/gin-gonic/gin/go.mod @@ -12,3 +12,5 @@ require ( github.com/ugorji/go/codec v1.1.7 gopkg.in/yaml.v2 v2.2.8 ) + +retract v1.7.5 diff --git a/vendor/github.com/gin-gonic/gin/tree.go b/vendor/github.com/gin-gonic/gin/tree.go index 5eb093489..158a33908 100644 --- a/vendor/github.com/gin-gonic/gin/tree.go +++ b/vendor/github.com/gin-gonic/gin/tree.go @@ -17,6 +17,7 @@ import ( var ( strColon = []byte(":") strStar = []byte("*") + strSlash = []byte("/") ) // Param is a single URL parameter, consisting of a key and a value. @@ -98,6 +99,11 @@ func countParams(path string) uint16 { return n } +func countSections(path string) uint16 { + s := bytesconv.StringToBytes(path) + return uint16(bytes.Count(s, strSlash)) +} + type nodeType uint8 const ( @@ -394,16 +400,19 @@ type nodeValue struct { fullPath string } +type skippedNode struct { + path string + node *node + paramsCount int16 +} + // Returns the handle registered with the given path (key). The values of // wildcards are saved to a map. // If no handle can be found, a TSR (trailing slash redirect) recommendation is // made if a handle exists with an extra (without the) trailing slash for the // given path. 
-func (n *node) getValue(path string, params *Params, unescape bool) (value nodeValue) { - var ( - skippedPath string - latestNode = n // Caching the latest node - ) +func (n *node) getValue(path string, params *Params, skippedNodes *[]skippedNode, unescape bool) (value nodeValue) { + var globalParamsCount int16 walk: // Outer loop for walking the tree for { @@ -418,15 +427,20 @@ walk: // Outer loop for walking the tree if c == idxc { // strings.HasPrefix(n.children[len(n.children)-1].path, ":") == n.wildChild if n.wildChild { - skippedPath = prefix + path - latestNode = &node{ - path: n.path, - wildChild: n.wildChild, - nType: n.nType, - priority: n.priority, - children: n.children, - handlers: n.handlers, - fullPath: n.fullPath, + index := len(*skippedNodes) + *skippedNodes = (*skippedNodes)[:index+1] + (*skippedNodes)[index] = skippedNode{ + path: prefix + path, + node: &node{ + path: n.path, + wildChild: n.wildChild, + nType: n.nType, + priority: n.priority, + children: n.children, + handlers: n.handlers, + fullPath: n.fullPath, + }, + paramsCount: globalParamsCount, } } @@ -434,15 +448,26 @@ walk: // Outer loop for walking the tree continue walk } } - // If the path at the end of the loop is not equal to '/' and the current node has no child nodes - // the current node needs to be equal to the latest matching node - matched := path != "/" && !n.wildChild - if matched { - n = latestNode - } - // If there is no wildcard pattern, recommend a redirection if !n.wildChild { + // If the path at the end of the loop is not equal to '/' and the current node has no child nodes + // the current node needs to roll back to last vaild skippedNode + if path != "/" { + for l := len(*skippedNodes); l > 0; { + skippedNode := (*skippedNodes)[l-1] + *skippedNodes = (*skippedNodes)[:l-1] + if strings.HasSuffix(skippedNode.path, path) { + path = skippedNode.path + n = skippedNode.node + if value.params != nil { + *value.params = (*value.params)[:skippedNode.paramsCount] + } + globalParamsCount = skippedNode.paramsCount + continue walk + } + } + } + // Nothing found. // We can recommend to redirect to the same URL without a // trailing slash if a leaf exists for that path. @@ -452,18 +477,12 @@ walk: // Outer loop for walking the tree // Handle wildcard child, which is always at the end of the array n = n.children[len(n.children)-1] + globalParamsCount++ switch n.nType { case param: // fix truncate the parameter // tree_test.go line: 204 - if matched { - path = prefix + path - // The saved path is used after the prefix route is intercepted by matching - if n.indices == "/" { - path = skippedPath[1:] - } - } // Find param end (either '/' or path end) end := 0 @@ -472,7 +491,7 @@ walk: // Outer loop for walking the tree } // Save param value - if params != nil { + if params != nil && cap(*params) > 0 { if value.params == nil { value.params = params } @@ -500,7 +519,7 @@ walk: // Outer loop for walking the tree } // ... but we can't - value.tsr = (len(path) == end+1) + value.tsr = len(path) == end+1 return } @@ -512,7 +531,7 @@ walk: // Outer loop for walking the tree // No handle found. 
Check if a handle for this path + a // trailing slash exists for TSR recommendation n = n.children[0] - value.tsr = (n.path == "/" && n.handlers != nil) + value.tsr = n.path == "/" && n.handlers != nil } return @@ -549,9 +568,22 @@ walk: // Outer loop for walking the tree if path == prefix { // If the current path does not equal '/' and the node does not have a registered handle and the most recently matched node has a child node - // the current node needs to be equal to the latest matching node - if latestNode.wildChild && n.handlers == nil && path != "/" { - n = latestNode.children[len(latestNode.children)-1] + // the current node needs to roll back to last vaild skippedNode + if n.handlers == nil && path != "/" { + for l := len(*skippedNodes); l > 0; { + skippedNode := (*skippedNodes)[l-1] + *skippedNodes = (*skippedNodes)[:l-1] + if strings.HasSuffix(skippedNode.path, path) { + path = skippedNode.path + n = skippedNode.node + if value.params != nil { + *value.params = (*value.params)[:skippedNode.paramsCount] + } + globalParamsCount = skippedNode.paramsCount + continue walk + } + } + // n = latestNode.children[len(latestNode.children)-1] } // We should have reached the node containing the handle. // Check if this node has a handle registered. @@ -582,25 +614,29 @@ walk: // Outer loop for walking the tree return } - if path != "/" && len(skippedPath) > 0 && strings.HasSuffix(skippedPath, path) { - path = skippedPath - // Reduce the number of cycles - n, latestNode = latestNode, n - // skippedPath cannot execute - // example: - // * /:cc/cc - // call /a/cc expectations:match/200 Actual:match/200 - // call /a/dd expectations:unmatch/404 Actual: panic - // call /addr/dd/aa expectations:unmatch/404 Actual: panic - // skippedPath: It can only be executed if the secondary route is not found - skippedPath = "" - continue walk - } - // Nothing found. We can recommend to redirect to the same URL with an // extra trailing slash if a leaf exists for that path value.tsr = path == "/" || - (len(prefix) == len(path)+1 && n.handlers != nil) + (len(prefix) == len(path)+1 && prefix[len(path)] == '/' && + path == prefix[:len(prefix)-1] && n.handlers != nil) + + // roll back to last valid skippedNode + if !value.tsr && path != "/" { + for l := len(*skippedNodes); l > 0; { + skippedNode := (*skippedNodes)[l-1] + *skippedNodes = (*skippedNodes)[:l-1] + if strings.HasSuffix(skippedNode.path, path) { + path = skippedNode.path + n = skippedNode.node + if value.params != nil { + *value.params = (*value.params)[:skippedNode.paramsCount] + } + globalParamsCount = skippedNode.paramsCount + continue walk + } + } + } + return } } diff --git a/vendor/github.com/gin-gonic/gin/version.go b/vendor/github.com/gin-gonic/gin/version.go index 535bfc827..4b69b9b91 100644 --- a/vendor/github.com/gin-gonic/gin/version.go +++ b/vendor/github.com/gin-gonic/gin/version.go @@ -5,4 +5,4 @@ package gin // Version is the current gin framework's version. 
-const Version = "v1.7.3" +const Version = "v1.7.7" diff --git a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go index 325fb56a3..8e931cc4b 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go @@ -6,15 +6,16 @@ import ( "go/token" "strconv" - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" "github.com/go-toolsmith/typep" "golang.org/x/tools/go/ast/astutil" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" ) func init() { diff --git a/vendor/github.com/go-critic/go-critic/checkers/checkers.go b/vendor/github.com/go-critic/go-critic/checkers/checkers.go index 7ce829d36..0c2ebc00c 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/checkers.go +++ b/vendor/github.com/go-critic/go-critic/checkers/checkers.go @@ -2,15 +2,9 @@ package checkers import ( - "fmt" - "go/ast" - "go/build" - "go/token" "os" - "github.com/go-critic/go-critic/checkers/rulesdata" "github.com/go-critic/go-critic/framework/linter" - "github.com/quasilyte/go-ruleguard/ruleguard" ) var collection = &linter.CheckerCollection{ @@ -23,93 +17,3 @@ var debug = func() func() bool { return v } }() - -//go:generate go run ./rules/precompile.go -rules ./rules/rules.go -o ./rulesdata/rulesdata.go - -func init() { - filename := "rules/rules.go" - - fset := token.NewFileSet() - var groups []ruleguard.GoRuleGroup - - var buildContext *build.Context - - ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" - - // First we create an Engine to parse all rules. - // We need it to get the structured info about our rules - // that will be used to generate checkers. - // We introduce an extra scope in hope that rootEngine - // will be garbage-collected after we don't need it. - // LoadedGroups() returns a slice copy and that's all what we need. - { - rootEngine := ruleguard.NewEngine() - rootEngine.InferBuildContext() - buildContext = rootEngine.BuildContext - - loadContext := &ruleguard.LoadContext{ - Fset: fset, - DebugImports: ruleguardDebug, - DebugPrint: func(s string) { - fmt.Println("debug:", s) - }, - } - if err := rootEngine.LoadFromIR(loadContext, filename, rulesdata.PrecompiledRules); err != nil { - panic(fmt.Sprintf("load embedded ruleguard rules: %v", err)) - } - groups = rootEngine.LoadedGroups() - } - - // For every rules group we create a new checker and a separate engine. - // That dedicated ruleguard engine will contain rules only from one group. 
- for i := range groups { - g := groups[i] - info := &linter.CheckerInfo{ - Name: g.Name, - Summary: g.DocSummary, - Before: g.DocBefore, - After: g.DocAfter, - Note: g.DocNote, - Tags: g.DocTags, - - EmbeddedRuleguard: true, - } - collection.AddChecker(info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - parseContext := &ruleguard.LoadContext{ - Fset: fset, - GroupFilter: func(name string) bool { - return name == g.Name - }, - DebugImports: ruleguardDebug, - DebugPrint: func(s string) { - fmt.Println("debug:", s) - }, - } - engine := ruleguard.NewEngine() - engine.BuildContext = buildContext - err := engine.LoadFromIR(parseContext, filename, rulesdata.PrecompiledRules) - if err != nil { - return nil, err - } - c := &embeddedRuleguardChecker{ - ctx: ctx, - engine: engine, - } - return c, nil - }) - } -} - -type embeddedRuleguardChecker struct { - ctx *linter.CheckerContext - engine *ruleguard.Engine -} - -func (c *embeddedRuleguardChecker) WalkFile(f *ast.File) { - runRuleguardEngine(c.ctx, f, c.engine, &ruleguard.RunContext{ - Pkg: c.ctx.Pkg, - Types: c.ctx.TypesInfo, - Sizes: c.ctx.SizesInfo, - Fset: c.ctx.FileSet, - }) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go index 513eb246d..f330b723a 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go @@ -20,19 +20,30 @@ func init() { info.After = `// This is a comment` collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - parts := []string{ - `^//go:generate .*$`, // e.g.: go:generate value - `^//[\w-]+:.*$`, // e.g.: key: value - `^//nolint\b`, // e.g.: nolint - `^//line /.*:\d+`, // e.g.: line /path/to/file:123 - `^//export \w+$`, // e.g.: export Foo - `^//[/+#-]+.*$`, // e.g.: vertical breaker ///////////// + regexpPatterns := []*regexp.Regexp{ + regexp.MustCompile(`^//[\w-]+:.*$`), // e.g.: key: value } - pat := "(?m)" + strings.Join(parts, "|") - pragmaRE := regexp.MustCompile(pat) + equalPatterns := []string{ + "//nolint", + } + parts := []string{ + "//go:generate ", // e.g.: go:generate value + "//line /", // e.g.: line /path/to/file:123 + "//nolint ", // e.g.: nolint + "//noinspection ", // e.g.: noinspection ALL, some GoLand and friends versions + "//export ", // e.g.: export Foo + "///", // e.g.: vertical breaker ///////////// + "//+", + "//#", + "//-", + "//!", + } + return astwalk.WalkerForComment(&commentFormattingChecker{ - ctx: ctx, - pragmaRE: pragmaRE, + ctx: ctx, + partPatterns: parts, + equalPatterns: equalPatterns, + regexpPatterns: regexpPatterns, }), nil }) } @@ -41,19 +52,43 @@ type commentFormattingChecker struct { astwalk.WalkHandler ctx *linter.CheckerContext - pragmaRE *regexp.Regexp + partPatterns []string + equalPatterns []string + regexpPatterns []*regexp.Regexp } func (c *commentFormattingChecker) VisitComment(cg *ast.CommentGroup) { if strings.HasPrefix(cg.List[0].Text, "/*") { return } + +outerLoop: for _, comment := range cg.List { - if len(comment.Text) <= len("// ") { + commentLen := len(comment.Text) + if commentLen <= len("// ") { continue } - if c.pragmaRE.MatchString(comment.Text) { - continue + + for _, p := range c.partPatterns { + if commentLen < len(p) { + continue + } + + if strings.EqualFold(comment.Text[:len(p)], p) { + continue outerLoop + } + } + + for _, p := range c.equalPatterns { + if 
strings.EqualFold(comment.Text, p) { + continue outerLoop + } + } + + for _, p := range c.regexpPatterns { + if p.MatchString(comment.Text) { + continue outerLoop + } } // Make a decision based on a first comment text rune. @@ -76,5 +111,9 @@ func (c *commentFormattingChecker) specialChar(r rune) bool { } func (c *commentFormattingChecker) warn(comment *ast.Comment) { - c.ctx.Warn(comment, "put a space between `//` and comment text") + c.ctx.WarnFixable(comment, linter.QuickFix{ + From: comment.Pos(), + To: comment.End(), + Replacement: []byte(strings.Replace(comment.Text, "//", "// ", 1)), + }, "put a space between `//` and comment text") } diff --git a/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go new file mode 100644 index 000000000..da90fe67a --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go @@ -0,0 +1,70 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "deferInLoop" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects loops inside functions that use defer" + info.Before = ` +for _, filename := range []string{"foo", "bar"} { + f, err := os.Open(filename) + + defer f.Close() +} +` + info.After = ` +func process(filename string) { + f, err := os.Open(filename) + + defer f.Close() +} +/* ... */ +for _, filename := range []string{"foo", "bar"} { + process(filename) +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(&deferInLoopChecker{ctx: ctx}), nil + }) +} + +type deferInLoopChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + inFor bool +} + +func (c *deferInLoopChecker) VisitFuncDecl(fn *ast.FuncDecl) { + ast.Inspect(fn.Body, c.traversalFunc) +} + +func (c deferInLoopChecker) traversalFunc(cur ast.Node) bool { + switch n := cur.(type) { + case *ast.DeferStmt: + if c.inFor { + c.warn(n) + } + case *ast.RangeStmt, *ast.ForStmt: + if !c.inFor { + ast.Inspect(cur, deferInLoopChecker{ctx: c.ctx, inFor: true}.traversalFunc) + return false + } + case *ast.FuncLit: + ast.Inspect(n.Body, deferInLoopChecker{ctx: c.ctx, inFor: false}.traversalFunc) + return false + case nil: + return false + } + return true +} + +func (c *deferInLoopChecker) warn(cause *ast.DeferStmt) { + c.ctx.Warn(cause, "Possible resource leak, 'defer' is called in the 'for' loop") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go index f60e58b58..0eb507237 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go @@ -2,7 +2,6 @@ package checkers import ( "go/ast" - "regexp" "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" @@ -24,12 +23,15 @@ func FuncOld() int` collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { c := &deprecatedCommentChecker{ctx: ctx} - c.commonPatterns = []*regexp.Regexp{ - regexp.MustCompile(`(?i)this (?:function|type) is deprecated`), - regexp.MustCompile(`(?i)deprecated[.!]? 
use \S* instead`), - regexp.MustCompile(`(?i)\[\[deprecated\]\].*`), - regexp.MustCompile(`(?i)note: deprecated\b.*`), - regexp.MustCompile(`(?i)deprecated in.*`), + c.commonPatterns = []string{ + "this type is deprecated", + "this function is deprecated", + "[[deprecated]]", + "note: deprecated", + "deprecated in", + "deprecated. use", + "deprecated! use", + "deprecated use", // TODO(quasilyte): more of these? } @@ -41,6 +43,7 @@ func FuncOld() int` "Dprecated: ", "Derecated: ", "Depecated: ", + "Depekated: ", "Deprcated: ", "Depreated: ", "Deprected: ", @@ -63,7 +66,7 @@ type deprecatedCommentChecker struct { astwalk.WalkHandler ctx *linter.CheckerContext - commonPatterns []*regexp.Regexp + commonPatterns []string commonTypos []string } @@ -114,7 +117,11 @@ func (c *deprecatedCommentChecker) VisitDocComment(doc *ast.CommentGroup) { // Check for other commonly used patterns. for _, pat := range c.commonPatterns { - if pat.MatchString(l) { + if len(l) < len(pat) { + continue + } + + if strings.EqualFold(l[:len(pat)], pat) { c.warnPattern(comment) return } diff --git a/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go b/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go new file mode 100644 index 000000000..e37fdf14a --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go @@ -0,0 +1,103 @@ +package checkers + +import ( + "fmt" + "go/ast" + "go/build" + "go/token" + "os" + + "github.com/go-critic/go-critic/checkers/rulesdata" + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/go-ruleguard/ruleguard" +) + +//go:generate go run ./rules/precompile.go -rules ./rules/rules.go -o ./rulesdata/rulesdata.go + +func init() { + filename := "rules/rules.go" + + fset := token.NewFileSet() + var groups []ruleguard.GoRuleGroup + + var buildContext *build.Context + + ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" + + // First we create an Engine to parse all rules. + // We need it to get the structured info about our rules + // that will be used to generate checkers. + // We introduce an extra scope in hope that rootEngine + // will be garbage-collected after we don't need it. + // LoadedGroups() returns a slice copy and that's all what we need. + { + rootEngine := ruleguard.NewEngine() + rootEngine.InferBuildContext() + buildContext = rootEngine.BuildContext + + loadContext := &ruleguard.LoadContext{ + Fset: fset, + DebugImports: ruleguardDebug, + DebugPrint: func(s string) { + fmt.Println("debug:", s) + }, + } + if err := rootEngine.LoadFromIR(loadContext, filename, rulesdata.PrecompiledRules); err != nil { + panic(fmt.Sprintf("load embedded ruleguard rules: %v", err)) + } + groups = rootEngine.LoadedGroups() + } + + // For every rules group we create a new checker and a separate engine. + // That dedicated ruleguard engine will contain rules only from one group. 
+ for i := range groups { + g := groups[i] + info := &linter.CheckerInfo{ + Name: g.Name, + Summary: g.DocSummary, + Before: g.DocBefore, + After: g.DocAfter, + Note: g.DocNote, + Tags: g.DocTags, + + EmbeddedRuleguard: true, + } + collection.AddChecker(info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + parseContext := &ruleguard.LoadContext{ + Fset: fset, + GroupFilter: func(name string) bool { + return name == g.Name + }, + DebugImports: ruleguardDebug, + DebugPrint: func(s string) { + fmt.Println("debug:", s) + }, + } + engine := ruleguard.NewEngine() + engine.BuildContext = buildContext + err := engine.LoadFromIR(parseContext, filename, rulesdata.PrecompiledRules) + if err != nil { + return nil, err + } + c := &embeddedRuleguardChecker{ + ctx: ctx, + engine: engine, + } + return c, nil + }) + } +} + +type embeddedRuleguardChecker struct { + ctx *linter.CheckerContext + engine *ruleguard.Engine +} + +func (c *embeddedRuleguardChecker) WalkFile(f *ast.File) { + runRuleguardEngine(c.ctx, f, c.engine, &ruleguard.RunContext{ + Pkg: c.ctx.Pkg, + Types: c.ctx.TypesInfo, + Sizes: c.ctx.SizesInfo, + Fset: c.ctx.FileSet, + }) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go index 3c0a95afc..a6d0ad7c4 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go @@ -7,21 +7,35 @@ import ( ) // FindNode applies pred for root and all it's childs until it returns true. +// If followFunc is defined, it's called before following any node to check whether it needs to be followed. +// followFunc has to return true in order to continuing traversing the node and return false otherwise. // Matched node is returned. // If none of the nodes matched predicate, nil is returned. -func FindNode(root ast.Node, pred func(ast.Node) bool) ast.Node { - var found ast.Node - astutil.Apply(root, nil, func(cur *astutil.Cursor) bool { - if pred(cur.Node()) { - found = cur.Node() - return false +func FindNode(root ast.Node, followFunc, pred func(ast.Node) bool) ast.Node { + var ( + found ast.Node + preFunc func(*astutil.Cursor) bool + ) + + if followFunc != nil { + preFunc = func(cur *astutil.Cursor) bool { + return followFunc(cur.Node()) } - return true - }) + } + + astutil.Apply(root, + preFunc, + func(cur *astutil.Cursor) bool { + if pred(cur.Node()) { + found = cur.Node() + return false + } + return true + }) return found } // ContainsNode reports whether `FindNode(root, pred)!=nil`. func ContainsNode(root ast.Node, pred func(ast.Node) bool) bool { - return FindNode(root, pred) != nil + return FindNode(root, nil, pred) != nil } diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go index d65669fdd..41d8754ae 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -12,8 +12,9 @@ import ( "sort" "strings" - "github.com/go-critic/go-critic/framework/linter" "github.com/quasilyte/go-ruleguard/ruleguard" + + "github.com/go-critic/go-critic/framework/linter" ) func init() { @@ -41,6 +42,14 @@ If flag is set, the value must be a comma-separated list of error conditions. * 'import': rule refers to a package that cannot be loaded. 
* 'dsl': gorule file does not comply with the ruleguard DSL.`, }, + "enable": { + Value: "", + Usage: "comma-separated list of enabled groups or skip empty to enable everything", + }, + "disable": { + Value: "", + Usage: "comma-separated list of disabled groups or skip empty to enable everything", + }, } info.Summary = "Runs user-defined rules using ruleguard linter" info.Details = "Reads a rules file and turns them into go-critic checkers." @@ -124,13 +133,43 @@ func newRuleguardChecker(info *linter.CheckerInfo, ctx *linter.CheckerContext) ( fset := token.NewFileSet() filePatterns := strings.Split(rulesFlag, ",") + enabledGroups := make(map[string]bool) + disabledGroups := make(map[string]bool) + + for _, g := range strings.Split(info.Params.String("disable"), ",") { + g = strings.TrimSpace(g) + disabledGroups[g] = true + } + flagEnable := info.Params.String("enable") + if flagEnable != "" { + for _, g := range strings.Split(flagEnable, ",") { + g = strings.TrimSpace(g) + enabledGroups[g] = true + } + } ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" loadContext := &ruleguard.LoadContext{ Fset: fset, DebugImports: ruleguardDebug, - DebugPrint: func(s string) { - fmt.Println("debug:", s) + DebugPrint: debugPrint, + GroupFilter: func(g string) bool { + whyDisabled := "" + enabled := flagEnable == "" || enabledGroups[g] + switch { + case !enabled: + whyDisabled = "not enabled by -enabled flag" + case disabledGroups[g]: + whyDisabled = "disabled by -disable flag" + } + if ruleguardDebug { + if whyDisabled != "" { + debugPrint(fmt.Sprintf("(-) %s is %s", g, whyDisabled)) + } else { + debugPrint(fmt.Sprintf("(+) %s is enabled", g)) + } + } + return whyDisabled == "" }, } @@ -201,13 +240,14 @@ func runRuleguardEngine(ctx *linter.CheckerContext, f *ast.File, e *ruleguard.En } var reports []ruleguardReport - runCtx.Report = func(_ ruleguard.GoRuleInfo, n ast.Node, msg string, fix *ruleguard.Suggestion) { + runCtx.Report = func(data *ruleguard.ReportData) { // TODO(quasilyte): investigate whether we should add a rule name as // a message prefix here. 
r := ruleguardReport{ - node: n, - message: msg, + node: data.Node, + message: data.Message, } + fix := data.Suggestion if fix != nil { r.fix = linter.QuickFix{ From: fix.From, @@ -236,3 +276,7 @@ func runRuleguardEngine(ctx *linter.CheckerContext, f *ast.File, e *ruleguard.En } } } + +func debugPrint(s string) { + fmt.Println("debug:", s) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go index c53265e31..8a952b7c2 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go +++ b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go @@ -436,28 +436,30 @@ var PrecompiledRules = &ir.File{ ir.FilterExpr{Line: 119, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, }, }, + LocationVar: "nil", }, ir.Rule{ - Line: 123, + Line: 124, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 123, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}, + ir.PatternString{Line: 124, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}, }, ReportTemplate: "http.NoBody should be preferred to the nil request body", SuggestTemplate: "http.NewRequestWithContext($ctx, $method, $url, http.NoBody)", WhereExpr: ir.FilterExpr{ - Line: 124, + Line: 125, Op: ir.FilterEqOp, Src: "m[\"nil\"].Text == \"nil\"", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 124, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - ir.FilterExpr{Line: 124, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, + ir.FilterExpr{Line: 125, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, + ir.FilterExpr{Line: 125, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, }, }, + LocationVar: "nil", }, }, }, ir.RuleGroup{ - Line: 134, + Line: 136, Name: "preferDecodeRune", MatcherName: "m", DocTags: []string{ @@ -470,25 +472,25 @@ var PrecompiledRules = &ir.File{ DocNote: "See Go issue for details: https://github.com/golang/go/issues/45260", Rules: []ir.Rule{ ir.Rule{ - Line: 135, + Line: 137, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 135, Value: "[]rune($s)[0]"}, + ir.PatternString{Line: 137, Value: "[]rune($s)[0]"}, }, ReportTemplate: "consider replacing $$ with utf8.DecodeRuneInString($s)", WhereExpr: ir.FilterExpr{ - Line: 136, + Line: 138, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 136, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 138, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, }, }, ir.RuleGroup{ - Line: 144, + Line: 146, Name: "sloppyLen", MatcherName: "m", DocTags: []string{ @@ -499,30 +501,30 @@ var PrecompiledRules = &ir.File{ DocAfter: "len(arr) == 0", Rules: []ir.Rule{ ir.Rule{ - Line: 145, + Line: 147, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 145, Value: "len($_) >= 0"}, + ir.PatternString{Line: 147, Value: "len($_) >= 0"}, }, ReportTemplate: "$$ is always true", }, ir.Rule{ - Line: 146, + Line: 148, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 146, Value: "len($_) < 0"}, + ir.PatternString{Line: 148, Value: "len($_) < 0"}, }, ReportTemplate: "$$ is always false", }, ir.Rule{ - Line: 147, + Line: 149, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 147, Value: "len($x) <= 0"}, + ir.PatternString{Line: 149, Value: "len($x) <= 0"}, }, ReportTemplate: "$$ can be len($x) == 0", }, }, }, ir.RuleGroup{ - Line: 154, + Line: 156, Name: "valSwap", 
MatcherName: "m", DocTags: []string{ @@ -533,16 +535,16 @@ var PrecompiledRules = &ir.File{ DocAfter: "*x, *y = *y, *x", Rules: []ir.Rule{ ir.Rule{ - Line: 155, + Line: 157, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 155, Value: "$tmp := $y; $y = $x; $x = $tmp"}, + ir.PatternString{Line: 157, Value: "$tmp := $y; $y = $x; $x = $tmp"}, }, ReportTemplate: "can re-write as `$y, $x = $x, $y`", }, }, }, ir.RuleGroup{ - Line: 163, + Line: 165, Name: "switchTrue", MatcherName: "m", DocTags: []string{ @@ -553,23 +555,23 @@ var PrecompiledRules = &ir.File{ DocAfter: "switch {...}", Rules: []ir.Rule{ ir.Rule{ - Line: 164, + Line: 166, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 164, Value: "switch true { $*_ }"}, + ir.PatternString{Line: 166, Value: "switch true { $*_ }"}, }, ReportTemplate: "replace 'switch true {}' with 'switch {}'", }, ir.Rule{ - Line: 166, + Line: 168, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 166, Value: "switch $x; true { $*_ }"}, + ir.PatternString{Line: 168, Value: "switch $x; true { $*_ }"}, }, ReportTemplate: "replace 'switch $x; true {}' with 'switch $x; {}'", }, }, }, ir.RuleGroup{ - Line: 174, + Line: 176, Name: "flagDeref", MatcherName: "m", DocTags: []string{ @@ -580,65 +582,65 @@ var PrecompiledRules = &ir.File{ DocAfter: "var b bool; flag.BoolVar(&b, \"b\", false, \"b docs\")", Rules: []ir.Rule{ ir.Rule{ - Line: 175, + Line: 177, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 175, Value: "*flag.Bool($*_)"}, + ir.PatternString{Line: 177, Value: "*flag.Bool($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.BoolVar", }, ir.Rule{ - Line: 176, + Line: 178, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 176, Value: "*flag.Duration($*_)"}, + ir.PatternString{Line: 178, Value: "*flag.Duration($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.DurationVar", }, ir.Rule{ - Line: 177, + Line: 179, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 177, Value: "*flag.Float64($*_)"}, + ir.PatternString{Line: 179, Value: "*flag.Float64($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Float64Var", }, ir.Rule{ - Line: 178, + Line: 180, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 178, Value: "*flag.Int($*_)"}, + ir.PatternString{Line: 180, Value: "*flag.Int($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.IntVar", }, ir.Rule{ - Line: 179, + Line: 181, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 179, Value: "*flag.Int64($*_)"}, + ir.PatternString{Line: 181, Value: "*flag.Int64($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Int64Var", }, ir.Rule{ - Line: 180, + Line: 182, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 180, Value: "*flag.String($*_)"}, + ir.PatternString{Line: 182, Value: "*flag.String($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.StringVar", }, ir.Rule{ - Line: 181, + Line: 183, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 181, Value: "*flag.Uint($*_)"}, + ir.PatternString{Line: 183, Value: "*flag.Uint($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.UintVar", }, ir.Rule{ - Line: 182, + Line: 184, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 182, Value: "*flag.Uint64($*_)"}, + 
ir.PatternString{Line: 184, Value: "*flag.Uint64($*_)"}, }, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Uint64Var", }, }, }, ir.RuleGroup{ - Line: 189, + Line: 191, Name: "emptyStringTest", MatcherName: "m", DocTags: []string{ @@ -650,41 +652,41 @@ var PrecompiledRules = &ir.File{ DocAfter: "s == \"\"", Rules: []ir.Rule{ ir.Rule{ - Line: 190, + Line: 192, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 190, Value: "len($s) != 0"}, + ir.PatternString{Line: 192, Value: "len($s) != 0"}, }, ReportTemplate: "replace `$$` with `$s != \"\"`", WhereExpr: ir.FilterExpr{ - Line: 191, + Line: 193, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 191, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 193, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, ir.Rule{ - Line: 194, + Line: 196, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 194, Value: "len($s) == 0"}, + ir.PatternString{Line: 196, Value: "len($s) == 0"}, }, ReportTemplate: "replace `$$` with `$s == \"\"`", WhereExpr: ir.FilterExpr{ - Line: 195, + Line: 197, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 195, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 197, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, }, }, ir.RuleGroup{ - Line: 203, + Line: 205, Name: "stringXbytes", MatcherName: "m", DocTags: []string{ @@ -695,53 +697,36 @@ var PrecompiledRules = &ir.File{ DocAfter: "copy(b, s)", Rules: []ir.Rule{ ir.Rule{ - Line: 204, + Line: 206, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 204, Value: "copy($_, []byte($s))"}, + ir.PatternString{Line: 206, Value: "copy($_, []byte($s))"}, }, ReportTemplate: "can simplify `[]byte($s)` to `$s`", }, ir.Rule{ - Line: 206, + Line: 208, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 206, Value: "string($b) == \"\""}, + ir.PatternString{Line: 208, Value: "string($b) == \"\""}, }, ReportTemplate: "suggestion: len($b) == 0", SuggestTemplate: "len($b) == 0", WhereExpr: ir.FilterExpr{ - Line: 206, + Line: 208, Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 206, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, - }, - }, - ir.Rule{ - Line: 207, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 207, Value: "string($b) != \"\""}, - }, - ReportTemplate: "suggestion: len($b) != 0", - SuggestTemplate: "len($b) != 0", - WhereExpr: ir.FilterExpr{ - Line: 207, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"b\"].Type.Is(`[]byte`)", - Value: "b", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 207, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, + ir.FilterExpr{Line: 208, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, }, }, }, ir.Rule{ Line: 209, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 209, Value: "len(string($b))"}, + ir.PatternString{Line: 209, Value: "string($b) != \"\""}, }, - ReportTemplate: "suggestion: len($b)", - SuggestTemplate: "len($b)", + ReportTemplate: "suggestion: len($b) != 0", + SuggestTemplate: "len($b) != 0", WhereExpr: ir.FilterExpr{ Line: 209, Op: ir.FilterVarTypeIsOp, @@ -755,163 +740,180 @@ var PrecompiledRules = &ir.File{ ir.Rule{ Line: 211, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 211, Value: "string($x) == 
string($y)"}, + ir.PatternString{Line: 211, Value: "len(string($b))"}, + }, + ReportTemplate: "suggestion: len($b)", + SuggestTemplate: "len($b)", + WhereExpr: ir.FilterExpr{ + Line: 211, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"b\"].Type.Is(`[]byte`)", + Value: "b", + Args: []ir.FilterExpr{ + ir.FilterExpr{Line: 211, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, + }, + }, + }, + ir.Rule{ + Line: 213, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 213, Value: "string($x) == string($y)"}, }, ReportTemplate: "suggestion: bytes.Equal($x, $y)", SuggestTemplate: "bytes.Equal($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 212, + Line: 214, Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 212, + Line: 214, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]byte`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 212, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, + ir.FilterExpr{Line: 214, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, }, }, ir.FilterExpr{ - Line: 212, + Line: 214, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`[]byte`)", Value: "y", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 212, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, + ir.FilterExpr{Line: 214, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, }, }, }, }, }, ir.Rule{ - Line: 215, + Line: 217, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 215, Value: "string($x) != string($y)"}, + ir.PatternString{Line: 217, Value: "string($x) != string($y)"}, }, ReportTemplate: "suggestion: !bytes.Equal($x, $y)", SuggestTemplate: "!bytes.Equal($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 216, + Line: 218, Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 216, + Line: 218, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]byte`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 216, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, + ir.FilterExpr{Line: 218, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, }, }, ir.FilterExpr{ - Line: 216, + Line: 218, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`[]byte`)", Value: "y", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 216, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, + ir.FilterExpr{Line: 218, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, }, }, }, }, }, ir.Rule{ - Line: 219, + Line: 221, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 219, Value: "$re.Match([]byte($s))"}, + ir.PatternString{Line: 221, Value: "$re.Match([]byte($s))"}, }, ReportTemplate: "suggestion: $re.MatchString($s)", SuggestTemplate: "$re.MatchString($s)", WhereExpr: ir.FilterExpr{ - Line: 220, + Line: 222, Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 220, + Line: 222, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 220, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, + ir.FilterExpr{Line: 222, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, }, }, ir.FilterExpr{ - Line: 220, + Line: 222, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 220, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + 
ir.FilterExpr{Line: 222, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, }, }, ir.Rule{ - Line: 223, + Line: 225, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 223, Value: "$re.FindIndex([]byte($s))"}, + ir.PatternString{Line: 225, Value: "$re.FindIndex([]byte($s))"}, }, ReportTemplate: "suggestion: $re.FindStringIndex($s)", SuggestTemplate: "$re.FindStringIndex($s)", WhereExpr: ir.FilterExpr{ - Line: 224, + Line: 226, Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 224, + Line: 226, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 224, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, + ir.FilterExpr{Line: 226, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, }, }, ir.FilterExpr{ - Line: 224, + Line: 226, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 224, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 226, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, }, }, ir.Rule{ - Line: 227, + Line: 229, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 227, Value: "$re.FindAllIndex([]byte($s), $n)"}, + ir.PatternString{Line: 229, Value: "$re.FindAllIndex([]byte($s), $n)"}, }, ReportTemplate: "suggestion: $re.FindAllStringIndex($s, $n)", SuggestTemplate: "$re.FindAllStringIndex($s, $n)", WhereExpr: ir.FilterExpr{ - Line: 228, + Line: 230, Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 228, + Line: 230, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 228, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, + ir.FilterExpr{Line: 230, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, }, }, ir.FilterExpr{ - Line: 228, + Line: 230, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 228, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 230, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, @@ -920,7 +922,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 237, + Line: 239, Name: "indexAlloc", MatcherName: "m", DocTags: []string{ @@ -932,25 +934,25 @@ var PrecompiledRules = &ir.File{ DocNote: "See Go issue for details: https://github.com/golang/go/issues/25864", Rules: []ir.Rule{ ir.Rule{ - Line: 238, + Line: 240, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 238, Value: "strings.Index(string($x), $y)"}, + ir.PatternString{Line: 240, Value: "strings.Index(string($x), $y)"}, }, ReportTemplate: "consider replacing $$ with bytes.Index($x, []byte($y))", WhereExpr: ir.FilterExpr{ - Line: 239, + Line: 241, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 239, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 239, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + ir.FilterExpr{Line: 241, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + ir.FilterExpr{Line: 241, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, }, }, }, ir.RuleGroup{ - Line: 247, + Line: 
249, Name: "wrapperFunc", MatcherName: "m", DocTags: []string{ @@ -961,111 +963,111 @@ var PrecompiledRules = &ir.File{ DocAfter: "wg.Done()", Rules: []ir.Rule{ ir.Rule{ - Line: 248, + Line: 250, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 248, Value: "$wg.Add(-1)"}, + ir.PatternString{Line: 250, Value: "$wg.Add(-1)"}, }, ReportTemplate: "use WaitGroup.Done method in `$$`", WhereExpr: ir.FilterExpr{ - Line: 249, + Line: 251, Op: ir.FilterVarTypeIsOp, Src: "m[\"wg\"].Type.Is(`sync.WaitGroup`)", Value: "wg", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 249, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}, + ir.FilterExpr{Line: 251, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}, }, }, }, ir.Rule{ - Line: 252, + Line: 254, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 252, Value: "$buf.Truncate(0)"}, + ir.PatternString{Line: 254, Value: "$buf.Truncate(0)"}, }, ReportTemplate: "use Buffer.Reset method in `$$`", WhereExpr: ir.FilterExpr{ - Line: 253, + Line: 255, Op: ir.FilterVarTypeIsOp, Src: "m[\"buf\"].Type.Is(`bytes.Buffer`)", Value: "buf", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 253, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}, + ir.FilterExpr{Line: 255, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}, }, }, }, ir.Rule{ - Line: 256, + Line: 258, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 256, Value: "http.HandlerFunc(http.NotFound)"}, + ir.PatternString{Line: 258, Value: "http.HandlerFunc(http.NotFound)"}, }, ReportTemplate: "use http.NotFoundHandler method in `$$`", }, ir.Rule{ - Line: 258, + Line: 260, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 258, Value: "strings.SplitN($_, $_, -1)"}, + ir.PatternString{Line: 260, Value: "strings.SplitN($_, $_, -1)"}, }, ReportTemplate: "use strings.Split method in `$$`", }, ir.Rule{ - Line: 259, + Line: 261, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 259, Value: "strings.Replace($_, $_, $_, -1)"}, + ir.PatternString{Line: 261, Value: "strings.Replace($_, $_, $_, -1)"}, }, ReportTemplate: "use strings.ReplaceAll method in `$$`", }, ir.Rule{ - Line: 260, + Line: 262, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 260, Value: "strings.Map(unicode.ToTitle, $_)"}, + ir.PatternString{Line: 262, Value: "strings.Map(unicode.ToTitle, $_)"}, }, ReportTemplate: "use strings.ToTitle method in `$$`", }, ir.Rule{ - Line: 262, + Line: 264, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 262, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}, + ir.PatternString{Line: 264, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}, }, ReportTemplate: "use bytes.Split method in `$$`", }, ir.Rule{ - Line: 263, + Line: 265, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 263, Value: "bytes.Replace($_, $_, $_, -1)"}, + ir.PatternString{Line: 265, Value: "bytes.Replace($_, $_, $_, -1)"}, }, ReportTemplate: "use bytes.ReplaceAll method in `$$`", }, ir.Rule{ - Line: 264, + Line: 266, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 264, Value: "bytes.Map(unicode.ToUpper, $_)"}, + ir.PatternString{Line: 266, Value: "bytes.Map(unicode.ToUpper, $_)"}, }, ReportTemplate: "use bytes.ToUpper method in `$$`", }, ir.Rule{ - Line: 265, + Line: 267, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 265, Value: "bytes.Map(unicode.ToLower, $_)"}, + ir.PatternString{Line: 267, Value: "bytes.Map(unicode.ToLower, $_)"}, }, ReportTemplate: "use bytes.ToLower 
method in `$$`", }, ir.Rule{ - Line: 266, + Line: 268, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 266, Value: "bytes.Map(unicode.ToTitle, $_)"}, + ir.PatternString{Line: 268, Value: "bytes.Map(unicode.ToTitle, $_)"}, }, ReportTemplate: "use bytes.ToTitle method in `$$`", }, ir.Rule{ - Line: 268, + Line: 270, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 268, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}, + ir.PatternString{Line: 270, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}, }, ReportTemplate: "use draw.Draw method in `$$`", }, }, }, ir.RuleGroup{ - Line: 276, + Line: 278, Name: "regexpMust", MatcherName: "m", DocTags: []string{ @@ -1076,26 +1078,26 @@ var PrecompiledRules = &ir.File{ DocAfter: "re := regexp.MustCompile(\"const pattern\")", Rules: []ir.Rule{ ir.Rule{ - Line: 277, + Line: 279, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 277, Value: "regexp.Compile($pat)"}, + ir.PatternString{Line: 279, Value: "regexp.Compile($pat)"}, }, ReportTemplate: "for const patterns like $pat, use regexp.MustCompile", WhereExpr: ir.FilterExpr{ - Line: 278, + Line: 280, Op: ir.FilterVarConstOp, Src: "m[\"pat\"].Const", Value: "pat", }, }, ir.Rule{ - Line: 281, + Line: 283, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 281, Value: "regexp.CompilePOSIX($pat)"}, + ir.PatternString{Line: 283, Value: "regexp.CompilePOSIX($pat)"}, }, ReportTemplate: "for const patterns like $pat, use regexp.MustCompilePOSIX", WhereExpr: ir.FilterExpr{ - Line: 282, + Line: 284, Op: ir.FilterVarConstOp, Src: "m[\"pat\"].Const", Value: "pat", @@ -1104,7 +1106,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 290, + Line: 292, Name: "badCall", MatcherName: "m", DocTags: []string{ @@ -1115,24 +1117,24 @@ var PrecompiledRules = &ir.File{ DocAfter: "strings.Replace(s, from, to, -1)", Rules: []ir.Rule{ ir.Rule{ - Line: 291, + Line: 293, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 291, Value: "strings.Replace($_, $_, $_, $zero)"}, + ir.PatternString{Line: 293, Value: "strings.Replace($_, $_, $_, $zero)"}, }, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 292, + Line: 294, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 292, + Line: 294, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, ir.FilterExpr{ - Line: 292, + Line: 294, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1142,24 +1144,24 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, ir.Rule{ - Line: 294, + Line: 296, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 294, Value: "bytes.Replace($_, $_, $_, $zero)"}, + ir.PatternString{Line: 296, Value: "bytes.Replace($_, $_, $_, $zero)"}, }, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 295, + Line: 297, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 295, + Line: 297, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, ir.FilterExpr{ - Line: 295, + Line: 297, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1169,24 +1171,24 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, ir.Rule{ - Line: 298, + Line: 300, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 298, Value: "strings.SplitN($_, $_, $zero)"}, + ir.PatternString{Line: 300, Value: "strings.SplitN($_, $_, $zero)"}, }, ReportTemplate: "suspicious 
arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 299, + Line: 301, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 299, + Line: 301, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, ir.FilterExpr{ - Line: 299, + Line: 301, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1196,24 +1198,24 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, ir.Rule{ - Line: 301, + Line: 303, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 301, Value: "bytes.SplitN($_, $_, $zero)"}, + ir.PatternString{Line: 303, Value: "bytes.SplitN($_, $_, $zero)"}, }, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 302, + Line: 304, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 302, + Line: 304, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, ir.FilterExpr{ - Line: 302, + Line: 304, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1222,24 +1224,24 @@ var PrecompiledRules = &ir.File{ }, LocationVar: "zero", }, - ir.Rule{ - Line: 305, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 305, Value: "append($_)"}, - }, - ReportTemplate: "no-op append call, probably missing arguments", - }, ir.Rule{ Line: 307, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 307, Value: "filepath.Join($_)"}, + ir.PatternString{Line: 307, Value: "append($_)"}, + }, + ReportTemplate: "no-op append call, probably missing arguments", + }, + ir.Rule{ + Line: 309, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 309, Value: "filepath.Join($_)"}, }, ReportTemplate: "suspicious Join on 1 argument", }, }, }, ir.RuleGroup{ - Line: 314, + Line: 316, Name: "assignOp", MatcherName: "m", DocTags: []string{ @@ -1250,170 +1252,171 @@ var PrecompiledRules = &ir.File{ DocAfter: "x *= 2", Rules: []ir.Rule{ ir.Rule{ - Line: 315, + Line: 317, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 315, Value: "$x = $x + 1"}, + ir.PatternString{Line: 317, Value: "$x = $x + 1"}, }, ReportTemplate: "replace `$$` with `$x++`", - WhereExpr: ir.FilterExpr{Line: 315, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - ir.Rule{ - Line: 316, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 316, Value: "$x = $x - 1"}, - }, - ReportTemplate: "replace `$$` with `$x--`", - WhereExpr: ir.FilterExpr{Line: 316, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 317, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 318, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 318, Value: "$x = $x + $y"}, + ir.PatternString{Line: 318, Value: "$x = $x - 1"}, }, - ReportTemplate: "replace `$$` with `$x += $y`", + ReportTemplate: "replace `$$` with `$x--`", WhereExpr: ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ - Line: 319, + Line: 320, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 319, Value: "$x = $x - $y"}, + ir.PatternString{Line: 320, Value: "$x = $x + $y"}, }, - ReportTemplate: "replace `$$` with `$x -= $y`", - WhereExpr: ir.FilterExpr{Line: 319, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + ReportTemplate: "replace `$$` with `$x += $y`", + WhereExpr: ir.FilterExpr{Line: 320, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 321, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 
321, Value: "$x = $x * $y"}, + ir.PatternString{Line: 321, Value: "$x = $x - $y"}, }, - ReportTemplate: "replace `$$` with `$x *= $y`", + ReportTemplate: "replace `$$` with `$x -= $y`", WhereExpr: ir.FilterExpr{Line: 321, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 322, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 322, Value: "$x = $x / $y"}, - }, - ReportTemplate: "replace `$$` with `$x /= $y`", - WhereExpr: ir.FilterExpr{Line: 322, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, ir.Rule{ Line: 323, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 323, Value: "$x = $x % $y"}, + ir.PatternString{Line: 323, Value: "$x = $x * $y"}, }, - ReportTemplate: "replace `$$` with `$x %= $y`", + ReportTemplate: "replace `$$` with `$x *= $y`", WhereExpr: ir.FilterExpr{Line: 323, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 324, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 324, Value: "$x = $x & $y"}, + ir.PatternString{Line: 324, Value: "$x = $x / $y"}, }, - ReportTemplate: "replace `$$` with `$x &= $y`", + ReportTemplate: "replace `$$` with `$x /= $y`", WhereExpr: ir.FilterExpr{Line: 324, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 325, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 325, Value: "$x = $x | $y"}, + ir.PatternString{Line: 325, Value: "$x = $x % $y"}, }, - ReportTemplate: "replace `$$` with `$x |= $y`", + ReportTemplate: "replace `$$` with `$x %= $y`", WhereExpr: ir.FilterExpr{Line: 325, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 326, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 326, Value: "$x = $x ^ $y"}, + ir.PatternString{Line: 326, Value: "$x = $x & $y"}, }, - ReportTemplate: "replace `$$` with `$x ^= $y`", + ReportTemplate: "replace `$$` with `$x &= $y`", WhereExpr: ir.FilterExpr{Line: 326, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 327, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 327, Value: "$x = $x << $y"}, + ir.PatternString{Line: 327, Value: "$x = $x | $y"}, }, - ReportTemplate: "replace `$$` with `$x <<= $y`", + ReportTemplate: "replace `$$` with `$x |= $y`", WhereExpr: ir.FilterExpr{Line: 327, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 328, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 328, Value: "$x = $x >> $y"}, + ir.PatternString{Line: 328, Value: "$x = $x ^ $y"}, }, - ReportTemplate: "replace `$$` with `$x >>= $y`", + ReportTemplate: "replace `$$` with `$x ^= $y`", WhereExpr: ir.FilterExpr{Line: 328, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ Line: 329, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 329, Value: "$x = $x &^ $y"}, + ir.PatternString{Line: 329, Value: "$x = $x << $y"}, + }, + ReportTemplate: "replace `$$` with `$x <<= $y`", + WhereExpr: ir.FilterExpr{Line: 329, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + }, + ir.Rule{ + Line: 330, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 330, Value: "$x = $x >> $y"}, + }, + ReportTemplate: "replace `$$` with `$x >>= $y`", + WhereExpr: ir.FilterExpr{Line: 330, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + }, + ir.Rule{ + Line: 331, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 331, Value: "$x = $x &^ $y"}, }, ReportTemplate: "replace `$$` with `$x &^= $y`", - WhereExpr: ir.FilterExpr{Line: 
329, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 331, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, }, }, ir.RuleGroup{ - Line: 336, + Line: 338, Name: "preferWriteByte", MatcherName: "m", DocTags: []string{ "performance", "experimental", + "opinionated", }, - DocSummary: "Detects WriteRune calls with byte literal argument and reports to use WriteByte instead", + DocSummary: "Detects WriteRune calls with rune literal argument that is single byte and reports to use WriteByte instead", DocBefore: "w.WriteRune('\\n')", DocAfter: "w.WriteByte('\\n')", Rules: []ir.Rule{ ir.Rule{ - Line: 337, + Line: 342, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 337, Value: "$w.WriteRune($c)"}, + ir.PatternString{Line: 342, Value: "$w.WriteRune($c)"}, }, - ReportTemplate: "consider replacing $$ with $w.WriteByte($c)", + ReportTemplate: "consider writing single byte rune $c with $w.WriteByte($c)", WhereExpr: ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterAndOp, - Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < 256)", + Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\")", Value: "w", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 338, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}, + ir.FilterExpr{Line: 343, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}, }, }, ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterAndOp, - Src: "(m[\"c\"].Const && m[\"c\"].Value.Int() < 256)", + Src: "(m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterVarConstOp, Src: "m[\"c\"].Const", Value: "c", }, ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterLtOp, - Src: "m[\"c\"].Value.Int() < 256", + Src: "m[\"c\"].Value.Int() < runeSelf", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterVarValueIntOp, Src: "m[\"c\"].Value.Int()", Value: "c", }, ir.FilterExpr{ - Line: 338, + Line: 343, Op: ir.FilterIntOp, - Src: "256", - Value: int64(256), + Src: "runeSelf", + Value: int64(128), }, }, }, @@ -1425,41 +1428,24 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 346, + Line: 351, Name: "preferFprint", MatcherName: "m", DocTags: []string{ "performance", "experimental", }, - DocSummary: "Detects fmt.Sprint(f|ln) calls which can be replaced with fmt.Fprint(f|ln)", + DocSummary: "Detects fmt.Sprint(f/ln) calls which can be replaced with fmt.Fprint(f/ln)", DocBefore: "w.Write([]byte(fmt.Sprintf(\"%x\", 10)))", DocAfter: "fmt.Fprintf(w, \"%x\", 10)", Rules: []ir.Rule{ ir.Rule{ - Line: 347, + Line: 352, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 347, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}, + ir.PatternString{Line: 352, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}, }, ReportTemplate: "fmt.Fprint($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprint($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 348, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 348, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}, - }, - }, - }, - ir.Rule{ - Line: 352, - SyntaxPatterns: []ir.PatternString{ - 
ir.PatternString{Line: 352, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}, - }, - ReportTemplate: "fmt.Fprintf($w, $args) should be preferred to the $$", - SuggestTemplate: "fmt.Fprintf($w, $args)", WhereExpr: ir.FilterExpr{ Line: 353, Op: ir.FilterVarTypeImplementsOp, @@ -1473,10 +1459,10 @@ var PrecompiledRules = &ir.File{ ir.Rule{ Line: 357, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 357, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}, + ir.PatternString{Line: 357, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}, }, - ReportTemplate: "fmt.Fprintln($w, $args) should be preferred to the $$", - SuggestTemplate: "fmt.Fprintln($w, $args)", + ReportTemplate: "fmt.Fprintf($w, $args) should be preferred to the $$", + SuggestTemplate: "fmt.Fprintf($w, $args)", WhereExpr: ir.FilterExpr{ Line: 358, Op: ir.FilterVarTypeImplementsOp, @@ -1487,10 +1473,51 @@ var PrecompiledRules = &ir.File{ }, }, }, + ir.Rule{ + Line: 362, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 362, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}, + }, + ReportTemplate: "fmt.Fprintln($w, $args) should be preferred to the $$", + SuggestTemplate: "fmt.Fprintln($w, $args)", + WhereExpr: ir.FilterExpr{ + Line: 363, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\")", + Value: "w", + Args: []ir.FilterExpr{ + ir.FilterExpr{Line: 363, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}, + }, + }, + }, + ir.Rule{ + Line: 367, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 367, Value: "io.WriteString($w, fmt.Sprint($*args))"}, + }, + ReportTemplate: "suggestion: fmt.Fprint($w, $args)", + SuggestTemplate: "fmt.Fprint($w, $args)", + }, + ir.Rule{ + Line: 368, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 368, Value: "io.WriteString($w, fmt.Sprintf($*args))"}, + }, + ReportTemplate: "suggestion: fmt.Fprintf($w, $args)", + SuggestTemplate: "fmt.Fprintf($w, $args)", + }, + ir.Rule{ + Line: 369, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 369, Value: "io.WriteString($w, fmt.Sprintln($*args))"}, + }, + ReportTemplate: "suggestion: fmt.Fprintln($w, $args)", + SuggestTemplate: "fmt.Fprintln($w, $args)", + }, }, }, ir.RuleGroup{ - Line: 367, + Line: 376, Name: "dupArg", MatcherName: "m", DocTags: []string{ @@ -1501,62 +1528,62 @@ var PrecompiledRules = &ir.File{ DocAfter: "copy(dst, src)", Rules: []ir.Rule{ ir.Rule{ - Line: 368, + Line: 377, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 368, Value: "$x.Equal($x)"}, - ir.PatternString{Line: 368, Value: "$x.Equals($x)"}, - ir.PatternString{Line: 368, Value: "$x.Compare($x)"}, - ir.PatternString{Line: 368, Value: "$x.Cmp($x)"}, + ir.PatternString{Line: 377, Value: "$x.Equal($x)"}, + ir.PatternString{Line: 377, Value: "$x.Equals($x)"}, + ir.PatternString{Line: 377, Value: "$x.Compare($x)"}, + ir.PatternString{Line: 377, Value: "$x.Cmp($x)"}, }, ReportTemplate: "suspicious method call with the same argument and receiver", - WhereExpr: ir.FilterExpr{Line: 369, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 378, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, ir.Rule{ - Line: 372, + Line: 381, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 372, Value: "copy($x, $x)"}, - ir.PatternString{Line: 373, Value: "math.Max($x, $x)"}, - ir.PatternString{Line: 374, Value: "math.Min($x, $x)"}, - ir.PatternString{Line: 375, Value: "reflect.Copy($x, $x)"}, - ir.PatternString{Line: 376, Value: 
"reflect.DeepEqual($x, $x)"}, - ir.PatternString{Line: 377, Value: "strings.Contains($x, $x)"}, - ir.PatternString{Line: 378, Value: "strings.Compare($x, $x)"}, - ir.PatternString{Line: 379, Value: "strings.EqualFold($x, $x)"}, - ir.PatternString{Line: 380, Value: "strings.HasPrefix($x, $x)"}, - ir.PatternString{Line: 381, Value: "strings.HasSuffix($x, $x)"}, - ir.PatternString{Line: 382, Value: "strings.Index($x, $x)"}, - ir.PatternString{Line: 383, Value: "strings.LastIndex($x, $x)"}, - ir.PatternString{Line: 384, Value: "strings.Split($x, $x)"}, - ir.PatternString{Line: 385, Value: "strings.SplitAfter($x, $x)"}, - ir.PatternString{Line: 386, Value: "strings.SplitAfterN($x, $x, $_)"}, - ir.PatternString{Line: 387, Value: "strings.SplitN($x, $x, $_)"}, - ir.PatternString{Line: 388, Value: "strings.Replace($_, $x, $x, $_)"}, - ir.PatternString{Line: 389, Value: "strings.ReplaceAll($_, $x, $x)"}, - ir.PatternString{Line: 390, Value: "bytes.Contains($x, $x)"}, - ir.PatternString{Line: 391, Value: "bytes.Compare($x, $x)"}, - ir.PatternString{Line: 392, Value: "bytes.Equal($x, $x)"}, - ir.PatternString{Line: 393, Value: "bytes.EqualFold($x, $x)"}, - ir.PatternString{Line: 394, Value: "bytes.HasPrefix($x, $x)"}, - ir.PatternString{Line: 395, Value: "bytes.HasSuffix($x, $x)"}, - ir.PatternString{Line: 396, Value: "bytes.Index($x, $x)"}, - ir.PatternString{Line: 397, Value: "bytes.LastIndex($x, $x)"}, - ir.PatternString{Line: 398, Value: "bytes.Split($x, $x)"}, - ir.PatternString{Line: 399, Value: "bytes.SplitAfter($x, $x)"}, - ir.PatternString{Line: 400, Value: "bytes.SplitAfterN($x, $x, $_)"}, - ir.PatternString{Line: 401, Value: "bytes.SplitN($x, $x, $_)"}, - ir.PatternString{Line: 402, Value: "bytes.Replace($_, $x, $x, $_)"}, - ir.PatternString{Line: 403, Value: "bytes.ReplaceAll($_, $x, $x)"}, - ir.PatternString{Line: 404, Value: "types.Identical($x, $x)"}, - ir.PatternString{Line: 405, Value: "types.IdenticalIgnoreTags($x, $x)"}, - ir.PatternString{Line: 406, Value: "draw.Draw($x, $_, $x, $_, $_)"}, + ir.PatternString{Line: 381, Value: "copy($x, $x)"}, + ir.PatternString{Line: 382, Value: "math.Max($x, $x)"}, + ir.PatternString{Line: 383, Value: "math.Min($x, $x)"}, + ir.PatternString{Line: 384, Value: "reflect.Copy($x, $x)"}, + ir.PatternString{Line: 385, Value: "reflect.DeepEqual($x, $x)"}, + ir.PatternString{Line: 386, Value: "strings.Contains($x, $x)"}, + ir.PatternString{Line: 387, Value: "strings.Compare($x, $x)"}, + ir.PatternString{Line: 388, Value: "strings.EqualFold($x, $x)"}, + ir.PatternString{Line: 389, Value: "strings.HasPrefix($x, $x)"}, + ir.PatternString{Line: 390, Value: "strings.HasSuffix($x, $x)"}, + ir.PatternString{Line: 391, Value: "strings.Index($x, $x)"}, + ir.PatternString{Line: 392, Value: "strings.LastIndex($x, $x)"}, + ir.PatternString{Line: 393, Value: "strings.Split($x, $x)"}, + ir.PatternString{Line: 394, Value: "strings.SplitAfter($x, $x)"}, + ir.PatternString{Line: 395, Value: "strings.SplitAfterN($x, $x, $_)"}, + ir.PatternString{Line: 396, Value: "strings.SplitN($x, $x, $_)"}, + ir.PatternString{Line: 397, Value: "strings.Replace($_, $x, $x, $_)"}, + ir.PatternString{Line: 398, Value: "strings.ReplaceAll($_, $x, $x)"}, + ir.PatternString{Line: 399, Value: "bytes.Contains($x, $x)"}, + ir.PatternString{Line: 400, Value: "bytes.Compare($x, $x)"}, + ir.PatternString{Line: 401, Value: "bytes.Equal($x, $x)"}, + ir.PatternString{Line: 402, Value: "bytes.EqualFold($x, $x)"}, + ir.PatternString{Line: 403, Value: "bytes.HasPrefix($x, $x)"}, + ir.PatternString{Line: 
404, Value: "bytes.HasSuffix($x, $x)"}, + ir.PatternString{Line: 405, Value: "bytes.Index($x, $x)"}, + ir.PatternString{Line: 406, Value: "bytes.LastIndex($x, $x)"}, + ir.PatternString{Line: 407, Value: "bytes.Split($x, $x)"}, + ir.PatternString{Line: 408, Value: "bytes.SplitAfter($x, $x)"}, + ir.PatternString{Line: 409, Value: "bytes.SplitAfterN($x, $x, $_)"}, + ir.PatternString{Line: 410, Value: "bytes.SplitN($x, $x, $_)"}, + ir.PatternString{Line: 411, Value: "bytes.Replace($_, $x, $x, $_)"}, + ir.PatternString{Line: 412, Value: "bytes.ReplaceAll($_, $x, $x)"}, + ir.PatternString{Line: 413, Value: "types.Identical($x, $x)"}, + ir.PatternString{Line: 414, Value: "types.IdenticalIgnoreTags($x, $x)"}, + ir.PatternString{Line: 415, Value: "draw.Draw($x, $_, $x, $_, $_)"}, }, ReportTemplate: "suspicious duplicated args in $$", - WhereExpr: ir.FilterExpr{Line: 407, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 416, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, }, }, ir.RuleGroup{ - Line: 415, + Line: 424, Name: "returnAfterHttpError", MatcherName: "m", DocTags: []string{ @@ -1568,9 +1595,9 @@ var PrecompiledRules = &ir.File{ DocAfter: "if err != nil { http.Error(...); return; }", Rules: []ir.Rule{ ir.Rule{ - Line: 416, + Line: 425, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 416, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}, + ir.PatternString{Line: 425, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}, }, ReportTemplate: "Possibly return is missed after the http.Error call", LocationVar: "w", @@ -1578,7 +1605,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 425, + Line: 434, Name: "preferFilepathJoin", MatcherName: "m", DocTags: []string{ @@ -1590,33 +1617,33 @@ var PrecompiledRules = &ir.File{ DocAfter: "filepath.Join(x, y)", Rules: []ir.Rule{ ir.Rule{ - Line: 426, + Line: 435, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 426, Value: "$x + string(os.PathSeparator) + $y"}, + ir.PatternString{Line: 435, Value: "$x + string(os.PathSeparator) + $y"}, }, ReportTemplate: "filepath.Join($x, $y) should be preferred to the $$", SuggestTemplate: "filepath.Join($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 427, + Line: 436, Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`string`) && m[\"y\"].Type.Is(`string`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 427, + Line: 436, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`string`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 427, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 436, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, ir.FilterExpr{ - Line: 427, + Line: 436, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`string`)", Value: "y", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 427, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 436, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, }, @@ -1625,7 +1652,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 436, + Line: 445, Name: "preferStringWriter", MatcherName: "m", DocTags: []string{ @@ -1637,43 +1664,43 @@ var PrecompiledRules = &ir.File{ DocAfter: "w.WriteString(\"foo\")", Rules: []ir.Rule{ ir.Rule{ - Line: 437, + Line: 446, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 437, Value: "$w.Write([]byte($s))"}, + ir.PatternString{Line: 446, Value: "$w.Write([]byte($s))"}, }, ReportTemplate: "$w.WriteString($s) should be preferred to the $$", 
SuggestTemplate: "$w.WriteString($s)", WhereExpr: ir.FilterExpr{ - Line: 438, + Line: 447, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", Value: "w", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 438, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}, + ir.FilterExpr{Line: 447, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}, }, }, }, ir.Rule{ - Line: 442, + Line: 451, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 442, Value: "io.WriteString($w, $s)"}, + ir.PatternString{Line: 451, Value: "io.WriteString($w, $s)"}, }, ReportTemplate: "$w.WriteString($s) should be preferred to the $$", SuggestTemplate: "$w.WriteString($s)", WhereExpr: ir.FilterExpr{ - Line: 443, + Line: 452, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", Value: "w", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 443, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}, + ir.FilterExpr{Line: 452, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}, }, }, }, }, }, ir.RuleGroup{ - Line: 452, + Line: 461, Name: "sliceClear", MatcherName: "m", DocTags: []string{ @@ -1685,24 +1712,24 @@ var PrecompiledRules = &ir.File{ DocAfter: "for i := range buf { buf[i] = 0 }", Rules: []ir.Rule{ ir.Rule{ - Line: 453, + Line: 462, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 453, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}, + ir.PatternString{Line: 462, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}, }, ReportTemplate: "rewrite as for-range so compiler can recognize this pattern", WhereExpr: ir.FilterExpr{ - Line: 454, + Line: 463, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 454, + Line: 463, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, ir.FilterExpr{ - Line: 454, + Line: 463, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1713,7 +1740,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 462, + Line: 471, Name: "syncMapLoadAndDelete", MatcherName: "m", DocTags: []string{ @@ -1725,29 +1752,29 @@ var PrecompiledRules = &ir.File{ DocAfter: "v, deleted := m.LoadAndDelete(k); if deleted { f(v) }", Rules: []ir.Rule{ ir.Rule{ - Line: 463, + Line: 472, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 463, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}, + ir.PatternString{Line: 472, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}, }, ReportTemplate: "use $m.LoadAndDelete to perform load+delete operations atomically", WhereExpr: ir.FilterExpr{ - Line: 464, + Line: 473, Op: ir.FilterAndOp, Src: "m.GoVersion().GreaterEqThan(\"1.15\") &&\n\tm[\"m\"].Type.Is(`*sync.Map`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 464, + Line: 473, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.15\")", Value: "1.15", }, ir.FilterExpr{ - Line: 465, + Line: 474, Op: ir.FilterVarTypeIsOp, Src: "m[\"m\"].Type.Is(`*sync.Map`)", Value: "m", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 465, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}, + ir.FilterExpr{Line: 474, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}, }, }, }, @@ -1756,7 +1783,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 473, + Line: 482, Name: "sprintfQuotedString", MatcherName: "m", DocTags: []string{ @@ -1768,32 +1795,32 @@ var 
PrecompiledRules = &ir.File{ DocAfter: "fmt.Sprintf(`%q`, s)", Rules: []ir.Rule{ ir.Rule{ - Line: 474, + Line: 483, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 474, Value: "fmt.Sprintf($s, $*_)"}, + ir.PatternString{Line: 483, Value: "fmt.Sprintf($s, $*_)"}, }, ReportTemplate: "use %q instead of \"%s\" for quoted strings", WhereExpr: ir.FilterExpr{ - Line: 475, + Line: 484, Op: ir.FilterOrOp, Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\") ||\n\tm[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 475, + Line: 484, Op: ir.FilterVarTextMatchesOp, Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\")", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 475, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}, + ir.FilterExpr{Line: 484, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}, }, }, ir.FilterExpr{ - Line: 476, + Line: 485, Op: ir.FilterVarTextMatchesOp, Src: "m[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 476, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}, + ir.FilterExpr{Line: 485, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}, }, }, }, @@ -1802,7 +1829,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 484, + Line: 493, Name: "offBy1", MatcherName: "m", DocTags: []string{ @@ -1813,59 +1840,39 @@ var PrecompiledRules = &ir.File{ DocAfter: "xs[len(xs)-1]", Rules: []ir.Rule{ ir.Rule{ - Line: 485, + Line: 494, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 485, Value: "$x[len($x)]"}, + ir.PatternString{Line: 494, Value: "$x[len($x)]"}, }, ReportTemplate: "index expr always panics; maybe you wanted $x[len($x)-1]?", SuggestTemplate: "$x[len($x)-1]", WhereExpr: ir.FilterExpr{ - Line: 486, + Line: 495, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"x\"].Type.Is(`[]$_`)", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 486, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + ir.FilterExpr{Line: 495, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, ir.FilterExpr{ - Line: 486, + Line: 495, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]$_`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 486, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}, + ir.FilterExpr{Line: 495, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}, }, }, }, }, }, - ir.Rule{ - Line: 493, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 494, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, - ir.PatternString{Line: 495, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, - ir.PatternString{Line: 496, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, - ir.PatternString{Line: 497, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, - }, - ReportTemplate: "Index() can return -1; maybe you wanted to do $s[$i+1:]", - WhereExpr: ir.FilterExpr{ - Line: 498, - Op: ir.FilterEqOp, - Src: "m[\"s\"].Text == m[\"slicing\"].Text", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 498, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - ir.FilterExpr{Line: 498, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, - }, - }, - LocationVar: "slicing", - }, ir.Rule{ Line: 502, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 503, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, - ir.PatternString{Line: 504, 
Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, - ir.PatternString{Line: 505, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, - ir.PatternString{Line: 506, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, + ir.PatternString{Line: 503, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, + ir.PatternString{Line: 504, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, + ir.PatternString{Line: 505, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, + ir.PatternString{Line: 506, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, }, - ReportTemplate: "Index() can return -1; maybe you wanted to do $s[:$i+1]", + ReportTemplate: "Index() can return -1; maybe you wanted to do $s[$i+1:]", WhereExpr: ir.FilterExpr{ Line: 507, Op: ir.FilterEqOp, @@ -1880,17 +1887,37 @@ var PrecompiledRules = &ir.File{ ir.Rule{ Line: 511, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 512, Value: "$s[strings.Index($s, $_):]"}, - ir.PatternString{Line: 513, Value: "$s[:strings.Index($s, $_)]"}, - ir.PatternString{Line: 514, Value: "$s[bytes.Index($s, $_):]"}, - ir.PatternString{Line: 515, Value: "$s[:bytes.Index($s, $_)]"}, + ir.PatternString{Line: 512, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, + ir.PatternString{Line: 513, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, + ir.PatternString{Line: 514, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, + ir.PatternString{Line: 515, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, + }, + ReportTemplate: "Index() can return -1; maybe you wanted to do $s[:$i+1]", + WhereExpr: ir.FilterExpr{ + Line: 516, + Op: ir.FilterEqOp, + Src: "m[\"s\"].Text == m[\"slicing\"].Text", + Args: []ir.FilterExpr{ + ir.FilterExpr{Line: 516, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, + ir.FilterExpr{Line: 516, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, + }, + }, + LocationVar: "slicing", + }, + ir.Rule{ + Line: 520, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 521, Value: "$s[strings.Index($s, $_):]"}, + ir.PatternString{Line: 522, Value: "$s[:strings.Index($s, $_)]"}, + ir.PatternString{Line: 523, Value: "$s[bytes.Index($s, $_):]"}, + ir.PatternString{Line: 524, Value: "$s[:bytes.Index($s, $_)]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do Index()+1", }, }, }, ir.RuleGroup{ - Line: 523, + Line: 532, Name: "unslice", MatcherName: "m", DocTags: []string{ @@ -1901,33 +1928,33 @@ var PrecompiledRules = &ir.File{ DocAfter: "copy(b, values...)", Rules: []ir.Rule{ ir.Rule{ - Line: 524, + Line: 533, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 524, Value: "$s[:]"}, + ir.PatternString{Line: 533, Value: "$s[:]"}, }, ReportTemplate: "could simplify $$ to $s", SuggestTemplate: "$s", WhereExpr: ir.FilterExpr{ - Line: 525, + Line: 534, Op: ir.FilterOrOp, Src: "m[\"s\"].Type.Is(`string`) || m[\"s\"].Type.Is(`[]$_`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 525, + Line: 534, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 525, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, + ir.FilterExpr{Line: 534, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, }, }, ir.FilterExpr{ - Line: 525, + Line: 534, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`[]$_`)", Value: "s", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 525, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}, + ir.FilterExpr{Line: 534, Op: 
ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}, }, }, }, @@ -1936,7 +1963,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 534, + Line: 543, Name: "yodaStyleExpr", MatcherName: "m", DocTags: []string{ @@ -1948,37 +1975,37 @@ var PrecompiledRules = &ir.File{ DocAfter: "return ptr != nil", Rules: []ir.Rule{ ir.Rule{ - Line: 535, + Line: 544, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 535, Value: "$constval != $x"}, + ir.PatternString{Line: 544, Value: "$constval != $x"}, }, ReportTemplate: "consider to change order in expression to $x != $constval", WhereExpr: ir.FilterExpr{ - Line: 535, + Line: 544, Op: ir.FilterAndOp, Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 535, + Line: 544, Op: ir.FilterVarNodeIsOp, Src: "m[\"constval\"].Node.Is(`BasicLit`)", Value: "constval", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 535, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, + ir.FilterExpr{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, }, }, ir.FilterExpr{ - Line: 535, + Line: 544, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 535, + Line: 544, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 535, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, + ir.FilterExpr{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, }, }, }, @@ -1987,37 +2014,37 @@ var PrecompiledRules = &ir.File{ }, }, ir.Rule{ - Line: 537, + Line: 546, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 537, Value: "$constval == $x"}, + ir.PatternString{Line: 546, Value: "$constval == $x"}, }, ReportTemplate: "consider to change order in expression to $x == $constval", WhereExpr: ir.FilterExpr{ - Line: 537, + Line: 546, Op: ir.FilterAndOp, Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 537, + Line: 546, Op: ir.FilterVarNodeIsOp, Src: "m[\"constval\"].Node.Is(`BasicLit`)", Value: "constval", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 537, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, + ir.FilterExpr{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, }, }, ir.FilterExpr{ - Line: 537, + Line: 546, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 537, + Line: 546, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 537, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, + ir.FilterExpr{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, }, }, }, @@ -2026,46 +2053,46 @@ var PrecompiledRules = &ir.File{ }, }, ir.Rule{ - Line: 540, + Line: 549, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 540, Value: "nil != $x"}, + ir.PatternString{Line: 549, Value: "nil != $x"}, }, ReportTemplate: "consider to change order in expression to $x != nil", WhereExpr: ir.FilterExpr{ - Line: 540, + Line: 549, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 540, + Line: 549, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 540, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, + ir.FilterExpr{Line: 549, Op: ir.FilterStringOp, 
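For reference (not part of this patch), the unslice and yodaStyleExpr groups renumbered above are purely stylistic; a short sketch of what they report:

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	b := []byte("hello")
	// unslice: b[:] over the whole slice is redundant, just use b.
	fmt.Println(string(b))

	err := errors.New("boom")
	// yodaStyleExpr: keep the variable on the left, i.e. prefer
	// "err != nil" over "nil != err".
	if err != nil {
		fmt.Println(err)
	}
}
```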
Src: "`BasicLit`", Value: "BasicLit"}, }, }, }, }, }, ir.Rule{ - Line: 542, + Line: 551, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 542, Value: "nil == $x"}, + ir.PatternString{Line: 551, Value: "nil == $x"}, }, ReportTemplate: "consider to change order in expression to $x == nil", WhereExpr: ir.FilterExpr{ - Line: 542, + Line: 551, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 542, + Line: 551, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 542, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, + ir.FilterExpr{Line: 551, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, }, }, }, @@ -2074,7 +2101,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 550, + Line: 559, Name: "equalFold", MatcherName: "m", DocTags: []string{ @@ -2086,114 +2113,114 @@ var PrecompiledRules = &ir.File{ DocAfter: "strings.EqualFold(x, y)", Rules: []ir.Rule{ ir.Rule{ - Line: 559, + Line: 568, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 560, Value: "strings.ToLower($x) == $y"}, - ir.PatternString{Line: 561, Value: "strings.ToLower($x) == strings.ToLower($y)"}, - ir.PatternString{Line: 562, Value: "$x == strings.ToLower($y)"}, - ir.PatternString{Line: 563, Value: "strings.ToUpper($x) == $y"}, - ir.PatternString{Line: 564, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, - ir.PatternString{Line: 565, Value: "$x == strings.ToUpper($y)"}, + ir.PatternString{Line: 569, Value: "strings.ToLower($x) == $y"}, + ir.PatternString{Line: 570, Value: "strings.ToLower($x) == strings.ToLower($y)"}, + ir.PatternString{Line: 571, Value: "$x == strings.ToLower($y)"}, + ir.PatternString{Line: 572, Value: "strings.ToUpper($x) == $y"}, + ir.PatternString{Line: 573, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, + ir.PatternString{Line: 574, Value: "$x == strings.ToUpper($y)"}, }, ReportTemplate: "consider replacing with strings.EqualFold($x, $y)", - SuggestTemplate: "strings.EqualFold($x, $y)]", + SuggestTemplate: "strings.EqualFold($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 566, + Line: 575, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 566, + Line: 575, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 566, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 566, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + ir.FilterExpr{Line: 575, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + ir.FilterExpr{Line: 575, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, ir.FilterExpr{ - Line: 566, + Line: 575, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 566, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - ir.FilterExpr{Line: 566, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + ir.FilterExpr{Line: 575, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + ir.FilterExpr{Line: 575, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, }, }, ir.Rule{ - Line: 571, + Line: 580, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 572, Value: "strings.ToLower($x) != $y"}, - ir.PatternString{Line: 573, Value: "strings.ToLower($x) != strings.ToLower($y)"}, - ir.PatternString{Line: 574, Value: "$x != strings.ToLower($y)"}, 
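The equalFold hunk above also drops a stray `]` from each SuggestTemplate, so the suggested replacement is now syntactically valid Go. An illustration of the rewrite the rule proposes (not part of the vendored file):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	x, y := "Content-Type", "content-type"

	// Flagged form: both operands are lowered (allocating) before comparison.
	slow := strings.ToLower(x) == strings.ToLower(y)

	// Suggested form: case-insensitive comparison without allocations.
	fast := strings.EqualFold(x, y)

	fmt.Println(slow, fast) // true true
}
```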
- ir.PatternString{Line: 575, Value: "strings.ToUpper($x) != $y"}, - ir.PatternString{Line: 576, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, - ir.PatternString{Line: 577, Value: "$x != strings.ToUpper($y)"}, + ir.PatternString{Line: 581, Value: "strings.ToLower($x) != $y"}, + ir.PatternString{Line: 582, Value: "strings.ToLower($x) != strings.ToLower($y)"}, + ir.PatternString{Line: 583, Value: "$x != strings.ToLower($y)"}, + ir.PatternString{Line: 584, Value: "strings.ToUpper($x) != $y"}, + ir.PatternString{Line: 585, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, + ir.PatternString{Line: 586, Value: "$x != strings.ToUpper($y)"}, }, ReportTemplate: "consider replacing with !strings.EqualFold($x, $y)", - SuggestTemplate: "!strings.EqualFold($x, $y)]", + SuggestTemplate: "!strings.EqualFold($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 578, + Line: 587, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 578, + Line: 587, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 578, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 578, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + ir.FilterExpr{Line: 587, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + ir.FilterExpr{Line: 587, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, ir.FilterExpr{ - Line: 578, + Line: 587, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 578, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - ir.FilterExpr{Line: 578, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + ir.FilterExpr{Line: 587, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + ir.FilterExpr{Line: 587, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, }, }, ir.Rule{ - Line: 583, + Line: 592, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 584, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, - ir.PatternString{Line: 585, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, - ir.PatternString{Line: 586, Value: "bytes.Equal($x, bytes.ToLower($y))"}, - ir.PatternString{Line: 587, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, - ir.PatternString{Line: 588, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, - ir.PatternString{Line: 589, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, + ir.PatternString{Line: 593, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, + ir.PatternString{Line: 594, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, + ir.PatternString{Line: 595, Value: "bytes.Equal($x, bytes.ToLower($y))"}, + ir.PatternString{Line: 596, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, + ir.PatternString{Line: 597, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, + ir.PatternString{Line: 598, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, }, ReportTemplate: "consider replacing with bytes.EqualFold($x, $y)", - SuggestTemplate: "bytes.EqualFold($x, $y)]", + SuggestTemplate: "bytes.EqualFold($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 590, + Line: 599, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 590, + Line: 599, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 590, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 
590, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + ir.FilterExpr{Line: 599, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + ir.FilterExpr{Line: 599, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, ir.FilterExpr{ - Line: 590, + Line: 599, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 590, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - ir.FilterExpr{Line: 590, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + ir.FilterExpr{Line: 599, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + ir.FilterExpr{Line: 599, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, @@ -2202,7 +2229,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 599, + Line: 608, Name: "argOrder", MatcherName: "m", DocTags: []string{ @@ -2213,45 +2240,45 @@ var PrecompiledRules = &ir.File{ DocAfter: "strings.HasPrefix(userpass, \"#\")", Rules: []ir.Rule{ ir.Rule{ - Line: 600, + Line: 609, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 601, Value: "strings.HasPrefix($lit, $s)"}, - ir.PatternString{Line: 602, Value: "bytes.HasPrefix($lit, $s)"}, - ir.PatternString{Line: 603, Value: "strings.HasSuffix($lit, $s)"}, - ir.PatternString{Line: 604, Value: "bytes.HasSuffix($lit, $s)"}, - ir.PatternString{Line: 605, Value: "strings.Contains($lit, $s)"}, - ir.PatternString{Line: 606, Value: "bytes.Contains($lit, $s)"}, - ir.PatternString{Line: 607, Value: "strings.TrimPrefix($lit, $s)"}, - ir.PatternString{Line: 608, Value: "bytes.TrimPrefix($lit, $s)"}, - ir.PatternString{Line: 609, Value: "strings.TrimSuffix($lit, $s)"}, - ir.PatternString{Line: 610, Value: "bytes.TrimSuffix($lit, $s)"}, - ir.PatternString{Line: 611, Value: "strings.Split($lit, $s)"}, - ir.PatternString{Line: 612, Value: "bytes.Split($lit, $s)"}, + ir.PatternString{Line: 610, Value: "strings.HasPrefix($lit, $s)"}, + ir.PatternString{Line: 611, Value: "bytes.HasPrefix($lit, $s)"}, + ir.PatternString{Line: 612, Value: "strings.HasSuffix($lit, $s)"}, + ir.PatternString{Line: 613, Value: "bytes.HasSuffix($lit, $s)"}, + ir.PatternString{Line: 614, Value: "strings.Contains($lit, $s)"}, + ir.PatternString{Line: 615, Value: "bytes.Contains($lit, $s)"}, + ir.PatternString{Line: 616, Value: "strings.TrimPrefix($lit, $s)"}, + ir.PatternString{Line: 617, Value: "bytes.TrimPrefix($lit, $s)"}, + ir.PatternString{Line: 618, Value: "strings.TrimSuffix($lit, $s)"}, + ir.PatternString{Line: 619, Value: "bytes.TrimSuffix($lit, $s)"}, + ir.PatternString{Line: 620, Value: "strings.Split($lit, $s)"}, + ir.PatternString{Line: 621, Value: "bytes.Split($lit, $s)"}, }, ReportTemplate: "$lit and $s arguments order looks reversed", WhereExpr: ir.FilterExpr{ - Line: 613, + Line: 622, Op: ir.FilterAndOp, Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice) &&\n\t!m[\"lit\"].Node.Is(`Ident`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 613, + Line: 622, Op: ir.FilterAndOp, Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 613, + Line: 622, Op: ir.FilterOrOp, Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 613, + Line: 622, Op: ir.FilterVarConstOp, Src: "m[\"lit\"].Const", Value: "lit", }, ir.FilterExpr{ - Line: 613, + Line: 622, Op: ir.FilterVarConstSliceOp, Src: "m[\"lit\"].ConstSlice", Value: "lit", @@ -2259,23 +2286,23 @@ var 
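For context on the argOrder group renumbered above (illustration only): it fires when a constant ends up in the haystack position of HasPrefix/HasSuffix/Contains/Trim*/Split, which almost always means the arguments were swapped.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	userpass := "#admin:secret"

	// Flagged: the literal is in the haystack position.
	fmt.Println(strings.HasPrefix("#", userpass)) // false

	// Intended call: variable first, literal second.
	fmt.Println(strings.HasPrefix(userpass, "#")) // true
}
```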
PrecompiledRules = &ir.File{ }, }, ir.FilterExpr{ - Line: 614, + Line: 623, Op: ir.FilterNotOp, Src: "!(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 614, + Line: 623, Op: ir.FilterOrOp, Src: "(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 614, + Line: 623, Op: ir.FilterVarConstOp, Src: "m[\"s\"].Const", Value: "s", }, ir.FilterExpr{ - Line: 614, + Line: 623, Op: ir.FilterVarConstSliceOp, Src: "m[\"s\"].ConstSlice", Value: "s", @@ -2287,17 +2314,17 @@ var PrecompiledRules = &ir.File{ }, }, ir.FilterExpr{ - Line: 615, + Line: 624, Op: ir.FilterNotOp, Src: "!m[\"lit\"].Node.Is(`Ident`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 615, + Line: 624, Op: ir.FilterVarNodeIsOp, Src: "m[\"lit\"].Node.Is(`Ident`)", Value: "lit", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 615, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}, + ir.FilterExpr{Line: 624, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}, }, }, }, @@ -2308,7 +2335,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 623, + Line: 632, Name: "stringConcatSimplify", MatcherName: "m", DocTags: []string{ @@ -2320,25 +2347,25 @@ var PrecompiledRules = &ir.File{ DocAfter: "x + \"_\" + y", Rules: []ir.Rule{ ir.Rule{ - Line: 624, + Line: 633, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 624, Value: "strings.Join([]string{$x, $y}, \"\")"}, + ir.PatternString{Line: 633, Value: "strings.Join([]string{$x, $y}, \"\")"}, }, ReportTemplate: "suggestion: $x + $y", SuggestTemplate: "$x + $y", }, ir.Rule{ - Line: 625, + Line: 634, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 625, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}, + ir.PatternString{Line: 634, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}, }, ReportTemplate: "suggestion: $x + $y + $z", SuggestTemplate: "$x + $y + $z", }, ir.Rule{ - Line: 626, + Line: 635, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 626, Value: "strings.Join([]string{$x, $y}, $glue)"}, + ir.PatternString{Line: 635, Value: "strings.Join([]string{$x, $y}, $glue)"}, }, ReportTemplate: "suggestion: $x + $glue + $y", SuggestTemplate: "$x + $glue + $y", @@ -2346,7 +2373,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 633, + Line: 642, Name: "timeExprSimplify", MatcherName: "m", DocTags: []string{ @@ -2358,44 +2385,44 @@ var PrecompiledRules = &ir.File{ DocAfter: "t.UnixMilli()", Rules: []ir.Rule{ ir.Rule{ - Line: 634, + Line: 647, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 634, Value: "$t.Unix() / 1000"}, + ir.PatternString{Line: 647, Value: "$t.Unix() / 1000"}, }, ReportTemplate: "use $t.UnixMilli() instead of $$", SuggestTemplate: "$t.UnixMilli()", WhereExpr: ir.FilterExpr{ - Line: 635, + Line: 648, Op: ir.FilterAndOp, - Src: "m.GoVersion().GreaterEqThan(\"1.17\") &&\n\t(m[\"t\"].Type.Is(`time.Time`) || m[\"t\"].Type.Is(`*time.Time`))", + Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 635, + Line: 648, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\")", Value: "1.17", }, ir.FilterExpr{ - Line: 636, + Line: 648, Op: ir.FilterOrOp, - Src: "(m[\"t\"].Type.Is(`time.Time`) || m[\"t\"].Type.Is(`*time.Time`))", + Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 636, + Line: 648, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 636, Op: 
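A quick sketch of the stringConcatSimplify suggestions carried in the hunk above (not part of the vendored file): a two- or three-element `strings.Join` is just concatenation.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	x, y := "user", "42"

	// Flagged: a two-element Join is concatenation with extra ceremony.
	joined := strings.Join([]string{x, y}, "_")

	// Suggested: plain concatenation.
	simpler := x + "_" + y

	fmt.Println(joined == simpler) // true
}
```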
ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}, + ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}, }, }, ir.FilterExpr{ - Line: 636, + Line: 648, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 636, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}, + ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}, }, }, }, @@ -2404,44 +2431,44 @@ var PrecompiledRules = &ir.File{ }, }, ir.Rule{ - Line: 640, + Line: 652, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 640, Value: "$t.UnixNano() * 1000"}, + ir.PatternString{Line: 652, Value: "$t.UnixNano() * 1000"}, }, ReportTemplate: "use $t.UnixMicro() instead of $$", SuggestTemplate: "$t.UnixMicro()", WhereExpr: ir.FilterExpr{ - Line: 641, + Line: 653, Op: ir.FilterAndOp, - Src: "m.GoVersion().GreaterEqThan(\"1.17\") &&\n\t(m[\"t\"].Type.Is(`time.Time`) || m[\"t\"].Type.Is(`*time.Time`))", + Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 641, + Line: 653, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\")", Value: "1.17", }, ir.FilterExpr{ - Line: 642, + Line: 653, Op: ir.FilterOrOp, - Src: "(m[\"t\"].Type.Is(`time.Time`) || m[\"t\"].Type.Is(`*time.Time`))", + Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 642, + Line: 653, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 642, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}, + ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}, }, }, ir.FilterExpr{ - Line: 642, + Line: 653, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 642, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}, + ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}, }, }, }, @@ -2452,7 +2479,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 651, + Line: 662, Name: "exposedSyncMutex", MatcherName: "m", DocTags: []string{ @@ -2464,73 +2491,73 @@ var PrecompiledRules = &ir.File{ DocAfter: "type Foo struct{ ...; mu sync.Mutex; ... 
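The timeExprSimplify where-clauses above now go through a shared isTime() helper and keep the Go 1.17 gate. As a rough illustration of the helpers the suggestions point at (not the exact flagged expressions, which live in the upstream rules file):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	t := time.Now()

	// Manual epoch arithmetic of this sort is what the group targets:
	ms := t.UnixNano() / int64(time.Millisecond)
	us := t.UnixNano() / int64(time.Microsecond)

	// time.Time gained UnixMilli/UnixMicro in Go 1.17, which is why the
	// filter requires m.GoVersion().GreaterEqThan("1.17").
	fmt.Println(ms == t.UnixMilli(), us == t.UnixMicro()) // true true
}
```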
}", Rules: []ir.Rule{ ir.Rule{ - Line: 652, + Line: 667, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 652, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}, + ir.PatternString{Line: 667, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}, }, ReportTemplate: "don't embed sync.Mutex", WhereExpr: ir.FilterExpr{ - Line: 653, + Line: 668, Op: ir.FilterVarTextMatchesOp, - Src: "m[\"x\"].Text.Matches(`^\\p{Lu}`)", + Src: "isExported(m[\"x\"])", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 653, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, + ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, }, }, }, ir.Rule{ - Line: 656, + Line: 671, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 656, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}, + ir.PatternString{Line: 671, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}, }, ReportTemplate: "don't embed *sync.Mutex", WhereExpr: ir.FilterExpr{ - Line: 657, + Line: 672, Op: ir.FilterVarTextMatchesOp, - Src: "m[\"x\"].Text.Matches(`^\\p{Lu}`)", + Src: "isExported(m[\"x\"])", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 657, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, + ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, }, }, }, ir.Rule{ - Line: 660, + Line: 675, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 660, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}, + ir.PatternString{Line: 675, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}, }, ReportTemplate: "don't embed sync.RWMutex", WhereExpr: ir.FilterExpr{ - Line: 661, + Line: 676, Op: ir.FilterVarTextMatchesOp, - Src: "m[\"x\"].Text.Matches(`^\\p{Lu}`)", + Src: "isExported(m[\"x\"])", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 661, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, + ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, }, }, }, ir.Rule{ - Line: 664, + Line: 679, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 664, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}, + ir.PatternString{Line: 679, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}, }, ReportTemplate: "don't embed *sync.RWMutex", WhereExpr: ir.FilterExpr{ - Line: 665, + Line: 680, Op: ir.FilterVarTextMatchesOp, - Src: "m[\"x\"].Text.Matches(`^\\p{Lu}`)", + Src: "isExported(m[\"x\"])", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 665, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, + ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, }, }, }, }, }, ir.RuleGroup{ - Line: 673, + Line: 688, Name: "badSorting", MatcherName: "m", DocTags: []string{ @@ -2542,60 +2569,60 @@ var PrecompiledRules = &ir.File{ DocAfter: "sort.Strings(xs)", Rules: []ir.Rule{ ir.Rule{ - Line: 674, + Line: 689, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 674, Value: "$x = sort.IntSlice($x)"}, + ir.PatternString{Line: 689, Value: "$x = sort.IntSlice($x)"}, }, ReportTemplate: "suspicious sort.IntSlice usage, maybe sort.Ints was intended?", SuggestTemplate: "sort.Ints($x)", WhereExpr: ir.FilterExpr{ - Line: 675, + Line: 690, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]int`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 675, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}, + ir.FilterExpr{Line: 690, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}, }, }, }, ir.Rule{ - Line: 679, + Line: 694, 
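The exposedSyncMutex filters above now call an isExported() helper instead of repeating the `^\p{Lu}` regex; the check only applies to exported types. A small sketch of the shape it flags versus the preferred one (illustration only):

```go
package main

import "sync"

// Flagged: embedding sync.Mutex in an exported type makes Lock/Unlock
// part of Counter's public API, so any caller can lock it.
type Counter struct {
	sync.Mutex
	n int
}

// Preferred shape: an unexported named field keeps locking internal.
type safeCounter struct {
	mu sync.Mutex
	n  int
}

func (c *safeCounter) Inc() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.n++
}

func main() {
	var c safeCounter
	c.Inc()
	_ = Counter{}
}
```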
SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 679, Value: "$x = sort.Float64Slice($x)"}, + ir.PatternString{Line: 694, Value: "$x = sort.Float64Slice($x)"}, }, ReportTemplate: "suspicious sort.Float64s usage, maybe sort.Float64s was intended?", SuggestTemplate: "sort.Float64s($x)", WhereExpr: ir.FilterExpr{ - Line: 680, + Line: 695, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]float64`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 680, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}, + ir.FilterExpr{Line: 695, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}, }, }, }, ir.Rule{ - Line: 684, + Line: 699, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 684, Value: "$x = sort.StringSlice($x)"}, + ir.PatternString{Line: 699, Value: "$x = sort.StringSlice($x)"}, }, ReportTemplate: "suspicious sort.StringSlice usage, maybe sort.Strings was intended?", SuggestTemplate: "sort.Strings($x)", WhereExpr: ir.FilterExpr{ - Line: 685, + Line: 700, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]string`)", Value: "x", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 685, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}, + ir.FilterExpr{Line: 700, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}, }, }, }, }, }, ir.RuleGroup{ - Line: 694, + Line: 709, Name: "externalErrorReassign", MatcherName: "m", DocTags: []string{ @@ -2607,32 +2634,32 @@ var PrecompiledRules = &ir.File{ DocAfter: "/* don't do it */", Rules: []ir.Rule{ ir.Rule{ - Line: 695, + Line: 710, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 695, Value: "$pkg.$err = $x"}, + ir.PatternString{Line: 710, Value: "$pkg.$err = $x"}, }, ReportTemplate: "suspicious reassigment of error from another package", WhereExpr: ir.FilterExpr{ - Line: 696, + Line: 711, Op: ir.FilterAndOp, Src: "m[\"err\"].Type.Is(`error`) && m[\"pkg\"].Object.Is(`PkgName`)", Args: []ir.FilterExpr{ ir.FilterExpr{ - Line: 696, + Line: 711, Op: ir.FilterVarTypeIsOp, Src: "m[\"err\"].Type.Is(`error`)", Value: "err", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 696, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}, + ir.FilterExpr{Line: 711, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}, }, }, ir.FilterExpr{ - Line: 696, + Line: 711, Op: ir.FilterVarObjectIsOp, Src: "m[\"pkg\"].Object.Is(`PkgName`)", Value: "pkg", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 696, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}, + ir.FilterExpr{Line: 711, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}, }, }, }, @@ -2641,7 +2668,7 @@ var PrecompiledRules = &ir.File{ }, }, ir.RuleGroup{ - Line: 704, + Line: 719, Name: "emptyDecl", MatcherName: "m", DocTags: []string{ @@ -2653,28 +2680,71 @@ var PrecompiledRules = &ir.File{ DocAfter: "/* nothing */", Rules: []ir.Rule{ ir.Rule{ - Line: 705, + Line: 720, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 705, Value: "var()"}, + ir.PatternString{Line: 720, Value: "var()"}, }, ReportTemplate: "empty var() block", }, ir.Rule{ - Line: 706, + Line: 721, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 706, Value: "const()"}, + ir.PatternString{Line: 721, Value: "const()"}, }, ReportTemplate: "empty const() block", }, ir.Rule{ - Line: 707, + Line: 722, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 707, Value: "type()"}, + ir.PatternString{Line: 722, Value: "type()"}, }, ReportTemplate: "empty type() block", }, }, }, + ir.RuleGroup{ + Line: 729, + Name: "dynamicFmtString", + 
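For the badSorting group above, a minimal example of the mistake it catches (not part of the patch): the sort.*Slice types only wrap a slice, they do not sort it.

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	xs := []int{3, 1, 2}

	// Flagged by badSorting: sort.IntSlice only wraps the slice,
	// so this assignment never sorts anything.
	xs = sort.IntSlice(xs)

	// Suggested replacement: sort.Ints sorts in place.
	sort.Ints(xs)
	fmt.Println(xs) // [1 2 3]
}
```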
MatcherName: "m", + DocTags: []string{ + "diagnostic", + "experimental", + }, + DocSummary: "Detects suspicious formatting strings usage", + DocBefore: "fmt.Errorf(msg)", + DocAfter: "fmt.Errorf(\"%s\", msg)", + Rules: []ir.Rule{ + ir.Rule{ + Line: 730, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 730, Value: "fmt.Errorf($f)"}, + }, + ReportTemplate: "use errors.New($f) or fmt.Errorf(\"%s\", $f) instead", + SuggestTemplate: "errors.New($f)", + WhereExpr: ir.FilterExpr{ + Line: 731, + Op: ir.FilterNotOp, + Src: "!m[\"f\"].Const", + Args: []ir.FilterExpr{ + ir.FilterExpr{ + Line: 731, + Op: ir.FilterVarConstOp, + Src: "m[\"f\"].Const", + Value: "f", + }, + }, + }, + }, + ir.Rule{ + Line: 735, + SyntaxPatterns: []ir.PatternString{ + ir.PatternString{Line: 735, Value: "fmt.Errorf($f($*args))"}, + }, + ReportTemplate: "use errors.New($f($*args)) or fmt.Errorf(\"%s\", $f($*args)) instead", + SuggestTemplate: "errors.New($f($*args))", + }, + }, + }, }, } diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go index 6bbec5037..1e11e4937 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go @@ -74,7 +74,7 @@ func (c *typeSwitchVarChecker) checkTypeSwitch(root *ast.TypeSwitchStmt) { // Create artificial node just for matching. assert1 := ast.TypeAssertExpr{X: expr, Type: clause.List[0]} for _, stmt := range clause.Body { - assert2 := lintutil.FindNode(stmt, func(x ast.Node) bool { + assert2 := lintutil.FindNode(stmt, nil, func(x ast.Node) bool { return astequal.Node(&assert1, x) }) if object == c.ctx.TypesInfo.ObjectOf(identOf(assert2)) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go index fab864ec5..bcca24d2a 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go @@ -87,6 +87,7 @@ func (c *unlabelStmtChecker) VisitStmt(stmt ast.Stmt) { // Only for loops: if last stmt in list is a loop // that contains labeled "continue" to the outer loop label, // it can be refactored to use "break" instead. + // Exceptions: select statements with a labeled "continue" are ignored. 
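The new dynamicFmtString group added above warns when the format argument of fmt.Errorf is not a constant. A short sketch of the safe alternatives it suggests (illustration only, the function name is made up):

```go
package main

import (
	"errors"
	"fmt"
)

// wrap shows the alternatives suggested when the format argument to
// fmt.Errorf is not a constant.
func wrap(msg string) error {
	// fmt.Errorf(msg) would interpret any '%' in msg as a verb;
	// errors.New(msg) or fmt.Errorf("%s", msg) keep it literal.
	if msg == "" {
		return errors.New("empty message")
	}
	return fmt.Errorf("%s", msg)
}

func main() {
	fmt.Println(wrap("disk 100% full"))
}
```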
if c.isLoop(labeled.Stmt) { body := c.blockStmtOf(labeled.Stmt) if len(body.List) == 0 { @@ -96,11 +97,21 @@ func (c *unlabelStmtChecker) VisitStmt(stmt ast.Stmt) { if !c.isLoop(last) { return } - br := lintutil.FindNode(c.blockStmtOf(last), func(n ast.Node) bool { - br, ok := n.(*ast.BranchStmt) - return ok && br.Label != nil && - br.Label.Name == name && br.Tok == token.CONTINUE - }) + br := lintutil.FindNode(c.blockStmtOf(last), + func(n ast.Node) bool { + switch n.(type) { + case *ast.SelectStmt: + return false + default: + return true + } + }, + func(n ast.Node) bool { + br, ok := n.(*ast.BranchStmt) + return ok && br.Label != nil && + br.Label.Name == name && br.Tok == token.CONTINUE + }) + if br != nil { c.warnLabeledContinue(br, name) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go index 260039f2b..6829433ea 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go @@ -17,12 +17,11 @@ func init() { Before: `//nolint`, After: `//nolint // reason`, } - re := regexp.MustCompile(`^// *nolint(?::[^ ]+)? *(.*)$`) collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { return astwalk.WalkerForComment(&whyNoLintChecker{ ctx: ctx, - re: re, + re: regexp.MustCompile(`^// *nolint(?::[^ ]+)? *(.*)$`), }), nil }) } diff --git a/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go b/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go index 61e5590f3..d8091d453 100644 --- a/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go +++ b/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go @@ -27,25 +27,25 @@ func (v GoVersion) GreaterOrEqual(other GoVersion) bool { return v.Major >= other.Major } -func parseGoVersion(version string) GoVersion { +func ParseGoVersion(version string) (GoVersion, error) { + var result GoVersion version = strings.TrimPrefix(version, "go") if version == "" { - return GoVersion{} + return result, nil } parts := strings.Split(version, ".") if len(parts) != 2 { - panic(fmt.Sprintf("invalid Go version format: %s", version)) + return result, fmt.Errorf("invalid Go version format: %s", version) } major, err := strconv.Atoi(parts[0]) if err != nil { - panic(fmt.Sprintf("invalid major version part: %s: %s", parts[0], err)) + return result, fmt.Errorf("invalid major version part: %s: %w", parts[0], err) } minor, err := strconv.Atoi(parts[1]) if err != nil { - panic(fmt.Sprintf("invalid minor version part: %s: %s", parts[1], err)) - } - return GoVersion{ - Major: major, - Minor: minor, + return result, fmt.Errorf("invalid minor version part: %s: %w", parts[1], err) } + result.Major = major + result.Minor = minor + return result, nil } diff --git a/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go b/vendor/github.com/go-critic/go-critic/framework/linter/linter.go similarity index 99% rename from vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go rename to vendor/github.com/go-critic/go-critic/framework/linter/linter.go index 8e5bba728..1697ee7de 100644 --- a/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go +++ b/vendor/github.com/go-critic/go-critic/framework/linter/linter.go @@ -232,7 +232,11 @@ func NewContext(fset *token.FileSet, sizes types.Sizes) *Context { // like all features are available. 
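The go_version.go hunk above turns the panicking parseGoVersion into an exported ParseGoVersion that returns an error. A standalone sketch of the same pattern (mirroring the diff, not the vendored code itself):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

type goVersion struct{ Major, Minor int }

// parseGoVersion reports a malformed "goX.Y" string as an error the
// caller can handle, instead of panicking inside the linter framework.
func parseGoVersion(version string) (goVersion, error) {
	var v goVersion
	version = strings.TrimPrefix(version, "go")
	if version == "" {
		return v, nil
	}
	parts := strings.Split(version, ".")
	if len(parts) != 2 {
		return v, fmt.Errorf("invalid Go version format: %s", version)
	}
	var err error
	if v.Major, err = strconv.Atoi(parts[0]); err != nil {
		return v, fmt.Errorf("invalid major version part: %s: %w", parts[0], err)
	}
	if v.Minor, err = strconv.Atoi(parts[1]); err != nil {
		return v, fmt.Errorf("invalid minor version part: %s: %w", parts[1], err)
	}
	return v, nil
}

func main() {
	fmt.Println(parseGoVersion("go1.17"))
	fmt.Println(parseGoVersion("banana"))
}
```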
To make gocritic // more conservative, the upper Go version level should be adjusted. func (c *Context) SetGoVersion(version string) { - c.GoVersion = parseGoVersion(version) + v, err := ParseGoVersion(version) + if err != nil { + panic(err) + } + c.GoVersion = v } // SetPackageInfo sets package-related metadata. diff --git a/vendor/github.com/go-playground/validator/v10/README.md b/vendor/github.com/go-playground/validator/v10/README.md index f56cff15d..5c42ee4fb 100644 --- a/vendor/github.com/go-playground/validator/v10/README.md +++ b/vendor/github.com/go-playground/validator/v10/README.md @@ -1,7 +1,7 @@ Package validator ================= [![Join the chat at https://gitter.im/go-playground/validator](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/go-playground/validator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -![Project status](https://img.shields.io/badge/version-10.9.0-green.svg) +![Project status](https://img.shields.io/badge/version-10.10.0-green.svg) [![Build Status](https://travis-ci.org/go-playground/validator.svg?branch=master)](https://travis-ci.org/go-playground/validator) [![Coverage Status](https://coveralls.io/repos/go-playground/validator/badge.svg?branch=master&service=github)](https://coveralls.io/github/go-playground/validator?branch=master) [![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/validator)](https://goreportcard.com/report/github.com/go-playground/validator) @@ -189,6 +189,8 @@ Baked-in Validations | uuid5 | Universally Unique Identifier UUID v5 | | uuid5_rfc4122 | Universally Unique Identifier UUID v5 RFC4122 | | uuid_rfc4122 | Universally Unique Identifier UUID RFC4122 | +| semver | Semantic Versioning 2.0.0 | +| ulid | Universally Unique Lexicographically Sortable Identifier ULID | ### Comparisons: | Tag | Description | diff --git a/vendor/github.com/go-playground/validator/v10/baked_in.go b/vendor/github.com/go-playground/validator/v10/baked_in.go index f5fd2391d..7868b66fa 100644 --- a/vendor/github.com/go-playground/validator/v10/baked_in.go +++ b/vendor/github.com/go-playground/validator/v10/baked_in.go @@ -148,6 +148,7 @@ var ( "uuid3_rfc4122": isUUID3RFC4122, "uuid4_rfc4122": isUUID4RFC4122, "uuid5_rfc4122": isUUID5RFC4122, + "ulid": isULID, "ascii": isASCII, "printascii": isPrintableASCII, "multibyte": hasMultiByteCharacter, @@ -198,6 +199,8 @@ var ( "postcode_iso3166_alpha2": isPostcodeByIso3166Alpha2, "postcode_iso3166_alpha2_field": isPostcodeByIso3166Alpha2Field, "bic": isIsoBicFormat, + "semver": isSemverFormat, + "dns_rfc1035_label": isDnsRFC1035LabelFormat, } ) @@ -498,6 +501,11 @@ func isUUIDRFC4122(fl FieldLevel) bool { return uUIDRFC4122Regex.MatchString(fl.Field().String()) } +// isULID is the validation function for validating if the field's value is a valid ULID. +func isULID(fl FieldLevel) bool { + return uLIDRegex.MatchString(fl.Field().String()) +} + // isISBN is the validation function for validating if the field's value is a valid v10 or v13 ISBN. 
func isISBN(fl FieldLevel) bool { return isISBN10(fl) || isISBN13(fl) @@ -2351,6 +2359,12 @@ func isIso3166AlphaNumeric(fl FieldLevel) bool { var code int switch field.Kind() { + case reflect.String: + i, err := strconv.Atoi(field.String()) + if err != nil { + return false + } + code = i % 1000 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: code = int(field.Int() % 1000) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: @@ -2407,3 +2421,18 @@ func isIsoBicFormat(fl FieldLevel) bool { return bicRegex.MatchString(bicString) } + +// isSemverFormat is the validation function for validating if the current field's value is a valid semver version, defined in Semantic Versioning 2.0.0 +func isSemverFormat(fl FieldLevel) bool { + semverString := fl.Field().String() + + return semverRegex.MatchString(semverString) +} + +// isDnsRFC1035LabelFormat is the validation function +// for validating if the current field's value is +// a valid dns RFC 1035 label, defined in RFC 1035. +func isDnsRFC1035LabelFormat(fl FieldLevel) bool { + val := fl.Field().String() + return dnsRegexRFC1035Label.MatchString(val) +} diff --git a/vendor/github.com/go-playground/validator/v10/doc.go b/vendor/github.com/go-playground/validator/v10/doc.go index 8c2584792..b284c379d 100644 --- a/vendor/github.com/go-playground/validator/v10/doc.go +++ b/vendor/github.com/go-playground/validator/v10/doc.go @@ -1007,6 +1007,12 @@ This validates that a string value contains a valid version 5 UUID. Uppercase U Usage: uuid5 +Universally Unique Lexicographically Sortable Identifier ULID + +This validates that a string value contains a valid ULID value. + + Usage: ulid + ASCII This validates that a string value contains only ASCII characters. @@ -1255,6 +1261,13 @@ More information on https://www.iso.org/standard/60390.html Usage: bic +RFC 1035 label + +This validates that a string value is a valid dns RFC 1035 label, defined in RFC 1035. +More information on https://datatracker.ietf.org/doc/html/rfc1035 + + Usage: dns_rfc1035_label + TimeZone This validates that a string value is a valid time zone based on the time zone database present on the system. @@ -1263,6 +1276,12 @@ More information on https://golang.org/pkg/time/#LoadLocation Usage: timezone +Semantic Version + +This validates that a string value is a valid semver version, defined in Semantic Versioning 2.0.0. 
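The validator bump above adds the semver, ulid and dns_rfc1035_label baked-in tags. A small usage sketch assuming the vendored v10.10.0 API; the struct, ULID and hostname values are made-up samples:

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

type Release struct {
	Version string `validate:"semver"`
	ID      string `validate:"ulid"`
	Host    string `validate:"dns_rfc1035_label"`
}

func main() {
	validate := validator.New()

	r := Release{
		Version: "1.2.3-beta.1+build.5",
		ID:      "01ARZ3NDEKTSV4RRFFQ69G5FAV",
		Host:    "api-server",
	}
	fmt.Println(validate.Struct(r)) // <nil>

	// RFC 1035 labels must start with a letter, so this one fails.
	fmt.Println(validate.Var("9backend", "dns_rfc1035_label") != nil) // true
}
```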
+More information on https://semver.org/ + + Usage: semver Alias Validators and Tags diff --git a/vendor/github.com/go-playground/validator/v10/regexes.go b/vendor/github.com/go-playground/validator/v10/regexes.go index df00c4ebc..48e51d571 100644 --- a/vendor/github.com/go-playground/validator/v10/regexes.go +++ b/vendor/github.com/go-playground/validator/v10/regexes.go @@ -10,7 +10,7 @@ const ( numericRegexString = "^[-+]?[0-9]+(?:\\.[0-9]+)?$" numberRegexString = "^[0-9]+$" hexadecimalRegexString = "^(0[xX])?[0-9a-fA-F]+$" - hexColorRegexString = "^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{6})$" + hexColorRegexString = "^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$" rgbRegexString = "^rgb\\(\\s*(?:(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])|(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%)\\s*\\)$" rgbaRegexString = "^rgba\\(\\s*(?:(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])|(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%\\s*,\\s*(?:0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])%)\\s*,\\s*(?:(?:0.[1-9]*)|[01])\\s*\\)$" hslRegexString = "^hsl\\(\\s*(?:0|[1-9]\\d?|[12]\\d\\d|3[0-5]\\d|360)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*,\\s*(?:(?:0|[1-9]\\d?|100)%)\\s*\\)$" @@ -29,6 +29,7 @@ const ( uUID4RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" uUID5RFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-5[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" uUIDRFC4122RegexString = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$" + uLIDRegexString = "^[A-HJKMNP-TV-Z0-9]{26}$" aSCIIRegexString = "^[\x00-\x7F]*$" printableASCIIRegexString = "^[\x20-\x7E]*$" multibyteRegexString = "[^\x00-\x7F]" @@ -51,6 +52,8 @@ const ( jWTRegexString = "^[A-Za-z0-9-_]+\\.[A-Za-z0-9-_]+\\.[A-Za-z0-9-_]*$" splitParamsRegexString = `'[^']*'|\S+` bicRegexString = `^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$` + semverRegexString = `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$` // numbered capture groups https://semver.org/ + dnsRegexStringRFC1035Label = "^[a-z]([-a-z0-9]*[a-z0-9]){0,62}$" ) var ( @@ -80,6 +83,7 @@ var ( uUID4RFC4122Regex = regexp.MustCompile(uUID4RFC4122RegexString) uUID5RFC4122Regex = regexp.MustCompile(uUID5RFC4122RegexString) uUIDRFC4122Regex = regexp.MustCompile(uUIDRFC4122RegexString) + uLIDRegex = regexp.MustCompile(uLIDRegexString) aSCIIRegex = regexp.MustCompile(aSCIIRegexString) printableASCIIRegex = regexp.MustCompile(printableASCIIRegexString) multibyteRegex = regexp.MustCompile(multibyteRegexString) @@ -102,4 +106,6 @@ var ( jWTRegex = regexp.MustCompile(jWTRegexString) splitParamsRegex = regexp.MustCompile(splitParamsRegexString) bicRegex = regexp.MustCompile(bicRegexString) + semverRegex = regexp.MustCompile(semverRegexString) + dnsRegexRFC1035Label = regexp.MustCompile(dnsRegexStringRFC1035Label) ) diff --git a/vendor/github.com/golang-jwt/jwt/v4/README.md b/vendor/github.com/golang-jwt/jwt/v4/README.md index 3072d24a9..01b21646e 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/README.md +++ 
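The regexes.go hunk above also widens hexColorRegexString to accept 4- and 8-digit hex colors carrying an alpha channel. A quick check through the existing hexcolor tag, assuming it is still wired to this regex:

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

func main() {
	validate := validator.New()

	// The widened regex accepts #RGBA and #RRGGBBAA in addition to the
	// classic #RGB and #RRGGBB forms.
	for _, c := range []string{"#fff", "#ffff", "#ffffff", "#80ffffff"} {
		fmt.Println(c, validate.Var(c, "hexcolor") == nil)
	}
}
```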
b/vendor/github.com/golang-jwt/jwt/v4/README.md @@ -46,9 +46,16 @@ See [the project documentation](https://pkg.go.dev/github.com/golang-jwt/jwt) fo ## Extensions -This library publishes all the necessary components for adding your own signing methods. Simply implement the `SigningMethod` interface and register a factory method using `RegisterSigningMethod`. +This library publishes all the necessary components for adding your own signing methods. Simply implement the `SigningMethod` interface and register a factory method using `RegisterSigningMethod`. -Here's an example of an extension that integrates with multiple Google Cloud Platform signing tools (AppEngine, IAM API, Cloud KMS): https://github.com/someone1/gcp-jwt-go +A common use case would be integrating with different 3rd party signature providers, like key management services from various cloud providers or Hardware Security Modules (HSMs). + +| Extension | Purpose | Repo | +|-----------|----------------------------------------------------------------------------------------------|--------------------------------------------| +| GCP | Integrates with multiple Google Cloud Platform signing tools (AppEngine, IAM API, Cloud KMS) | https://github.com/someone1/gcp-jwt-go | +| AWS | Integrates with AWS Key Management Service, KMS | https://github.com/matelang/jwt-go-aws-kms | + +*Disclaimer*: Unless otherwise specified, these integrations are maintained by third parties and should not be considered as a primary offer by any of the mentioned cloud providers ## Compliance @@ -112,3 +119,5 @@ This library uses descriptive error messages whenever possible. If you are not g Documentation can be found [on pkg.go.dev](https://pkg.go.dev/github.com/golang-jwt/jwt). The command line utility included in this project (cmd/jwt) provides a straightforward example of token creation and parsing as well as a useful tool for debugging your own integration. You'll also find several implementation examples in the documentation. + +[golang-jwt](https://github.com/orgs/golang-jwt) incorporates a modified version of the JWT logo, which is distributed under the terms of the [MIT License](https://github.com/jsonwebtoken/jsonwebtoken.github.io/blob/master/LICENSE.txt). diff --git a/vendor/github.com/golang-jwt/jwt/v4/claims.go b/vendor/github.com/golang-jwt/jwt/v4/claims.go index b07ac02de..4f00db2fb 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/claims.go +++ b/vendor/github.com/golang-jwt/jwt/v4/claims.go @@ -56,17 +56,17 @@ func (c RegisteredClaims) Valid() error { // default value in Go, let's not fail the verification for them. if !c.VerifyExpiresAt(now, false) { delta := now.Sub(c.ExpiresAt.Time) - vErr.Inner = fmt.Errorf("token is expired by %v", delta) + vErr.Inner = fmt.Errorf("%s by %v", delta, ErrTokenExpired) vErr.Errors |= ValidationErrorExpired } if !c.VerifyIssuedAt(now, false) { - vErr.Inner = fmt.Errorf("token used before issued") + vErr.Inner = ErrTokenUsedBeforeIssued vErr.Errors |= ValidationErrorIssuedAt } if !c.VerifyNotBefore(now, false) { - vErr.Inner = fmt.Errorf("token is not valid yet") + vErr.Inner = ErrTokenNotValidYet vErr.Errors |= ValidationErrorNotValidYet } @@ -83,7 +83,7 @@ func (c *RegisteredClaims) VerifyAudience(cmp string, req bool) bool { return verifyAud(c.Audience, cmp, req) } -// VerifyExpiresAt compares the exp claim against cmp (cmp <= exp). +// VerifyExpiresAt compares the exp claim against cmp (cmp < exp). // If req is false, it will return true, if exp is unset. 
func (c *RegisteredClaims) VerifyExpiresAt(cmp time.Time, req bool) bool { if c.ExpiresAt == nil { @@ -113,6 +113,12 @@ func (c *RegisteredClaims) VerifyNotBefore(cmp time.Time, req bool) bool { return verifyNbf(&c.NotBefore.Time, cmp, req) } +// VerifyIssuer compares the iss claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (c *RegisteredClaims) VerifyIssuer(cmp string, req bool) bool { + return verifyIss(c.Issuer, cmp, req) +} + // StandardClaims are a structured version of the JWT Claims Set, as referenced at // https://datatracker.ietf.org/doc/html/rfc7519#section-4. They do not follow the // specification exactly, since they were based on an earlier draft of the @@ -143,17 +149,17 @@ func (c StandardClaims) Valid() error { // default value in Go, let's not fail the verification for them. if !c.VerifyExpiresAt(now, false) { delta := time.Unix(now, 0).Sub(time.Unix(c.ExpiresAt, 0)) - vErr.Inner = fmt.Errorf("token is expired by %v", delta) + vErr.Inner = fmt.Errorf("%s by %v", delta, ErrTokenExpired) vErr.Errors |= ValidationErrorExpired } if !c.VerifyIssuedAt(now, false) { - vErr.Inner = fmt.Errorf("token used before issued") + vErr.Inner = ErrTokenUsedBeforeIssued vErr.Errors |= ValidationErrorIssuedAt } if !c.VerifyNotBefore(now, false) { - vErr.Inner = fmt.Errorf("token is not valid yet") + vErr.Inner = ErrTokenNotValidYet vErr.Errors |= ValidationErrorNotValidYet } @@ -170,7 +176,7 @@ func (c *StandardClaims) VerifyAudience(cmp string, req bool) bool { return verifyAud([]string{c.Audience}, cmp, req) } -// VerifyExpiresAt compares the exp claim against cmp (cmp <= exp). +// VerifyExpiresAt compares the exp claim against cmp (cmp < exp). // If req is false, it will return true, if exp is unset. func (c *StandardClaims) VerifyExpiresAt(cmp int64, req bool) bool { if c.ExpiresAt == 0 { diff --git a/vendor/github.com/golang-jwt/jwt/v4/errors.go b/vendor/github.com/golang-jwt/jwt/v4/errors.go index f309878b3..10ac8835c 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/errors.go +++ b/vendor/github.com/golang-jwt/jwt/v4/errors.go @@ -9,6 +9,18 @@ var ( ErrInvalidKey = errors.New("key is invalid") ErrInvalidKeyType = errors.New("key is of invalid type") ErrHashUnavailable = errors.New("the requested hash function is unavailable") + + ErrTokenMalformed = errors.New("token is malformed") + ErrTokenUnverifiable = errors.New("token is unverifiable") + ErrTokenSignatureInvalid = errors.New("token signature is invalid") + + ErrTokenInvalidAudience = errors.New("token has invalid audience") + ErrTokenExpired = errors.New("token is expired") + ErrTokenUsedBeforeIssued = errors.New("token used before issued") + ErrTokenInvalidIssuer = errors.New("token has invalid issuer") + ErrTokenNotValidYet = errors.New("token is not valid yet") + ErrTokenInvalidId = errors.New("token has invalid id") + ErrTokenInvalidClaims = errors.New("token has invalid claims") ) // The errors that might occur when parsing and validating a token @@ -53,7 +65,48 @@ func (e ValidationError) Error() string { } } +// Unwrap gives errors.Is and errors.As access to the inner error. +func (e *ValidationError) Unwrap() error { + return e.Inner +} + // No errors func (e *ValidationError) valid() bool { return e.Errors == 0 } + +// Is checks if this ValidationError is of the supplied error. We are first checking for the exact error message +// by comparing the inner error message. If that fails, we compare using the error flags. 
This way we can use +// custom error messages (mainly for backwards compatability) and still leverage errors.Is using the global error variables. +func (e *ValidationError) Is(err error) bool { + // Check, if our inner error is a direct match + if errors.Is(errors.Unwrap(e), err) { + return true + } + + // Otherwise, we need to match using our error flags + switch err { + case ErrTokenMalformed: + return e.Errors&ValidationErrorMalformed != 0 + case ErrTokenUnverifiable: + return e.Errors&ValidationErrorUnverifiable != 0 + case ErrTokenSignatureInvalid: + return e.Errors&ValidationErrorSignatureInvalid != 0 + case ErrTokenInvalidAudience: + return e.Errors&ValidationErrorAudience != 0 + case ErrTokenExpired: + return e.Errors&ValidationErrorExpired != 0 + case ErrTokenUsedBeforeIssued: + return e.Errors&ValidationErrorIssuedAt != 0 + case ErrTokenInvalidIssuer: + return e.Errors&ValidationErrorIssuer != 0 + case ErrTokenNotValidYet: + return e.Errors&ValidationErrorNotValidYet != 0 + case ErrTokenInvalidId: + return e.Errors&ValidationErrorId != 0 + case ErrTokenInvalidClaims: + return e.Errors&ValidationErrorClaimsInvalid != 0 + } + + return false +} diff --git a/vendor/github.com/golang-jwt/jwt/v4/map_claims.go b/vendor/github.com/golang-jwt/jwt/v4/map_claims.go index e7da633b9..2700d64a0 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/map_claims.go +++ b/vendor/github.com/golang-jwt/jwt/v4/map_claims.go @@ -126,16 +126,19 @@ func (m MapClaims) Valid() error { now := TimeFunc().Unix() if !m.VerifyExpiresAt(now, false) { + // TODO(oxisto): this should be replaced with ErrTokenExpired vErr.Inner = errors.New("Token is expired") vErr.Errors |= ValidationErrorExpired } if !m.VerifyIssuedAt(now, false) { + // TODO(oxisto): this should be replaced with ErrTokenUsedBeforeIssued vErr.Inner = errors.New("Token used before issued") vErr.Errors |= ValidationErrorIssuedAt } if !m.VerifyNotBefore(now, false) { + // TODO(oxisto): this should be replaced with ErrTokenNotValidYet vErr.Inner = errors.New("Token is not valid yet") vErr.Errors |= ValidationErrorNotValidYet } diff --git a/vendor/github.com/golang-jwt/jwt/v4/parser.go b/vendor/github.com/golang-jwt/jwt/v4/parser.go index 0c811f311..2f61a69d7 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/parser.go +++ b/vendor/github.com/golang-jwt/jwt/v4/parser.go @@ -8,14 +8,36 @@ import ( ) type Parser struct { - ValidMethods []string // If populated, only these methods will be considered valid - UseJSONNumber bool // Use JSON Number format in JSON decoder - SkipClaimsValidation bool // Skip claims validation during token parsing + // If populated, only these methods will be considered valid. + // + // Deprecated: In future releases, this field will not be exported anymore and should be set with an option to NewParser instead. + ValidMethods []string + + // Use JSON Number format in JSON decoder. + // + // Deprecated: In future releases, this field will not be exported anymore and should be set with an option to NewParser instead. + UseJSONNumber bool + + // Skip claims validation during token parsing. + // + // Deprecated: In future releases, this field will not be exported anymore and should be set with an option to NewParser instead. + SkipClaimsValidation bool } -// Parse parses, validates, and returns a token. 
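With the sentinel errors plus ValidationError.Unwrap/Is added above, callers can use errors.Is against parse failures instead of masking bit flags. A sketch assuming the vendored v4 API; the HMAC key is a throwaway example value:

```go
package main

import (
	"errors"
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v4"
)

func main() {
	key := []byte("example-hmac-key")

	// Sign a token that expired a minute ago.
	claims := jwt.RegisteredClaims{
		ExpiresAt: jwt.NewNumericDate(time.Now().Add(-time.Minute)),
	}
	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(key)
	if err != nil {
		panic(err)
	}

	_, err = jwt.ParseWithClaims(signed, &jwt.RegisteredClaims{}, func(t *jwt.Token) (interface{}, error) {
		return key, nil
	})

	// The returned error is a *ValidationError with flags, but the new
	// Is/Unwrap methods make the sentinel comparison work.
	fmt.Println(errors.Is(err, jwt.ErrTokenExpired)) // true
}
```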
+// NewParser creates a new Parser with the specified options +func NewParser(options ...ParserOption) *Parser { + p := &Parser{} + + // loop through our parsing options and apply them + for _, option := range options { + option(p) + } + + return p +} + +// Parse parses, validates, verifies the signature and returns the parsed token. // keyFunc will receive the parsed token and should return the key for validating. -// If everything is kosher, err will be nil func (p *Parser) Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { return p.ParseWithClaims(tokenString, MapClaims{}, keyFunc) } diff --git a/vendor/github.com/golang-jwt/jwt/v4/parser_option.go b/vendor/github.com/golang-jwt/jwt/v4/parser_option.go new file mode 100644 index 000000000..6ea6f9527 --- /dev/null +++ b/vendor/github.com/golang-jwt/jwt/v4/parser_option.go @@ -0,0 +1,29 @@ +package jwt + +// ParserOption is used to implement functional-style options that modify the behavior of the parser. To add +// new options, just create a function (ideally beginning with With or Without) that returns an anonymous function that +// takes a *Parser type as input and manipulates its configuration accordingly. +type ParserOption func(*Parser) + +// WithValidMethods is an option to supply algorithm methods that the parser will check. Only those methods will be considered valid. +// It is heavily encouraged to use this option in order to prevent attacks such as https://auth0.com/blog/critical-vulnerabilities-in-json-web-token-libraries/. +func WithValidMethods(methods []string) ParserOption { + return func(p *Parser) { + p.ValidMethods = methods + } +} + +// WithJSONNumber is an option to configure the underlying JSON parser with UseNumber +func WithJSONNumber() ParserOption { + return func(p *Parser) { + p.UseJSONNumber = true + } +} + +// WithoutClaimsValidation is an option to disable claims validation. This option should only be used if you exactly know +// what you are doing. +func WithoutClaimsValidation() ParserOption { + return func(p *Parser) { + p.SkipClaimsValidation = true + } +} diff --git a/vendor/github.com/golang-jwt/jwt/v4/signing_method.go b/vendor/github.com/golang-jwt/jwt/v4/signing_method.go index 3269170f3..241ae9c60 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/signing_method.go +++ b/vendor/github.com/golang-jwt/jwt/v4/signing_method.go @@ -33,3 +33,14 @@ func GetSigningMethod(alg string) (method SigningMethod) { } return } + +// GetAlgorithms returns a list of registered "alg" names +func GetAlgorithms() (algs []string) { + signingMethodLock.RLock() + defer signingMethodLock.RUnlock() + + for alg := range signingMethods { + algs = append(algs, alg) + } + return +} diff --git a/vendor/github.com/golang-jwt/jwt/v4/token.go b/vendor/github.com/golang-jwt/jwt/v4/token.go index b896acb0b..09b4cde5a 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/token.go +++ b/vendor/github.com/golang-jwt/jwt/v4/token.go @@ -7,6 +7,14 @@ import ( "time" ) + +// DecodePaddingAllowed will switch the codec used for decoding JWTs respectively. Note that the JWS RFC7515 +// states that the tokens will utilize a Base64url encoding with no padding. Unfortunately, some implementations +// of JWT are producing non-standard tokens, and thus require support for decoding. Note that this is a global +// variable, and updating it will change the behavior on a package level, and is also NOT go-routine safe. +// To use the non-recommended decoding, set this boolean to `true` prior to using this package. 
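The parser_option.go file added above introduces functional options for the parser, replacing direct writes to the now-deprecated exported fields. A usage sketch against the vendored v4 API, with an example key:

```go
package main

import (
	"fmt"

	"github.com/golang-jwt/jwt/v4"
)

func main() {
	key := []byte("example-hmac-key")
	signed, err := jwt.New(jwt.SigningMethodHS256).SignedString(key)
	if err != nil {
		panic(err)
	}

	// Options configure the parser instead of mutating Parser fields.
	parser := jwt.NewParser(
		jwt.WithValidMethods([]string{"HS256"}), // reject any other signing alg
		jwt.WithJSONNumber(),                    // decode numeric claims as json.Number
	)

	tok, err := parser.Parse(signed, func(t *jwt.Token) (interface{}, error) {
		return key, nil
	})
	fmt.Println(tok.Valid, err) // true <nil>
}
```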
+var DecodePaddingAllowed bool + // TimeFunc provides the current time when parsing token to validate "exp" claim (expiration time). // You can override it to use another time value. This is useful for testing or if your // server uses a different time zone than your tokens. @@ -29,11 +37,12 @@ type Token struct { Valid bool // Is the token valid? Populated when you Parse/Verify a token } -// New creates a new Token. Takes a signing method +// New creates a new Token with the specified signing method and an empty map of claims. func New(method SigningMethod) *Token { return NewWithClaims(method, MapClaims{}) } +// NewWithClaims creates a new Token with the specified signing method and claims. func NewWithClaims(method SigningMethod, claims Claims) *Token { return &Token{ Header: map[string]interface{}{ @@ -45,7 +54,8 @@ func NewWithClaims(method SigningMethod, claims Claims) *Token { } } -// SignedString retrieves the complete, signed token +// SignedString creates and returns a complete, signed JWT. +// The token is signed using the SigningMethod specified in the token. func (t *Token) SignedString(key interface{}) (string, error) { var sig, sstr string var err error @@ -64,33 +74,34 @@ func (t *Token) SignedString(key interface{}) (string, error) { // the SignedString. func (t *Token) SigningString() (string, error) { var err error - parts := make([]string, 2) - for i := range parts { - var jsonValue []byte - if i == 0 { - if jsonValue, err = json.Marshal(t.Header); err != nil { - return "", err - } - } else { - if jsonValue, err = json.Marshal(t.Claims); err != nil { - return "", err - } - } + var jsonValue []byte - parts[i] = EncodeSegment(jsonValue) + if jsonValue, err = json.Marshal(t.Header); err != nil { + return "", err } - return strings.Join(parts, "."), nil + header := EncodeSegment(jsonValue) + + if jsonValue, err = json.Marshal(t.Claims); err != nil { + return "", err + } + claim := EncodeSegment(jsonValue) + + return strings.Join([]string{header, claim}, "."), nil } -// Parse parses, validates, and returns a token. -// keyFunc will receive the parsed token and should return the key for validating. -// If everything is kosher, err will be nil -func Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { - return new(Parser).Parse(tokenString, keyFunc) +// Parse parses, validates, verifies the signature and returns the parsed token. +// keyFunc will receive the parsed token and should return the cryptographic key +// for verifying the signature. +// The caller is strongly encouraged to set the WithValidMethods option to +// validate the 'alg' claim in the token matches the expected algorithm. 
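The DecodePaddingAllowed switch declared above opts into accepting padded base64url segments from non-conforming token producers. A small sketch of the before/after behaviour, assuming the vendored DecodeSegment:

```go
package main

import (
	"fmt"

	"github.com/golang-jwt/jwt/v4"
)

func main() {
	padded := "eyJmb28iOiJiYXIifQ==" // base64url of {"foo":"bar"} with padding

	// RFC 7515 segments are unpadded, so the default decoder rejects this.
	_, err := jwt.DecodeSegment(padded)
	fmt.Println(err != nil) // true

	// The package-level switch tolerates padded input. It is global and
	// not goroutine safe, so set it once during startup if you need it.
	jwt.DecodePaddingAllowed = true
	b, err := jwt.DecodeSegment(padded)
	fmt.Println(string(b), err) // {"foo":"bar"} <nil>
}
```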
+// For more details about the importance of validating the 'alg' claim, +// see https://auth0.com/blog/critical-vulnerabilities-in-json-web-token-libraries/ +func Parse(tokenString string, keyFunc Keyfunc, options ...ParserOption) (*Token, error) { + return NewParser(options...).Parse(tokenString, keyFunc) } -func ParseWithClaims(tokenString string, claims Claims, keyFunc Keyfunc) (*Token, error) { - return new(Parser).ParseWithClaims(tokenString, claims, keyFunc) +func ParseWithClaims(tokenString string, claims Claims, keyFunc Keyfunc, options ...ParserOption) (*Token, error) { + return NewParser(options...).ParseWithClaims(tokenString, claims, keyFunc) } // EncodeSegment encodes a JWT specific base64url encoding with padding stripped @@ -106,5 +117,12 @@ func EncodeSegment(seg []byte) string { // Deprecated: In a future release, we will demote this function to a non-exported function, since it // should only be used internally func DecodeSegment(seg string) ([]byte, error) { + if DecodePaddingAllowed { + if l := len(seg) % 4; l > 0 { + seg += strings.Repeat("=", 4-l) + } + return base64.URLEncoding.DecodeString(seg) + } + return base64.RawURLEncoding.DecodeString(seg) } diff --git a/vendor/github.com/golang-jwt/jwt/v4/types.go b/vendor/github.com/golang-jwt/jwt/v4/types.go index 15c39a302..2c647fd2e 100644 --- a/vendor/github.com/golang-jwt/jwt/v4/types.go +++ b/vendor/github.com/golang-jwt/jwt/v4/types.go @@ -3,6 +3,7 @@ package jwt import ( "encoding/json" "fmt" + "math" "reflect" "strconv" "time" @@ -41,15 +42,20 @@ func NewNumericDate(t time.Time) *NumericDate { // newNumericDateFromSeconds creates a new *NumericDate out of a float64 representing a // UNIX epoch with the float fraction representing non-integer seconds. func newNumericDateFromSeconds(f float64) *NumericDate { - return NewNumericDate(time.Unix(0, int64(f*float64(time.Second)))) + round, frac := math.Modf(f) + return NewNumericDate(time.Unix(int64(round), int64(frac*1e9))) } // MarshalJSON is an implementation of the json.RawMessage interface and serializes the UNIX epoch // represented in NumericDate to a byte array, using the precision specified in TimePrecision. func (date NumericDate) MarshalJSON() (b []byte, err error) { + var prec int + if TimePrecision < time.Second { + prec = int(math.Log10(float64(time.Second) / float64(TimePrecision))) + } f := float64(date.Truncate(TimePrecision).UnixNano()) / float64(time.Second) - return []byte(strconv.FormatFloat(f, 'f', -1, 64)), nil + return []byte(strconv.FormatFloat(f, 'f', prec, 64)), nil } // UnmarshalJSON is an implementation of the json.RawMessage interface and deserializses a diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go index 5c3fda705..cc6606a39 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go +++ b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go @@ -257,7 +257,7 @@ const ( // and to reduce the amount of disk activity caused by using // cache entries, used only updates the mtime if the current // mtime is more than an hour old. This heuristic eliminates -// nearly all of the mtime updates that would otherwise happen, +// nearly all the mtime updates that would otherwise happen, // while still keeping the mtimes useful for cache trimming. 
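A short sketch of the padding-tolerant decoding toggle introduced above; as the doc comment warns, DecodePaddingAllowed is a package-level switch and not goroutine-safe, so it is set once at start-up here:

package main

import (
	"fmt"

	"github.com/golang-jwt/jwt/v4"
)

func main() {
	raw := jwt.EncodeSegment([]byte(`{"alg":"none"}`)) // RFC 7515 form: base64url without padding

	// Accept segments from non-conforming producers that include '=' padding.
	jwt.DecodePaddingAllowed = true

	padded := raw + "=" // the padded variant such a producer might emit
	b, err := jwt.DecodeSegment(padded)
	fmt.Println(string(b), err) // {"alg":"none"} <nil>
}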
func (c *Cache) used(file string) error { info, err := os.Stat(file) @@ -311,7 +311,7 @@ func (c *Cache) trimSubdir(subdir string, cutoff time.Time) { // Read all directory entries from subdir before removing // any files, in case removing files invalidates the file offset // in the directory scan. Also, ignore error from f.Readdirnames, - // because we don't care about reporting the error and we still + // because we don't care about reporting the error, and we still // want to process any entries found before the error. f, err := os.Open(subdir) if err != nil { diff --git a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go index 86007d042..83e607387 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go +++ b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go @@ -26,7 +26,7 @@ const ( ) // Cache is a per-package data cache. A cached data is invalidated when -// package or it's dependencies change. +// package, or it's dependencies change. type Cache struct { lowLevelCache *cache.Cache pkgHashes sync.Map diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go index 5963027ee..6cc2f03d0 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go +++ b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go @@ -53,7 +53,7 @@ func retry(f func() (err error, mayRetry bool)) error { // rename is like os.Rename, but retries ephemeral errors. // -// On windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with +// On Windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with // MOVEFILE_REPLACE_EXISTING. 
// // Windows also provides a different system call, ReplaceFile, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go index 359e2d63c..ac3a3ee63 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go @@ -8,6 +8,7 @@ import ( "github.com/spf13/cobra" "github.com/golangci/golangci-lint/internal/cache" + "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" ) @@ -51,7 +52,7 @@ func (e *Executor) executeCleanCache(_ *cobra.Command, args []string) { e.log.Fatalf("Failed to remove dir %s: %s", cacheDir, err) } - os.Exit(0) + os.Exit(exitcodes.Success) } func (e *Executor) executeCacheStatus(_ *cobra.Command, args []string) { @@ -66,7 +67,7 @@ func (e *Executor) executeCacheStatus(_ *cobra.Command, args []string) { fmt.Fprintf(logutils.StdOut, "Size: %s\n", fsutils.PrettifyBytesCount(cacheSizeBytes)) } - os.Exit(0) + os.Exit(exitcodes.Success) } func dirSizeBytes(path string) (int64, error) { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go index 0f2205970..e9546d328 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go @@ -64,5 +64,5 @@ func (e *Executor) executePathCmd(_ *cobra.Command, args []string) { } fmt.Println(usedConfigFile) - os.Exit(0) + os.Exit(exitcodes.Success) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go index dc3bb4731..677018a6c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go @@ -9,6 +9,7 @@ import ( "github.com/fatih/color" "github.com/spf13/cobra" + "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/logutils" ) @@ -53,8 +54,13 @@ func printLinterConfigs(lcs []*linter.Config) { linterDescription = linterDescription[:firstNewline] } - fmt.Fprintf(logutils.StdOut, "%s%s: %s [fast: %t, auto-fix: %t]\n", color.YellowString(lc.Name()), - altNamesStr, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix) + deprecatedMark := "" + if lc.IsDeprecated() { + deprecatedMark = " [" + color.RedString("deprecated") + "]" + } + + fmt.Fprintf(logutils.StdOut, "%s%s%s: %s [fast: %t, auto-fix: %t]\n", color.YellowString(lc.Name()), + altNamesStr, deprecatedMark, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix) } } @@ -88,5 +94,5 @@ func (e *Executor) executeLintersHelp(_ *cobra.Command, args []string) { fmt.Fprintf(logutils.StdOut, "%s: %s\n", color.YellowString(p), strings.Join(linterNames, ", ")) } - os.Exit(0) + os.Exit(exitcodes.Success) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go index bb096942f..63328e4e9 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go @@ -7,6 +7,7 @@ import ( "github.com/fatih/color" "github.com/spf13/cobra" + "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/lint/linter" ) @@ -48,5 +49,5 @@ func (e 
*Executor) executeLinters(_ *cobra.Command, args []string) { color.Red("\nDisabled by your configuration linters:\n") printLinterConfigs(disabledLCs) - os.Exit(0) + os.Exit(exitcodes.Success) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go index f90df9901..141fc87f2 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go @@ -12,13 +12,14 @@ import ( "github.com/spf13/pflag" "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/logutils" ) func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) { if e.cfg.Run.PrintVersion { fmt.Fprintf(logutils.StdOut, "golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) - os.Exit(0) + os.Exit(exitcodes.Success) } runtime.GOMAXPROCS(e.cfg.Run.Concurrency) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go index 23a9b064a..f75fa82f3 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go @@ -26,6 +26,8 @@ import ( "github.com/golangci/golangci-lint/pkg/result/processors" ) +const defaultFileMode = 0644 + func getDefaultIssueExcludeHelp() string { parts := []string{"Use or not use default excludes:"} for _, ep := range config.DefaultExcludePatterns { @@ -53,7 +55,7 @@ func wh(text string) string { const defaultTimeout = time.Minute -//nolint:funlen +//nolint:funlen,gomnd func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, isFinalInit bool) { hideFlag := func(name string) { if err := fs.MarkHidden(name); err != nil { @@ -243,7 +245,7 @@ func (e *Executor) initRunConfiguration(cmd *cobra.Command) { func (e *Executor) getConfigForCommandLine() (*config.Config, error) { // We use another pflag.FlagSet here to not set `changed` flag - // on cmd.Flags() options. Otherwise string slice options will be duplicated. + // on cmd.Flags() options. Otherwise, string slice options will be duplicated. fs := pflag.NewFlagSet("config flag set", pflag.ContinueOnError) var cfg config.Config @@ -259,7 +261,7 @@ func (e *Executor) getConfigForCommandLine() (*config.Config, error) { // cfg vs e.cfg. 
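The comment above about using a second pflag.FlagSet (so the real cmd.Flags() options are never marked as changed) boils down to parsing os.Args into a scratch flag set first; a rough sketch of the idea, with an illustrative flag:

package main

import (
	"fmt"
	"os"

	"github.com/spf13/pflag"
)

func main() {
	// Scratch flag set: parsing it leaves the command's real flag set untouched,
	// so string-slice defaults are not duplicated when the real set is parsed later.
	fs := pflag.NewFlagSet("config flag set", pflag.ContinueOnError)
	cfgPath := fs.String("config", "", "path to the config file") // illustrative flag

	fs.Usage = func() {} // otherwise, help text would be printed twice (as noted in the patch)

	if err := fs.Parse(os.Args[1:]); err != nil && err != pflag.ErrHelp {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	fmt.Println("pre-loading config from:", *cfgPath)
}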
initRootFlagSet(fs, &cfg, true) - fs.Usage = func() {} // otherwise help text will be printed twice + fs.Usage = func() {} // otherwise, help text will be printed twice if err := fs.Parse(os.Args); err != nil { if err == pflag.ErrHelp { return nil, err @@ -400,44 +402,89 @@ func (e *Executor) runAndPrint(ctx context.Context, args []string) error { return err // XXX: don't loose type } - p, err := e.createPrinter() - if err != nil { - return err + formats := strings.Split(e.cfg.Output.Format, ",") + for _, format := range formats { + out := strings.SplitN(format, ":", 2) + if len(out) < 2 { + out = append(out, "") + } + + err := e.printReports(ctx, issues, out[1], out[0]) + if err != nil { + return err + } } e.setExitCodeIfIssuesFound(issues) - if err = p.Print(ctx, issues); err != nil { - return fmt.Errorf("can't print %d issues: %s", len(issues), err) - } - e.fileCache.PrintStats(e.log) return nil } -func (e *Executor) createPrinter() (printers.Printer, error) { +func (e *Executor) printReports(ctx context.Context, issues []result.Issue, path, format string) error { + w, shouldClose, err := e.createWriter(path) + if err != nil { + return fmt.Errorf("can't create output for %s: %w", path, err) + } + + p, err := e.createPrinter(format, w) + if err != nil { + if file, ok := w.(io.Closer); shouldClose && ok { + _ = file.Close() + } + return err + } + + if err = p.Print(ctx, issues); err != nil { + if file, ok := w.(io.Closer); shouldClose && ok { + _ = file.Close() + } + return fmt.Errorf("can't print %d issues: %s", len(issues), err) + } + + if file, ok := w.(io.Closer); shouldClose && ok { + _ = file.Close() + } + + return nil +} + +func (e *Executor) createWriter(path string) (io.Writer, bool, error) { + if path == "" || path == "stdout" { + return logutils.StdOut, false, nil + } + if path == "stderr" { + return logutils.StdErr, false, nil + } + f, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, defaultFileMode) + if err != nil { + return nil, false, err + } + return f, true, nil +} + +func (e *Executor) createPrinter(format string, w io.Writer) (printers.Printer, error) { var p printers.Printer - format := e.cfg.Output.Format switch format { case config.OutFormatJSON: - p = printers.NewJSON(&e.reportData) + p = printers.NewJSON(&e.reportData, w) case config.OutFormatColoredLineNumber, config.OutFormatLineNumber: p = printers.NewText(e.cfg.Output.PrintIssuedLine, format == config.OutFormatColoredLineNumber, e.cfg.Output.PrintLinterName, - e.log.Child("text_printer")) + e.log.Child("text_printer"), w) case config.OutFormatTab: - p = printers.NewTab(e.cfg.Output.PrintLinterName, e.log.Child("tab_printer")) + p = printers.NewTab(e.cfg.Output.PrintLinterName, e.log.Child("tab_printer"), w) case config.OutFormatCheckstyle: - p = printers.NewCheckstyle() + p = printers.NewCheckstyle(w) case config.OutFormatCodeClimate: - p = printers.NewCodeClimate() + p = printers.NewCodeClimate(w) case config.OutFormatHTML: - p = printers.NewHTML() + p = printers.NewHTML(w) case config.OutFormatJunitXML: - p = printers.NewJunitXML() + p = printers.NewJunitXML(w) case config.OutFormatGithubActions: - p = printers.NewGithub() + p = printers.NewGithub(w) default: return nil, fmt.Errorf("unknown output format %s", format) } @@ -479,7 +526,6 @@ func (e *Executor) executeRun(_ *cobra.Command, args []string) { // to be removed when deadline is finally decommissioned func (e *Executor) setTimeoutToDeadlineIfOnlyDeadlineIsSet() { - // nolint:staticcheck deadlineValue := e.cfg.Run.Deadline if deadlineValue 
!= 0 && e.cfg.Run.Timeout == defaultTimeout { e.cfg.Run.Timeout = deadlineValue @@ -497,7 +543,7 @@ func (e *Executor) setupExitCode(ctx context.Context) { return } - needFailOnWarnings := (os.Getenv("GL_TEST_RUN") == "1" || os.Getenv("FAIL_ON_WARNINGS") == "1") + needFailOnWarnings := os.Getenv("GL_TEST_RUN") == "1" || os.Getenv("FAIL_ON_WARNINGS") == "1" if needFailOnWarnings && len(e.reportData.Warnings) != 0 { e.exitCode = exitcodes.WarningInTest return diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go index f41705c89..49df4e495 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go @@ -17,7 +17,7 @@ type Config struct { InternalTest bool // Option is used only for testing golangci-lint code, don't use it } -// getConfigDir returns the directory that contains golangci config file. +// GetConfigDir returns the directory that contains golangci config file. func (c *Config) GetConfigDir() string { return c.cfgDir } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go index 840b283fe..36a9af34c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go @@ -3,20 +3,35 @@ package config import "github.com/pkg/errors" var defaultLintersSettings = LintersSettings{ - Lll: LllSettings{ - LineLength: 120, - TabWidth: 1, + Decorder: DecorderSettings{ + DecOrder: []string{"type", "const", "var", "func"}, + DisableDecNumCheck: true, + DisableDecOrderCheck: true, + DisableInitFuncFirstCheck: true, }, - Unparam: UnparamSettings{ - Algo: "cha", + Dogsled: DogsledSettings{ + MaxBlankIdentifiers: 2, }, - Nakedret: NakedretSettings{ - MaxFuncLines: 30, + ErrorLint: ErrorLintSettings{ + Errorf: true, + Asserts: true, + Comparison: true, }, - Prealloc: PreallocSettings{ - Simple: true, - RangeLoops: true, - ForLoops: false, + Exhaustive: ExhaustiveSettings{ + CheckGenerated: false, + DefaultSignifiesExhaustive: false, + IgnoreEnumMembers: "", + PackageScopeOnly: false, + }, + Forbidigo: ForbidigoSettings{ + ExcludeGodocExamples: true, + }, + Gci: GciSettings{ + Sections: []string{"standard", "default"}, + SectionSeparator: []string{"newline"}, + }, + Gocognit: GocognitSettings{ + MinComplexity: 30, }, Gocritic: GocriticSettings{ SettingsPerCheck: map[string]GocriticCheckSettings{}, @@ -24,11 +39,55 @@ var defaultLintersSettings = LintersSettings{ Godox: GodoxSettings{ Keywords: []string{}, }, - Dogsled: DogsledSettings{ - MaxBlankIdentifiers: 2, + Godot: GodotSettings{ + Scope: "declarations", + Period: true, }, - Gocognit: GocognitSettings{ - MinComplexity: 30, + Gofumpt: GofumptSettings{ + LangVersion: "", + ExtraRules: false, + }, + Ifshort: IfshortSettings{ + MaxDeclLines: 1, + MaxDeclChars: 30, + }, + Lll: LllSettings{ + LineLength: 120, + TabWidth: 1, + }, + MaintIdx: MaintIdxSettings{ + Under: 20, + }, + Nakedret: NakedretSettings{ + MaxFuncLines: 30, + }, + Nestif: NestifSettings{ + MinComplexity: 5, + }, + NoLintLint: NoLintLintSettings{ + RequireExplanation: false, + AllowLeadingSpace: true, + RequireSpecific: false, + AllowUnused: false, + }, + Prealloc: PreallocSettings{ + Simple: true, + RangeLoops: true, + ForLoops: false, + }, + Predeclared: PredeclaredSettings{ + Ignore: "", + Qualified: false, + }, + 
Testpackage: TestpackageSettings{ + SkipRegexp: `(export|internal)_test\.go`, + }, + Unparam: UnparamSettings{ + Algo: "cha", + }, + Varnamelen: VarnamelenSettings{ + MaxDistance: 5, + MinNameLength: 3, }, WSL: WSLSettings{ StrictAppend: true, @@ -42,50 +101,17 @@ var defaultLintersSettings = LintersSettings{ ForceExclusiveShortDeclarations: false, ForceCaseTrailingWhitespaceLimit: 0, }, - NoLintLint: NoLintLintSettings{ - RequireExplanation: false, - AllowLeadingSpace: true, - RequireSpecific: false, - AllowUnused: false, - }, - Testpackage: TestpackageSettings{ - SkipRegexp: `(export|internal)_test\.go`, - }, - Nestif: NestifSettings{ - MinComplexity: 5, - }, - Exhaustive: ExhaustiveSettings{ - CheckGenerated: false, - DefaultSignifiesExhaustive: false, - }, - Gofumpt: GofumptSettings{ - LangVersion: "", - ExtraRules: false, - }, - ErrorLint: ErrorLintSettings{ - Errorf: true, - Asserts: true, - Comparison: true, - }, - Ifshort: IfshortSettings{ - MaxDeclLines: 1, - MaxDeclChars: 30, - }, - Predeclared: PredeclaredSettings{ - Ignore: "", - Qualified: false, - }, - Forbidigo: ForbidigoSettings{ - ExcludeGodocExamples: true, - }, } type LintersSettings struct { + BiDiChk BiDiChkSettings Cyclop Cyclop + Decorder DecorderSettings Depguard DepGuardSettings Dogsled DogsledSettings Dupl DuplSettings Errcheck ErrcheckSettings + ErrChkJSON ErrChkJSONSettings ErrorLint ErrorLintSettings Exhaustive ExhaustiveSettings ExhaustiveStruct ExhaustiveStructSettings @@ -109,10 +135,12 @@ type LintersSettings struct { Gosec GoSecSettings Gosimple StaticCheckSettings Govet GovetSettings + Grouper GrouperSettings Ifshort IfshortSettings - Ireturn IreturnSettings ImportAs ImportAsSettings + Ireturn IreturnSettings Lll LllSettings + MaintIdx MaintIdxSettings Makezero MakezeroSettings Maligned MalignedSettings Misspell MisspellSettings @@ -130,9 +158,9 @@ type LintersSettings struct { Structcheck StructCheckSettings Stylecheck StaticCheckSettings Tagliatelle TagliatelleSettings + Tenv TenvSettings Testpackage TestpackageSettings Thelper ThelperSettings - Tenv TenvSettings Unparam UnparamSettings Unused StaticCheckSettings Varcheck VarCheckSettings @@ -144,6 +172,18 @@ type LintersSettings struct { Custom map[string]CustomLinterSettings } +type BiDiChkSettings struct { + LeftToRightEmbedding bool `mapstructure:"left-to-right-embedding"` + RightToLeftEmbedding bool `mapstructure:"right-to-left-embedding"` + PopDirectionalFormatting bool `mapstructure:"pop-directional-formatting"` + LeftToRightOverride bool `mapstructure:"left-to-right-override"` + RightToLeftOverride bool `mapstructure:"right-to-left-override"` + LeftToRightIsolate bool `mapstructure:"left-to-right-isolate"` + RightToLeftIsolate bool `mapstructure:"right-to-left-isolate"` + FirstStrongIsolate bool `mapstructure:"first-strong-isolate"` + PopDirectionalIsolate bool `mapstructure:"pop-directional-isolate"` +} + type Cyclop struct { MaxComplexity int `mapstructure:"max-complexity"` PackageAverage float64 `mapstructure:"package-average"` @@ -153,8 +193,17 @@ type Cyclop struct { type DepGuardSettings struct { ListType string `mapstructure:"list-type"` Packages []string - IncludeGoRoot bool `mapstructure:"include-go-root"` - PackagesWithErrorMessage map[string]string `mapstructure:"packages-with-error-message"` + IncludeGoRoot bool `mapstructure:"include-go-root"` + PackagesWithErrorMessage map[string]string `mapstructure:"packages-with-error-message"` + IgnoreFileRules []string `mapstructure:"ignore-file-rules"` + AdditionalGuards []DepGuardSettings 
`mapstructure:"additional-guards"` +} + +type DecorderSettings struct { + DecOrder []string `mapstructure:"dec-order"` + DisableDecNumCheck bool `mapstructure:"disable-dec-num-check"` + DisableDecOrderCheck bool `mapstructure:"disable-dec-order-check"` + DisableInitFuncFirstCheck bool `mapstructure:"disable-init-func-first-check"` } type DogsledSettings struct { @@ -175,6 +224,11 @@ type ErrcheckSettings struct { Exclude string `mapstructure:"exclude"` } +type ErrChkJSONSettings struct { + CheckErrorFreeEncoding bool `mapstructure:"check-error-free-encoding"` + ReportNoExported bool `mapstructure:"report-no-exported"` +} + type ErrorLintSettings struct { Errorf bool `mapstructure:"errorf"` Asserts bool `mapstructure:"asserts"` @@ -184,18 +238,14 @@ type ErrorLintSettings struct { type ExhaustiveSettings struct { CheckGenerated bool `mapstructure:"check-generated"` DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"` - IgnorePattern string `mapstructure:"ignore-pattern"` + IgnoreEnumMembers string `mapstructure:"ignore-enum-members"` + PackageScopeOnly bool `mapstructure:"package-scope-only"` } type ExhaustiveStructSettings struct { StructPatterns []string `mapstructure:"struct-patterns"` } -type IreturnSettings struct { - Allow []string `mapstructure:"allow"` - Reject []string `mapstructure:"reject"` -} - type ForbidigoSettings struct { Forbid []string `mapstructure:"forbid"` ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"` @@ -207,7 +257,11 @@ type FunlenSettings struct { } type GciSettings struct { - LocalPrefixes string `mapstructure:"local-prefixes"` + LocalPrefixes string `mapstructure:"local-prefixes"` // Deprecated + NoInlineComments bool `mapstructure:"no-inline-comments"` + NoPrefixComments bool `mapstructure:"no-prefix-comments"` + Sections []string `mapstructure:"sections"` + SectionSeparator []string `mapstructure:"section-separators"` } type GocognitSettings struct { @@ -233,6 +287,7 @@ type GodotSettings struct { Scope string `mapstructure:"scope"` Exclude []string `mapstructure:"exclude"` Capital bool `mapstructure:"capital"` + Period bool `mapstructure:"period"` // Deprecated: use `Scope` instead CheckAll bool `mapstructure:"check-all"` @@ -266,7 +321,11 @@ type GoLintSettings struct { } type GoMndSettings struct { - Settings map[string]map[string]interface{} + Settings map[string]map[string]interface{} // Deprecated + Checks []string `mapstructure:"checks"` + IgnoredNumbers []string `mapstructure:"ignored-numbers"` + IgnoredFiles []string `mapstructure:"ignored-files"` + IgnoredFunctions []string `mapstructure:"ignored-functions"` } type GoModDirectivesSettings struct { @@ -326,14 +385,26 @@ func (cfg GovetSettings) Validate() error { return nil } +type GrouperSettings struct { + ConstRequireSingleConst bool `mapstructure:"const-require-single-const"` + ConstRequireGrouping bool `mapstructure:"const-require-grouping"` + ImportRequireSingleImport bool `mapstructure:"import-require-single-import"` + ImportRequireGrouping bool `mapstructure:"import-require-grouping"` + TypeRequireSingleType bool `mapstructure:"type-require-single-type"` + TypeRequireGrouping bool `mapstructure:"type-require-grouping"` + VarRequireSingleVar bool `mapstructure:"var-require-single-var"` + VarRequireGrouping bool `mapstructure:"var-require-grouping"` +} + type IfshortSettings struct { MaxDeclLines int `mapstructure:"max-decl-lines"` MaxDeclChars int `mapstructure:"max-decl-chars"` } type ImportAsSettings struct { - Alias []ImportAsAlias - NoUnaliased bool 
`mapstructure:"no-unaliased"` + Alias []ImportAsAlias + NoUnaliased bool `mapstructure:"no-unaliased"` + NoExtraAliases bool `mapstructure:"no-extra-aliases"` } type ImportAsAlias struct { @@ -341,11 +412,20 @@ type ImportAsAlias struct { Alias string } +type IreturnSettings struct { + Allow []string `mapstructure:"allow"` + Reject []string `mapstructure:"reject"` +} + type LllSettings struct { LineLength int `mapstructure:"line-length"` TabWidth int `mapstructure:"tab-width"` } +type MaintIdxSettings struct { + Under int `mapstructure:"under"` +} + type MakezeroSettings struct { Always bool } @@ -400,6 +480,7 @@ type PromlinterSettings struct { } type ReviveSettings struct { + MaxOpenFiles int `mapstructure:"max-open-files"` IgnoreGeneratedHeader bool `mapstructure:"ignore-generated-header"` Confidence float64 Severity string @@ -482,11 +563,15 @@ type VarCheckSettings struct { } type VarnamelenSettings struct { - MaxDistance int `mapstructure:"max-distance"` - MinNameLength int `mapstructure:"min-name-length"` - CheckReceiver bool `mapstructure:"check-receiver"` - CheckReturn bool `mapstructure:"check-return"` - IgnoreNames []string `mapstructure:"ignore-names"` + MaxDistance int `mapstructure:"max-distance"` + MinNameLength int `mapstructure:"min-name-length"` + CheckReceiver bool `mapstructure:"check-receiver"` + CheckReturn bool `mapstructure:"check-return"` + IgnoreNames []string `mapstructure:"ignore-names"` + IgnoreTypeAssertOk bool `mapstructure:"ignore-type-assert-ok"` + IgnoreMapIndexOk bool `mapstructure:"ignore-map-index-ok"` + IgnoreChanRecvOk bool `mapstructure:"ignore-chan-recv-ok"` + IgnoreDecls []string `mapstructure:"ignore-decls"` } type WhitespaceSettings struct { @@ -496,6 +581,7 @@ type WhitespaceSettings struct { type WrapcheckSettings struct { IgnoreSigs []string `mapstructure:"ignoreSigs"` + IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps"` IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs"` } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go index cd68ef82a..9a3d03d93 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go @@ -355,8 +355,9 @@ func filterByDisableTags(enabledChecks, disableTags []string, log logutils.Log) if len(hitTags) != 0 { delete(enabledChecksSet, enabledCheck) } - debugChecksListf(enabledChecks, "Disabled by config tags %s", sprintStrings(disableTags)) } + debugChecksListf(enabledChecks, "Disabled by config tags %s", sprintStrings(disableTags)) + enabledChecks = nil for enabledCheck := range enabledChecksSet { enabledChecks = append(enabledChecks, enabledCheck) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go index 9f368341b..e8824c753 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go @@ -10,6 +10,7 @@ import ( "github.com/mitchellh/go-homedir" "github.com/spf13/viper" + "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/sliceutil" @@ -87,7 +88,7 @@ func (r *FileReader) parseConfig() error { if r.cfg.InternalTest { // just for testing purposes: to detect config 
file usage fmt.Fprintln(logutils.StdOut, "test") - os.Exit(0) + os.Exit(exitcodes.Success) } return nil diff --git a/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go b/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go index 536f90361..83331dbe7 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go @@ -1,14 +1,14 @@ package exitcodes const ( - Success = 0 - IssuesFound = 1 - WarningInTest = 2 - Failure = 3 - Timeout = 4 - NoGoFiles = 5 - NoConfigFileDetected = 6 - ErrorWasLogged = 7 + Success = iota + IssuesFound + WarningInTest + Failure + Timeout + NoGoFiles + NoConfigFileDetected + ErrorWasLogged ) type ExitError struct { @@ -30,5 +30,3 @@ var ( Code: Failure, } ) - -// 1 diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go index e1b467cc1..44215b7e9 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go @@ -1,17 +1,59 @@ package golinters import ( + "strings" + "github.com/breml/bidichk/pkg/bidichk" "golang.org/x/tools/go/analysis" + "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" ) -func NewBiDiChkFuncName() *goanalysis.Linter { +func NewBiDiChkFuncName(cfg *config.BiDiChkSettings) *goanalysis.Linter { + a := bidichk.NewAnalyzer() + + cfgMap := map[string]map[string]interface{}{} + if cfg != nil { + var opts []string + + if cfg.LeftToRightEmbedding { + opts = append(opts, "LEFT-TO-RIGHT-EMBEDDING") + } + if cfg.RightToLeftEmbedding { + opts = append(opts, "RIGHT-TO-LEFT-EMBEDDING") + } + if cfg.PopDirectionalFormatting { + opts = append(opts, "POP-DIRECTIONAL-FORMATTING") + } + if cfg.LeftToRightOverride { + opts = append(opts, "LEFT-TO-RIGHT-OVERRIDE") + } + if cfg.RightToLeftOverride { + opts = append(opts, "RIGHT-TO-LEFT-OVERRIDE") + } + if cfg.LeftToRightIsolate { + opts = append(opts, "LEFT-TO-RIGHT-ISOLATE") + } + if cfg.RightToLeftIsolate { + opts = append(opts, "RIGHT-TO-LEFT-ISOLATE") + } + if cfg.FirstStrongIsolate { + opts = append(opts, "FIRST-STRONG-ISOLATE") + } + if cfg.PopDirectionalIsolate { + opts = append(opts, "POP-DIRECTIONAL-ISOLATE") + } + + cfgMap[a.Name] = map[string]interface{}{ + "disallowed-runes": strings.Join(opts, ","), + } + } + return goanalysis.NewLinter( "bidichk", "Checks for dangerous unicode character sequences", - []*analysis.Analyzer{bidichk.Analyzer}, - nil, + []*analysis.Analyzer{a}, + cfgMap, ).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go new file mode 100644 index 000000000..8592eef1f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/sivchari/containedctx" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewContainedCtx() *goanalysis.Linter { + a := containedctx.Analyzer + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go 
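To make the new multi-output handling in runAndPrint above concrete: the output format is now split on commas into format:path pairs, where an empty path means stdout. A small sketch replaying that parsing on an illustrative value:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Illustrative value, e.g. from the output.format setting or --out-format flag.
	outFormat := "colored-line-number,checkstyle:report.xml"

	for _, format := range strings.Split(outFormat, ",") {
		out := strings.SplitN(format, ":", 2)
		if len(out) < 2 {
			out = append(out, "") // empty path: createWriter falls back to stdout
		}
		fmt.Printf("printer=%q path=%q\n", out[0], out[1])
	}
	// printer="colored-line-number" path=""
	// printer="checkstyle" path="report.xml"
}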
new file mode 100644 index 000000000..672f206ea --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go @@ -0,0 +1,38 @@ +package golinters + +import ( + "strings" + + "gitlab.com/bosi/decorder" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewDecorder(settings *config.DecorderSettings) *goanalysis.Linter { + a := decorder.Analyzer + + analyzers := []*analysis.Analyzer{a} + + // disable all rules/checks by default + cfg := map[string]interface{}{ + "disable-dec-num-check": true, + "disable-dec-order-check": true, + "disable-init-func-first-check": true, + } + + if settings != nil { + cfg["dec-order"] = strings.Join(settings.DecOrder, ",") + cfg["disable-dec-num-check"] = settings.DisableDecNumCheck + cfg["disable-dec-order-check"] = settings.DisableDecOrderCheck + cfg["disable-init-func-first-check"] = settings.DisableInitFuncFirstCheck + } + + return goanalysis.NewLinter( + a.Name, + a.Doc, + analyzers, + map[string]map[string]interface{}{a.Name: cfg}, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go index aa372e956..dd6a79772 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go @@ -9,99 +9,42 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/loader" //nolint:staticcheck // require changes in github.com/OpenPeeDeeP/depguard + "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/result" ) -func setDepguardListType(dg *depguard.Depguard, lintCtx *linter.Context) error { - listType := lintCtx.Settings().Depguard.ListType - var found bool - dg.ListType, found = depguard.StringToListType[strings.ToLower(listType)] - if !found { - if listType != "" { - return fmt.Errorf("unsure what list type %s is", listType) - } - dg.ListType = depguard.LTBlacklist - } - - return nil -} - -func setupDepguardPackages(dg *depguard.Depguard, lintCtx *linter.Context) { - if dg.ListType == depguard.LTBlacklist { - // if the list type was a blacklist the packages with error messages should - // be included in the blacklist package list - - noMessagePackages := make(map[string]bool) - for _, pkg := range dg.Packages { - noMessagePackages[pkg] = true - } - - for pkg := range lintCtx.Settings().Depguard.PackagesWithErrorMessage { - if _, ok := noMessagePackages[pkg]; !ok { - dg.Packages = append(dg.Packages, pkg) - } - } - } -} +const depguardLinterName = "depguard" func NewDepguard() *goanalysis.Linter { - const linterName = "depguard" var mu sync.Mutex var resIssues []goanalysis.Issue analyzer := &analysis.Analyzer{ - Name: linterName, + Name: depguardLinterName, Doc: goanalysis.TheOnlyanalyzerDoc, } return goanalysis.NewLinter( - linterName, + depguardLinterName, "Go linter that checks if package imports are in a list of acceptable packages", []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - dgSettings := &lintCtx.Settings().Depguard - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { - prog := goanalysis.MakeFakeLoaderProgram(pass) - dg := &depguard.Depguard{ - Packages: dgSettings.Packages, - IncludeGoRoot: 
dgSettings.IncludeGoRoot, - } - if err := setDepguardListType(dg, lintCtx); err != nil { - return nil, err - } - setupDepguardPackages(dg, lintCtx) + dg, err := newDepGuard(&lintCtx.Settings().Depguard) - loadConfig := &loader.Config{ - Cwd: "", // fallbacked to os.Getcwd - Build: nil, // fallbacked to build.Default - } - issues, err := dg.Run(loadConfig, prog) + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { if err != nil { return nil, err } - if len(issues) == 0 { - return nil, nil - } - msgSuffix := "is in the blacklist" - if dg.ListType == depguard.LTWhitelist { - msgSuffix = "is not in the whitelist" - } - res := make([]goanalysis.Issue, 0, len(issues)) - for _, i := range issues { - userSuppliedMsgSuffix := dgSettings.PackagesWithErrorMessage[i.PackageName] - if userSuppliedMsgSuffix != "" { - userSuppliedMsgSuffix = ": " + userSuppliedMsgSuffix - } - res = append(res, goanalysis.NewIssue(&result.Issue{ - Pos: i.Position, - Text: fmt.Sprintf("%s %s%s", formatCode(i.PackageName, lintCtx.Cfg), msgSuffix, userSuppliedMsgSuffix), - FromLinter: linterName, - }, pass)) + + issues, errRun := dg.run(pass) + if errRun != nil { + return nil, errRun } + mu.Lock() - resIssues = append(resIssues, res...) + resIssues = append(resIssues, issues...) mu.Unlock() return nil, nil @@ -110,3 +53,140 @@ func NewDepguard() *goanalysis.Linter { return resIssues }).WithLoadMode(goanalysis.LoadModeTypesInfo) } + +type depGuard struct { + loadConfig *loader.Config + guardians []*guardian +} + +func newDepGuard(settings *config.DepGuardSettings) (*depGuard, error) { + ps, err := newGuardian(settings) + if err != nil { + return nil, err + } + + d := &depGuard{ + loadConfig: &loader.Config{ + Cwd: "", // fallbacked to os.Getcwd + Build: nil, // fallbacked to build.Default + }, + guardians: []*guardian{ps}, + } + + for _, additional := range settings.AdditionalGuards { + add := additional + ps, err = newGuardian(&add) + if err != nil { + return nil, err + } + + d.guardians = append(d.guardians, ps) + } + + return d, nil +} + +func (d depGuard) run(pass *analysis.Pass) ([]goanalysis.Issue, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + + var resIssues []goanalysis.Issue + for _, g := range d.guardians { + issues, errRun := g.run(d.loadConfig, prog, pass) + if errRun != nil { + return nil, errRun + } + + resIssues = append(resIssues, issues...) 
+ } + + return resIssues, nil +} + +type guardian struct { + *depguard.Depguard + pkgsWithErrorMessage map[string]string +} + +func newGuardian(settings *config.DepGuardSettings) (*guardian, error) { + dg := &depguard.Depguard{ + Packages: settings.Packages, + IncludeGoRoot: settings.IncludeGoRoot, + IgnoreFileRules: settings.IgnoreFileRules, + } + + var err error + dg.ListType, err = getDepGuardListType(settings.ListType) + if err != nil { + return nil, err + } + + // if the list type was a blacklist the packages with error messages should be included in the blacklist package list + if dg.ListType == depguard.LTBlacklist { + noMessagePackages := make(map[string]bool) + for _, pkg := range dg.Packages { + noMessagePackages[pkg] = true + } + + for pkg := range settings.PackagesWithErrorMessage { + if _, ok := noMessagePackages[pkg]; !ok { + dg.Packages = append(dg.Packages, pkg) + } + } + } + + return &guardian{ + Depguard: dg, + pkgsWithErrorMessage: settings.PackagesWithErrorMessage, + }, nil +} + +func (g guardian) run(loadConfig *loader.Config, prog *loader.Program, pass *analysis.Pass) ([]goanalysis.Issue, error) { + issues, err := g.Run(loadConfig, prog) + if err != nil { + return nil, err + } + + res := make([]goanalysis.Issue, 0, len(issues)) + + for _, issue := range issues { + res = append(res, + goanalysis.NewIssue(&result.Issue{ + Pos: issue.Position, + Text: g.createMsg(issue.PackageName), + FromLinter: depguardLinterName, + }, pass), + ) + } + + return res, nil +} + +func (g guardian) createMsg(pkgName string) string { + msgSuffix := "is in the blacklist" + if g.ListType == depguard.LTWhitelist { + msgSuffix = "is not in the whitelist" + } + + var userSuppliedMsgSuffix string + if g.pkgsWithErrorMessage != nil { + userSuppliedMsgSuffix = g.pkgsWithErrorMessage[pkgName] + if userSuppliedMsgSuffix != "" { + userSuppliedMsgSuffix = ": " + userSuppliedMsgSuffix + } + } + + return fmt.Sprintf("%s %s%s", formatCode(pkgName, nil), msgSuffix, userSuppliedMsgSuffix) +} + +func getDepGuardListType(listType string) (depguard.ListType, error) { + if listType == "" { + return depguard.LTBlacklist, nil + } + + listT, found := depguard.StringToListType[strings.ToLower(listType)] + if !found { + return depguard.LTBlacklist, fmt.Errorf("unsure what list type %s is", listType) + } + + return listT, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go new file mode 100644 index 000000000..6dc2b2004 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go @@ -0,0 +1,33 @@ +package golinters + +import ( + "github.com/breml/errchkjson" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewErrChkJSONFuncName(cfg *config.ErrChkJSONSettings) *goanalysis.Linter { + a := errchkjson.NewAnalyzer() + + cfgMap := map[string]map[string]interface{}{} + cfgMap[a.Name] = map[string]interface{}{ + "omit-safe": true, + } + if cfg != nil { + cfgMap[a.Name] = map[string]interface{}{ + "omit-safe": !cfg.CheckErrorFreeEncoding, + "report-no-exported": cfg.ReportNoExported, + } + } + + return goanalysis.NewLinter( + "errchkjson", + "Checks types passed to the json encoding functions. 
"+ + "Reports unsupported types and optionally reports occations, "+ + "where the check for the returned error can be omitted.", + []*analysis.Analyzer{a}, + cfgMap, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go index 9acee6a80..ea264687d 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go @@ -17,7 +17,8 @@ func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter { a.Name: { exhaustive.CheckGeneratedFlag: settings.CheckGenerated, exhaustive.DefaultSignifiesExhaustiveFlag: settings.DefaultSignifiesExhaustive, - exhaustive.IgnorePatternFlag: settings.IgnorePattern, + exhaustive.IgnoreEnumMembersFlag: settings.IgnoreEnumMembers, + exhaustive.PackageScopeOnlyFlag: settings.PackageScopeOnly, }, } } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go index 9886fc5f2..c0c606a7b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go @@ -1,96 +1,48 @@ package golinters import ( - "bytes" "fmt" - "sync" + "strings" - "github.com/daixiang0/gci/pkg/gci" - "github.com/pkg/errors" - "github.com/shazow/go-diff/difflib" + gci "github.com/daixiang0/gci/pkg/analyzer" "golang.org/x/tools/go/analysis" + "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" "github.com/golangci/golangci-lint/pkg/lint/linter" ) const gciName = "gci" -func NewGci() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - differ := difflib.New() +func NewGci(settings *config.GciSettings) *goanalysis.Linter { + var linterCfg map[string]map[string]interface{} - analyzer := &analysis.Analyzer{ - Name: gciName, - Doc: goanalysis.TheOnlyanalyzerDoc, + if settings != nil { + cfg := map[string]interface{}{ + gci.NoInlineCommentsFlag: settings.NoInlineComments, + gci.NoPrefixCommentsFlag: settings.NoPrefixComments, + gci.SectionsFlag: strings.Join(settings.Sections, gci.SectionDelimiter), + gci.SectionSeparatorsFlag: strings.Join(settings.SectionSeparator, gci.SectionDelimiter), + } + + if settings.LocalPrefixes != "" { + prefix := []string{"standard", "default", fmt.Sprintf("prefix(%s)", settings.LocalPrefixes)} + cfg[gci.SectionsFlag] = strings.Join(prefix, gci.SectionDelimiter) + } + + linterCfg = map[string]map[string]interface{}{ + gci.Analyzer.Name: cfg, + } } + return goanalysis.NewLinter( gciName, - "Gci control golang package import order and make it always deterministic.", - []*analysis.Analyzer{analyzer}, - nil, + "Gci controls golang package import order and makes it always deterministic.", + []*analysis.Analyzer{gci.Analyzer}, + linterCfg, ).WithContextSetter(func(lintCtx *linter.Context) { - localFlag := lintCtx.Settings().Gci.LocalPrefixes - goimportsFlag := lintCtx.Settings().Goimports.LocalPrefixes - if localFlag == "" && goimportsFlag != "" { - localFlag = goimportsFlag + if settings.LocalPrefixes != "" { + lintCtx.Log.Warnf("gci: `local-prefixes` is deprecated, use `sections` and `prefix(%s)` instead.", settings.LocalPrefixes) } - - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { - var fileNames []string - for _, f := range pass.Files { - pos := pass.Fset.PositionFor(f.Pos(), false) 
- fileNames = append(fileNames, pos.Filename) - } - - var issues []goanalysis.Issue - - flagSet := gci.FlagSet{ - LocalFlag: gci.ParseLocalFlag(localFlag), - } - - for _, f := range fileNames { - source, result, err := gci.Run(f, &flagSet) - if err != nil { - return nil, err - } - if result == nil { - continue - } - - diff := bytes.Buffer{} - _, err = diff.WriteString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) - if err != nil { - return nil, fmt.Errorf("can't write diff header: %v", err) - } - - err = differ.Diff(&diff, bytes.NewReader(source), bytes.NewReader(result)) - if err != nil { - return nil, fmt.Errorf("can't get gci diff output: %v", err) - } - - is, err := extractIssuesFromPatch(diff.String(), lintCtx.Log, lintCtx, gciName) - if err != nil { - return nil, errors.Wrapf(err, "can't extract issues from gci diff output %q", diff.String()) - } - - for i := range is { - issues = append(issues, goanalysis.NewIssue(&is[i], pass)) - } - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues }).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go index 8b460d16b..c52998fbf 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go @@ -185,7 +185,7 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package, // and analysis-to-analysis (horizontal) dependencies. // This place is memory-intensive: e.g. Istio project has 120k total actions. - // Therefore optimize it carefully. + // Therefore, optimize it carefully. markedActions := make(map[actKey]struct{}, len(analyzers)*len(pkgs)) for _, a := range analyzers { for _, pkg := range pkgs { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go index 96c613e83..50ea64c5c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go @@ -179,8 +179,8 @@ func (act *action) analyze() { if act.pkg.IllTyped { // It looks like there should be !pass.Analyzer.RunDespiteErrors - // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here - // but it exit before it if packages.Load have failed. + // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, + // but it exits before it if packages.Load have failed. 
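For the gci change above, the deprecated local-prefixes value is folded into the new sections form; a sketch of that mapping for a hypothetical module prefix (a comma is assumed as the section delimiter for illustration):

package main

import (
	"fmt"
	"strings"
)

func main() {
	localPrefixes := "github.com/acme/widgets" // hypothetical value of the deprecated local-prefixes setting

	// NewGci rewrites it as a prefix() section appended to the defaults.
	sections := []string{"standard", "default", fmt.Sprintf("prefix(%s)", localPrefixes)}

	fmt.Println(strings.Join(sections, ","))
	// standard,default,prefix(github.com/acme/widgets)
}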
act.err = errors.Wrap(&IllTypedError{Pkg: act.pkg}, "analysis skipped") } else { startedAt = time.Now() diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go index f9a43f3f5..1ac6b83e3 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go @@ -61,7 +61,7 @@ func (lp *loadingPackage) analyze(loadMode LoadMode, loadSem chan struct{}) { if err := lp.loadWithFacts(loadMode); err != nil { werr := errors.Wrapf(err, "failed to load package %s", lp.pkg.Name) // Don't need to write error to errCh, it will be extracted and reported on another layer. - // Unblock depending actions and propagate error. + // Unblock depending on actions and propagate error. for _, act := range lp.actions { close(act.analysisDoneCh) act.err = werr @@ -269,16 +269,16 @@ func (lp *loadingPackage) loadImportedPackageWithFacts(loadMode LoadMode) error // Load package from export data if loadMode >= LoadModeTypesInfo { if err := lp.loadFromExportData(); err != nil { - // We asked Go to give us up to date export data, yet + // We asked Go to give us up-to-date export data, yet // we can't load it. There must be something wrong. // // Attempt loading from source. This should fail (because // otherwise there would be export data); we just want to // get the compile errors. If loading from source succeeds - // we discard the result, anyway. Otherwise we'll fail + // we discard the result, anyway. Otherwise, we'll fail // when trying to reload from export data later. - // Otherwise it panics because uses already existing (from exported data) types. + // Otherwise, it panics because uses already existing (from exported data) types. pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) if srcErr := lp.loadFromSource(loadMode); srcErr != nil { return srcErr @@ -311,7 +311,7 @@ func (lp *loadingPackage) loadImportedPackageWithFacts(loadMode LoadMode) error // Cached facts loading failed: analyze later the action from source. To perform // the analysis we need to load the package from source code. - // Otherwise it panics because uses already existing (from exported data) types. + // Otherwise, it panics because uses already existing (from exported data) types. if loadMode >= LoadModeTypesInfo { pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go index 804865cfc..0732bc6aa 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go @@ -10,9 +10,9 @@ import ( func NewGochecknoglobals() *goanalysis.Linter { gochecknoglobals := checknoglobals.Analyzer() - // gochecknoglobals only lints test files if the `-t` flag is passed so we + // gochecknoglobals only lints test files if the `-t` flag is passed, so we // pass the `t` flag as true to the analyzer before running it. This can be - // turned of by using the regular golangci-lint flags such as `--tests` or + // turned off by using the regular golangci-lint flags such as `--tests` or // `--skip-files`. 
linterConfig := map[string]map[string]interface{}{ gochecknoglobals.Name: { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go index 0c32a8562..ea3a3cbcb 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go @@ -48,8 +48,9 @@ Dynamic rules are written declaratively with AST patterns, filters, report messa } linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg) - var res []goanalysis.Issue pkgIssues := runGocriticOnPackage(linterCtx, enabledCheckers, pass.Files) + res := make([]goanalysis.Issue, 0, len(pkgIssues)) + for i := range pkgIssues { res = append(res, goanalysis.NewIssue(&pkgIssues[i], pass)) } @@ -119,7 +120,6 @@ func configureCheckerInfo( // but the file parsers (TOML, YAML, JSON) don't create the same representation for raw type. // then we have to convert value types into the expected value types. // Maybe in the future, this kind of conversion will be done in go-critic itself. -//nolint:exhaustive // only 3 types (int, bool, and string) are supported by CheckerParam.Value func normalizeCheckerParamsValue(lintCtx *linter.Context, p interface{}) interface{} { rv := reflect.ValueOf(p) switch rv.Type().Kind() { @@ -180,11 +180,23 @@ func runGocriticOnFile(ctx *gocriticlinter.Context, f *ast.File, checkers []*goc // as read-only structure, so no copying is required. for _, warn := range c.Check(f) { pos := ctx.FileSet.Position(warn.Node.Pos()) - res = append(res, result.Issue{ + issue := result.Issue{ Pos: pos, Text: fmt.Sprintf("%s: %s", c.Info.Name, warn.Text), FromLinter: gocriticName, - }) + } + + if warn.HasQuickFix() { + issue.Replacement = &result.Replacement{ + Inline: &result.InlineFix{ + StartCol: pos.Column - 1, + Length: int(warn.Node.End() - warn.Node.Pos()), + NewString: string(warn.Suggestion.Replacement), + }, + } + } + + res = append(res, issue) } } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go index 625245890..cd5b2a43e 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go @@ -31,13 +31,14 @@ func NewGodot() *goanalysis.Linter { settings := godot.Settings{ Scope: godot.Scope(cfg.Scope), Exclude: cfg.Exclude, - Period: true, + Period: cfg.Period, Capital: cfg.Capital, } // Convert deprecated setting - if cfg.CheckAll { // nolint: staticcheck - settings.Scope = godot.TopLevelScope + // todo(butuzov): remove on v2 release + if cfg.CheckAll { // nolint:staticcheck + settings.Scope = godot.AllScope } if settings.Scope == "" { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go index 39e8092e9..4f63e7bed 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go @@ -225,17 +225,6 @@ func getErrorTextForLinter(lintCtx *linter.Context, linterName string) string { if lintCtx.Settings().Goimports.LocalPrefixes != "" { text += " with -local " + lintCtx.Settings().Goimports.LocalPrefixes } - case gciName: - text = "File is not `gci`-ed" - localPrefixes := lintCtx.Settings().Gci.LocalPrefixes - goimportsFlag := lintCtx.Settings().Goimports.LocalPrefixes - if localPrefixes == "" && goimportsFlag != 
"" { - localPrefixes = goimportsFlag - } - - if localPrefixes != "" { - text += " with -local " + localPrefixes - } } return text } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go index f7e71b7da..15d84b48b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go @@ -8,20 +8,38 @@ import ( "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" ) -func NewGoMND(cfg *config.Config) *goanalysis.Linter { - analyzers := []*analysis.Analyzer{ - mnd.Analyzer, - } - +func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter { var linterCfg map[string]map[string]interface{} - if cfg != nil { - linterCfg = cfg.LintersSettings.Gomnd.Settings + + if settings != nil { + // TODO(ldez) For compatibility only, must be drop in v2. + if len(settings.Settings) > 0 { + linterCfg = settings.Settings + } else { + cfg := make(map[string]interface{}) + if len(settings.Checks) > 0 { + cfg["checks"] = settings.Checks + } + if len(settings.IgnoredNumbers) > 0 { + cfg["ignored-numbers"] = settings.IgnoredNumbers + } + if len(settings.IgnoredFiles) > 0 { + cfg["ignored-files"] = settings.IgnoredFiles + } + if len(settings.IgnoredFunctions) > 0 { + cfg["ignored-functions"] = settings.IgnoredFunctions + } + + linterCfg = map[string]map[string]interface{}{ + "mnd": cfg, + } + } } return goanalysis.NewLinter( "gomnd", "An analyzer to detect magic numbers.", - analyzers, + []*analysis.Analyzer{mnd.Analyzer}, linterCfg, ).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go index 9610b3e83..b220c3027 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go @@ -40,7 +40,7 @@ func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter { } } - ruleDefinitions := rules.Generate(filters...) + ruleDefinitions := rules.Generate(false, filters...) 
logger := log.New(io.Discard, "", 0) @@ -55,8 +55,8 @@ func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter { nil, ).WithContextSetter(func(lintCtx *linter.Context) { analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { - gosecAnalyzer := gosec.NewAnalyzer(gasConfig, true, settings.ExcludeGenerated, logger) - gosecAnalyzer.LoadRules(ruleDefinitions.Builders()) + gosecAnalyzer := gosec.NewAnalyzer(gasConfig, true, settings.ExcludeGenerated, false, logger) + gosecAnalyzer.LoadRules(ruleDefinitions.RulesInfo()) pkg := &packages.Package{ Fset: pass.Fset, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go new file mode 100644 index 000000000..e8c1340e4 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go @@ -0,0 +1,32 @@ +package golinters + +import ( + grouper "github.com/leonklingele/grouper/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGrouper(settings *config.GrouperSettings) *goanalysis.Linter { + linterCfg := map[string]map[string]interface{}{} + if settings != nil { + linterCfg["grouper"] = map[string]interface{}{ + "const-require-single-const": settings.ConstRequireSingleConst, + "const-require-grouping": settings.ConstRequireGrouping, + "import-require-single-import": settings.ImportRequireSingleImport, + "import-require-grouping": settings.ImportRequireGrouping, + "type-require-single-type": settings.TypeRequireSingleType, + "type-require-grouping": settings.TypeRequireGrouping, + "var-require-single-var": settings.VarRequireSingleVar, + "var-require-grouping": settings.VarRequireGrouping, + } + } + + return goanalysis.NewLinter( + "grouper", + "An analyzer to analyze expression groups.", + []*analysis.Analyzer{grouper.New()}, + linterCfg, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go index 523aa257b..d1f042829 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go @@ -28,8 +28,11 @@ func NewImportAs(settings *config.ImportAsSettings) *goanalysis.Linter { lintCtx.Log.Infof("importas settings found, but no aliases listed. 
List aliases under alias: key.") // nolint: misspell } - err := analyzer.Flags.Set("no-unaliased", strconv.FormatBool(settings.NoUnaliased)) - if err != nil { + if err := analyzer.Flags.Set("no-unaliased", strconv.FormatBool(settings.NoUnaliased)); err != nil { + lintCtx.Log.Errorf("failed to parse configuration: %v", err) + } + + if err := analyzer.Flags.Set("no-extra-aliases", strconv.FormatBool(settings.NoExtraAliases)); err != nil { lintCtx.Log.Errorf("failed to parse configuration: %v", err) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go index 3b5df66da..f2d4aec92 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go @@ -3,11 +3,11 @@ package golinters import ( "strings" - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/butuzov/ireturn/analyzer" "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" ) func NewIreturn(settings *config.IreturnSettings) *goanalysis.Linter { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go index 5f26e91dd..e0a9de63c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go @@ -51,11 +51,11 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]r // we can return this line as a long line instead of returning an error. // The reason for this change is that this case might happen with autogenerated files // The go-bindata tool for instance might generate a file with a very long line. - // In this case, as it's a auto generated file, the warning returned by lll will + // In this case, as it's an auto generated file, the warning returned by lll will // be ignored. // But if we return a linter error here, and this error happens for an autogenerated // file the error will be discarded (fine), but all the subsequent errors for lll will - // be discarded for other files and we'll miss legit error. + // be discarded for other files, and we'll miss legit error. 
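
For context on the lll comment above: lll scans files line by line, and with a standard bufio.Scanner a single line longer than bufio.MaxScanTokenSize (64 KiB by default) makes Scan stop with bufio.ErrTooLong; the change treats that case as an ordinary long-line issue instead of failing the run. A minimal sketch of the failure mode, using only the standard library:

package main

import (
	"bufio"
	"errors"
	"fmt"
	"strings"
)

func main() {
	// A single "line" longer than the scanner's default 64 KiB token limit,
	// as go-bindata-style generated files can produce.
	long := strings.Repeat("x", bufio.MaxScanTokenSize+1)
	sc := bufio.NewScanner(strings.NewReader(long + "\n"))

	for sc.Scan() {
		// never reached: the very first token already exceeds the limit
	}

	if err := sc.Err(); errors.Is(err, bufio.ErrTooLong) {
		// This is the case the hunk above reports as a long line
		// rather than as a linter error.
		fmt.Println("line exceeds the scanner limit, report it as a long line")
	}
}
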
res = append(res, result.Issue{ Pos: token.Position{ Filename: filename, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go new file mode 100644 index 000000000..2b02b948f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go @@ -0,0 +1,32 @@ +package golinters + +import ( + "github.com/yagipy/maintidx" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewMaintIdx(cfg *config.MaintIdxSettings) *goanalysis.Linter { + analyzer := maintidx.Analyzer + + cfgMap := map[string]map[string]interface{}{ + analyzer.Name: {"under": 20}, + } + + if cfg != nil { + cfgMap[analyzer.Name] = map[string]interface{}{ + "under": cfg.Under, + } + } + + return goanalysis.NewLinter( + analyzer.Name, + analyzer.Doc, + []*analysis.Analyzer{ + analyzer, + }, + cfgMap, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go index 4466cab41..064fd61a6 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go @@ -199,7 +199,7 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { position: pos, } - // check for, report and eliminate leading spaces so we can check for other issues + // check for, report and eliminate leading spaces, so we can check for other issues if len(leadingSpace) > 0 { removeWhitespace := &result.Replacement{ Inline: &result.InlineFix{ @@ -281,7 +281,7 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") { needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation - // otherwise, check if we are excluding all of the mentioned linters + // otherwise, check if we are excluding all the mentioned linters for _, ll := range linters { if !l.excludeByLinter[ll] { // if a linter does require explanation needsExplanation = true diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go index 061c9b475..d8165f3ce 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go @@ -65,7 +65,7 @@ func NewRevive(cfg *config.ReviveSettings) *goanalysis.Linter { return nil, err } - revive := lint.New(os.ReadFile) + revive := lint.New(os.ReadFile, cfg.MaxOpenFiles) lintingRules, err := reviveConfig.GetLintingRules(conf) if err != nil { @@ -146,9 +146,9 @@ func reviveToIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue { } // This function mimics the GetConfig function of revive. -// This allow to get default values and right types. +// This allows to get default values and right types. 
// https://github.com/golangci/golangci-lint/issues/1745 -// https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L155 +// https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L182 func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) { conf := defaultConfig() @@ -162,7 +162,7 @@ func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) { } conf = &lint.Config{} - _, err = toml.DecodeReader(buf, conf) + _, err = toml.NewDecoder(buf).Decode(conf) if err != nil { return nil, errors.Wrap(err, "failed to decode configuration") } @@ -235,7 +235,7 @@ func safeTomlSlice(r []interface{}) []interface{} { } // This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L15 +// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L15 var defaultRules = []lint.Rule{ &rule.VarDeclarationsRule{}, &rule.PackageCommentsRule{}, @@ -257,12 +257,80 @@ var defaultRules = []lint.Rule{ &rule.ContextAsArgumentRule{}, } +var allRules = append([]lint.Rule{ + &rule.ArgumentsLimitRule{}, + &rule.CyclomaticRule{}, + &rule.FileHeaderRule{}, + &rule.EmptyBlockRule{}, + &rule.SuperfluousElseRule{}, + &rule.ConfusingNamingRule{}, + &rule.GetReturnRule{}, + &rule.ModifiesParamRule{}, + &rule.ConfusingResultsRule{}, + &rule.DeepExitRule{}, + &rule.UnusedParamRule{}, + &rule.UnreachableCodeRule{}, + &rule.AddConstantRule{}, + &rule.FlagParamRule{}, + &rule.UnnecessaryStmtRule{}, + &rule.StructTagRule{}, + &rule.ModifiesValRecRule{}, + &rule.ConstantLogicalExprRule{}, + &rule.BoolLiteralRule{}, + &rule.RedefinesBuiltinIDRule{}, + &rule.ImportsBlacklistRule{}, + &rule.FunctionResultsLimitRule{}, + &rule.MaxPublicStructsRule{}, + &rule.RangeValInClosureRule{}, + &rule.RangeValAddress{}, + &rule.WaitGroupByValueRule{}, + &rule.AtomicRule{}, + &rule.EmptyLinesRule{}, + &rule.LineLengthLimitRule{}, + &rule.CallToGCRule{}, + &rule.DuplicatedImportsRule{}, + &rule.ImportShadowingRule{}, + &rule.BareReturnRule{}, + &rule.UnusedReceiverRule{}, + &rule.UnhandledErrorRule{}, + &rule.CognitiveComplexityRule{}, + &rule.StringOfIntRule{}, + &rule.StringFormatRule{}, + &rule.EarlyReturnRule{}, + &rule.UnconditionalRecursionRule{}, + &rule.IdenticalBranchesRule{}, + &rule.DeferRule{}, + &rule.UnexportedNamingRule{}, + &rule.FunctionLength{}, + &rule.NestedStructs{}, + &rule.IfReturnRule{}, + &rule.UselessBreak{}, + &rule.TimeEqualRule{}, + &rule.BannedCharsRule{}, + &rule.OptimizeOperandsOrderRule{}, +}, defaultRules...) + +const defaultConfidence = 0.8 + // This element is not exported by revive, so we need copy the code. 
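
The revive hunk above also switches TOML decoding from the deprecated toml.DecodeReader helper to the Decoder API of BurntSushi/toml v1.0 (the same dependency bumped at the top of this patch). A minimal sketch of decoding a revive-style config with the new API, using a throwaway struct rather than revive's real lint.Config:

package main

import (
	"bytes"
	"fmt"

	"github.com/BurntSushi/toml"
)

// cfg is a throwaway stand-in; the vendored code decodes into lint.Config.
type cfg struct {
	Confidence float64
	Severity   string
}

func main() {
	buf := bytes.NewBufferString("confidence = 0.8\nseverity = \"warning\"\n")

	var c cfg
	// toml.NewDecoder(r).Decode(v) replaces toml.DecodeReader(r, v);
	// the first return value is the decode metadata, unused here.
	if _, err := toml.NewDecoder(buf).Decode(&c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c) // {Confidence:0.8 Severity:warning}
}
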
-// Extracted from https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L133 +// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L145 func normalizeConfig(cfg *lint.Config) { - if cfg.Confidence == 0 { - cfg.Confidence = 0.8 + if len(cfg.Rules) == 0 { + cfg.Rules = map[string]lint.RuleConfig{} } + if cfg.EnableAllRules { + // Add to the configuration all rules not yet present in it + for _, rule := range allRules { + ruleName := rule.Name() + _, alreadyInConf := cfg.Rules[ruleName] + if alreadyInConf { + continue + } + // Add the rule with an empty conf for + cfg.Rules[ruleName] = lint.RuleConfig{} + } + } + severity := cfg.Severity if severity != "" { for k, v := range cfg.Rules { @@ -281,10 +349,10 @@ func normalizeConfig(cfg *lint.Config) { } // This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L182 +// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L214 func defaultConfig() *lint.Config { defaultConfig := lint.Config{ - Confidence: 0.0, + Confidence: defaultConfidence, Severity: lint.SeverityWarning, Rules: map[string]lint.RuleConfig{}, } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowerrcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go similarity index 100% rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/rowerrcheck.go rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go index 168c881c4..6e3176f17 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go @@ -17,9 +17,13 @@ func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter { cfg := map[string]map[string]interface{}{} if settings != nil { vnlCfg := map[string]interface{}{ - "checkReceiver": strconv.FormatBool(settings.CheckReceiver), - "checkReturn": strconv.FormatBool(settings.CheckReturn), - "ignoreNames": strings.Join(settings.IgnoreNames, ","), + "checkReceiver": strconv.FormatBool(settings.CheckReceiver), + "checkReturn": strconv.FormatBool(settings.CheckReturn), + "ignoreNames": strings.Join(settings.IgnoreNames, ","), + "ignoreTypeAssertOk": strconv.FormatBool(settings.IgnoreTypeAssertOk), + "ignoreMapIndexOk": strconv.FormatBool(settings.IgnoreMapIndexOk), + "ignoreChanRecvOk": strconv.FormatBool(settings.IgnoreChanRecvOk), + "ignoreDecls": strings.Join(settings.IgnoreDecls, ","), } if settings.MaxDistance > 0 { @@ -37,5 +41,5 @@ func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter { "checks that the length of a variable's name matches its scope", []*analysis.Analyzer{a}, cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) + ).WithLoadMode(goanalysis.LoadModeTypesInfo) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go index 5eaf085d7..c52bcb740 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go @@ -16,6 +16,9 @@ func NewWrapcheck(settings *config.WrapcheckSettings) *goanalysis.Linter { if 
len(settings.IgnoreSigs) != 0 { cfg.IgnoreSigs = settings.IgnoreSigs } + if len(settings.IgnoreSigRegexps) != 0 { + cfg.IgnoreSigRegexps = settings.IgnoreSigRegexps + } if len(settings.IgnorePackageGlobs) != 0 { cfg.IgnorePackageGlobs = settings.IgnorePackageGlobs } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go index 7d5695747..d80be62c2 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go @@ -100,18 +100,26 @@ func enableLinterConfigs(lcs []*linter.Config, isEnabled func(lc *linter.Config) //nolint:funlen func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { + var bidichkCfg *config.BiDiChkSettings var cyclopCfg *config.Cyclop + var decorderCfg *config.DecorderSettings + var errchkjsonCfg *config.ErrChkJSONSettings var errorlintCfg *config.ErrorLintSettings var exhaustiveCfg *config.ExhaustiveSettings var exhaustiveStructCfg *config.ExhaustiveStructSettings + var gciCfg *config.GciSettings var goModDirectivesCfg *config.GoModDirectivesSettings + var goMndCfg *config.GoMndSettings var gosecCfg *config.GoSecSettings var gosimpleCfg *config.StaticCheckSettings var govetCfg *config.GovetSettings + var grouperCfg *config.GrouperSettings var ifshortCfg *config.IfshortSettings var importAsCfg *config.ImportAsSettings var ireturnCfg *config.IreturnSettings + var maintIdxCfg *config.MaintIdxSettings var nilNilCfg *config.NilNilSettings + var nlreturnCfg *config.NlreturnSettings var predeclaredCfg *config.PredeclaredSettings var reviveCfg *config.ReviveSettings var staticcheckCfg *config.StaticCheckSettings @@ -123,21 +131,28 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { var unusedCfg *config.StaticCheckSettings var varnamelenCfg *config.VarnamelenSettings var wrapcheckCfg *config.WrapcheckSettings - var nlreturnCfg *config.NlreturnSettings if m.cfg != nil { + bidichkCfg = &m.cfg.LintersSettings.BiDiChk cyclopCfg = &m.cfg.LintersSettings.Cyclop + errchkjsonCfg = &m.cfg.LintersSettings.ErrChkJSON + decorderCfg = &m.cfg.LintersSettings.Decorder errorlintCfg = &m.cfg.LintersSettings.ErrorLint exhaustiveCfg = &m.cfg.LintersSettings.Exhaustive exhaustiveStructCfg = &m.cfg.LintersSettings.ExhaustiveStruct + gciCfg = &m.cfg.LintersSettings.Gci goModDirectivesCfg = &m.cfg.LintersSettings.GoModDirectives + goMndCfg = &m.cfg.LintersSettings.Gomnd gosecCfg = &m.cfg.LintersSettings.Gosec gosimpleCfg = &m.cfg.LintersSettings.Gosimple govetCfg = &m.cfg.LintersSettings.Govet + grouperCfg = &m.cfg.LintersSettings.Grouper ifshortCfg = &m.cfg.LintersSettings.Ifshort importAsCfg = &m.cfg.LintersSettings.ImportAs ireturnCfg = &m.cfg.LintersSettings.Ireturn + maintIdxCfg = &m.cfg.LintersSettings.MaintIdx nilNilCfg = &m.cfg.LintersSettings.NilNil + nlreturnCfg = &m.cfg.LintersSettings.Nlreturn predeclaredCfg = &m.cfg.LintersSettings.Predeclared reviveCfg = &m.cfg.LintersSettings.Revive staticcheckCfg = &m.cfg.LintersSettings.Staticcheck @@ -149,51 +164,464 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { unusedCfg = &m.cfg.LintersSettings.Unused varnamelenCfg = &m.cfg.LintersSettings.Varnamelen wrapcheckCfg = &m.cfg.LintersSettings.Wrapcheck - nlreturnCfg = &m.cfg.LintersSettings.Nlreturn } const megacheckName = "megacheck" + // The linters are sorted in the alphabetical order (case-insensitive). 
+ // When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint. lcs := []*linter.Config{ - linter.NewConfig(golinters.NewGovet(govetCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs, linter.PresetMetaLinter). - WithAlternativeNames("vet", "vetshadow"). - WithURL("https://golang.org/cmd/vet/"), + linter.NewConfig(golinters.NewAsciicheck()). + WithSince("v1.26.0"). + WithPresets(linter.PresetBugs, linter.PresetStyle). + WithURL("https://github.com/tdakkota/asciicheck"), + + linter.NewConfig(golinters.NewBiDiChkFuncName(bidichkCfg)). + WithSince("1.43.0"). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/breml/bidichk"), + linter.NewConfig(golinters.NewBodyclose()). WithSince("v1.18.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetPerformance, linter.PresetBugs). WithURL("https://github.com/timakin/bodyclose"), - linter.NewConfig(golinters.NewNoctx()). - WithSince("v1.28.0"). + + linter.NewConfig(golinters.NewContainedCtx()). + WithSince("1.44.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/sivchari/containedctx"), + + linter.NewConfig(golinters.NewContextCheck()). + WithSince("v1.43.0"). + WithPresets(linter.PresetBugs). WithLoadForGoAnalysis(). - WithPresets(linter.PresetPerformance, linter.PresetBugs). - WithURL("https://github.com/sonatard/noctx"), + WithURL("https://github.com/sylvia7788/contextcheck"), + + linter.NewConfig(golinters.NewCyclop(cyclopCfg)). + WithSince("v1.37.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/bkielbasa/cyclop"), + + linter.NewConfig(golinters.NewDecorder(decorderCfg)). + WithSince("v1.44.0"). + WithPresets(linter.PresetFormatting, linter.PresetStyle). + WithURL("https://gitlab.com/bosi/decorder"), + + linter.NewConfig(golinters.NewDeadcode()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetUnused). + WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode"), + + linter.NewConfig(golinters.NewDepguard()). + WithSince("v1.4.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). + WithURL("https://github.com/OpenPeeDeeP/depguard"), + + linter.NewConfig(golinters.NewDogsled()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/alexkohler/dogsled"), + + linter.NewConfig(golinters.NewDupl()). + WithSince("v1.0.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mibk/dupl"), + + linter.NewConfig(golinters.NewDurationCheck()). + WithSince("v1.37.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/charithe/durationcheck"), + linter.NewConfig(golinters.NewErrcheck()). WithSince("v1.0.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetError). WithURL("https://github.com/kisielk/errcheck"), + + linter.NewConfig(golinters.NewErrChkJSONFuncName(errchkjsonCfg)). + WithSince("1.44.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/breml/errchkjson"), + + linter.NewConfig(golinters.NewErrName()). + WithSince("v1.42.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/Antonboom/errname"), + + linter.NewConfig(golinters.NewErrorLint(errorlintCfg)). + WithSince("v1.32.0"). + WithPresets(linter.PresetBugs, linter.PresetError). + WithLoadForGoAnalysis(). 
+ WithURL("https://github.com/polyfloyd/go-errorlint"), + + linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)). + WithSince(" v1.28.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/nishanths/exhaustive"), + + linter.NewConfig(golinters.NewExhaustiveStruct(exhaustiveStructCfg)). + WithSince("v1.32.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithLoadForGoAnalysis(). + WithURL("https://github.com/mbilski/exhaustivestruct"), + + linter.NewConfig(golinters.NewExportLoopRef()). + WithSince("v1.28.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/kyoh86/exportloopref"), + + linter.NewConfig(golinters.NewForbidigo()). + WithSince("v1.34.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ashanbrown/forbidigo"), + + linter.NewConfig(golinters.NewForceTypeAssert()). + WithSince("v1.38.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/gostaticanalysis/forcetypeassert"), + + linter.NewConfig(golinters.NewFunlen()). + WithSince("v1.18.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/ultraware/funlen"), + + linter.NewConfig(golinters.NewGci(gciCfg)). + WithSince("v1.30.0"). + WithPresets(linter.PresetFormatting, linter.PresetImport). + WithURL("https://github.com/daixiang0/gci"), + + linter.NewConfig(golinters.NewGochecknoglobals()). + WithSince("v1.12.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/leighmcculloch/gochecknoglobals"), + + linter.NewConfig(golinters.NewGochecknoinits()). + WithSince("v1.12.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/leighmcculloch/gochecknoinits"), + + linter.NewConfig(golinters.NewGocognit()). + WithSince("v1.20.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/uudashr/gocognit"), + + linter.NewConfig(golinters.NewGoconst()). + WithSince("v1.0.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/jgautheron/goconst"), + + linter.NewConfig(golinters.NewGocritic()). + WithSince("v1.12.0"). + WithPresets(linter.PresetStyle, linter.PresetMetaLinter). + WithLoadForGoAnalysis(). + WithURL("https://github.com/go-critic/go-critic"), + + linter.NewConfig(golinters.NewGocyclo()). + WithSince("v1.0.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/fzipp/gocyclo"), + + linter.NewConfig(golinters.NewGodot()). + WithSince("v1.25.0"). + WithPresets(linter.PresetStyle, linter.PresetComment). + WithAutoFix(). + WithURL("https://github.com/tetafro/godot"), + + linter.NewConfig(golinters.NewGodox()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle, linter.PresetComment). + WithURL("https://github.com/matoous/godox"), + + linter.NewConfig(golinters.NewGoerr113()). + WithSince("v1.26.0"). + WithPresets(linter.PresetStyle, linter.PresetError). + WithLoadForGoAnalysis(). + WithURL("https://github.com/Djarvur/go-err113"), + + linter.NewConfig(golinters.NewGofmt()). + WithSince("v1.0.0"). + WithPresets(linter.PresetFormatting). + WithAutoFix(). + WithURL("https://golang.org/cmd/gofmt/"), + + linter.NewConfig(golinters.NewGofumpt()). + WithSince("v1.28.0"). + WithPresets(linter.PresetFormatting). + WithAutoFix(). + WithURL("https://github.com/mvdan/gofumpt"), + + linter.NewConfig(golinters.NewGoHeader()). + WithSince("v1.28.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/denis-tingajkin/go-header"), + + linter.NewConfig(golinters.NewGoimports()). + WithSince("v1.20.0"). 
+ WithPresets(linter.PresetFormatting, linter.PresetImport). + WithAutoFix(). + WithURL("https://godoc.org/golang.org/x/tools/cmd/goimports"), + linter.NewConfig(golinters.NewGolint()). WithSince("v1.0.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle). WithURL("https://github.com/golang/lint"). Deprecated("The repository of the linter has been archived by the owner.", "v1.41.0", "revive"), + + linter.NewConfig(golinters.NewGoMND(goMndCfg)). + WithSince("v1.22.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/tommy-muehle/go-mnd"), + + linter.NewConfig(golinters.NewGoModDirectives(goModDirectivesCfg)). + WithSince("v1.39.0"). + WithPresets(linter.PresetStyle, linter.PresetModule). + WithURL("https://github.com/ldez/gomoddirectives"), + + linter.NewConfig(golinters.NewGomodguard()). + WithSince("v1.25.0"). + WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). + WithURL("https://github.com/ryancurrah/gomodguard"), + + linter.NewConfig(golinters.NewGoPrintfFuncName()). + WithSince("v1.23.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/jirfag/go-printf-func-name"), + + linter.NewConfig(golinters.NewGosec(gosecCfg)). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/securego/gosec"). + WithAlternativeNames("gas"), + + linter.NewConfig(golinters.NewGosimple(gosimpleCfg)). + WithSince("v1.20.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithAlternativeNames(megacheckName). + WithURL("https://github.com/dominikh/go-tools/tree/master/simple"), + + linter.NewConfig(golinters.NewGovet(govetCfg)). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs, linter.PresetMetaLinter). + WithAlternativeNames("vet", "vetshadow"). + WithURL("https://golang.org/cmd/vet/"), + + linter.NewConfig(golinters.NewGrouper(grouperCfg)). + WithSince("v1.44.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/leonklingele/grouper"), + + linter.NewConfig(golinters.NewIfshort(ifshortCfg)). + WithSince("v1.36.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/esimonov/ifshort"), + + linter.NewConfig(golinters.NewImportAs(importAsCfg)). + WithSince("v1.38.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/julz/importas"), + + linter.NewConfig(golinters.NewIneffassign()). + WithSince("v1.0.0"). + WithPresets(linter.PresetUnused). + WithURL("https://github.com/gordonklaus/ineffassign"), + + linter.NewConfig(golinters.NewInterfacer()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mvdan/interfacer"). + Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", ""), + + linter.NewConfig(golinters.NewIreturn(ireturnCfg)). + WithSince("v1.43.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/butuzov/ireturn"), + + linter.NewConfig(golinters.NewLLL()). + WithSince("v1.8.0"). + WithPresets(linter.PresetStyle), + + linter.NewConfig(golinters.NewMaintIdx(maintIdxCfg)). + WithSince("v1.44.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/yagipy/maintidx"), + + linter.NewConfig(golinters.NewMakezero()). + WithSince("v1.34.0"). + WithPresets(linter.PresetStyle, linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ashanbrown/makezero"), + + linter.NewConfig(golinters.NewMaligned()). 
+ WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetPerformance). + WithURL("https://github.com/mdempsky/maligned"). + Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"), + + linter.NewConfig(golinters.NewMisspell()). + WithSince("v1.8.0"). + WithPresets(linter.PresetStyle, linter.PresetComment). + WithAutoFix(). + WithURL("https://github.com/client9/misspell"), + + linter.NewConfig(golinters.NewNakedret()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/alexkohler/nakedret"), + + linter.NewConfig(golinters.NewNestif()). + WithSince("v1.25.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/nakabonne/nestif"), + + linter.NewConfig(golinters.NewNilErr()). + WithSince("v1.38.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/gostaticanalysis/nilerr"), + + linter.NewConfig(golinters.NewNilNil(nilNilCfg)). + WithSince("v1.43.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/Antonboom/nilnil"), + + linter.NewConfig(golinters.NewNLReturn(nlreturnCfg)). + WithSince("v1.30.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ssgreg/nlreturn"), + + linter.NewConfig(golinters.NewNoctx()). + WithSince("v1.28.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetPerformance, linter.PresetBugs). + WithURL("https://github.com/sonatard/noctx"), + + linter.NewConfig(golinters.NewParallelTest()). + WithSince("v1.33.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithURL("https://github.com/kunwardeep/paralleltest"), + + linter.NewConfig(golinters.NewPrealloc()). + WithSince("v1.19.0"). + WithPresets(linter.PresetPerformance). + WithURL("https://github.com/alexkohler/prealloc"), + + linter.NewConfig(golinters.NewPredeclared(predeclaredCfg)). + WithSince("v1.35.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/nishanths/predeclared"), + + linter.NewConfig(golinters.NewPromlinter()). + WithSince("v1.40.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/yeya24/promlinter"), + + linter.NewConfig(golinters.NewRevive(reviveCfg)). + WithSince("v1.37.0"). + WithPresets(linter.PresetStyle, linter.PresetMetaLinter). + ConsiderSlow(). + WithURL("https://github.com/mgechev/revive"), + linter.NewConfig(golinters.NewRowsErrCheck()). WithSince("v1.23.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetSQL). WithURL("https://github.com/jingyugao/rowserrcheck"), + linter.NewConfig(golinters.NewScopelint()). + WithSince("v1.12.0"). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/kyoh86/scopelint"). + Deprecated("The repository of the linter has been deprecated by the owner.", "v1.39.0", "exportloopref"), + + linter.NewConfig(golinters.NewSQLCloseCheck()). + WithSince("v1.28.0"). + WithPresets(linter.PresetBugs, linter.PresetSQL). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ryanrolds/sqlclosecheck"), + linter.NewConfig(golinters.NewStaticcheck(staticcheckCfg)). WithSince("v1.0.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetMetaLinter). WithAlternativeNames(megacheckName). WithURL("https://staticcheck.io/"), + + linter.NewConfig(golinters.NewStructcheck()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetUnused). 
+ WithURL("https://github.com/opennota/check"), + + linter.NewConfig(golinters.NewStylecheck(stylecheckCfg)). + WithSince("v1.20.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), + + linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)). + WithSince("v1.40.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ldez/tagliatelle"), + + linter.NewConfig(golinters.NewTenv(tenvCfg)). + WithSince("v1.43.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/sivchari/tenv"), + + linter.NewConfig(golinters.NewTestpackage(testpackageCfg)). + WithSince("v1.25.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithURL("https://github.com/maratori/testpackage"), + + linter.NewConfig(golinters.NewThelper(thelperCfg)). + WithSince("v1.34.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/kulti/thelper"), + + linter.NewConfig(golinters.NewTparallel()). + WithSince("v1.32.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithLoadForGoAnalysis(). + WithURL("https://github.com/moricho/tparallel"), + + linter.NewConfig(golinters.NewTypecheck()). + WithSince("v1.3.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL(""), + + linter.NewConfig(golinters.NewUnconvert()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mdempsky/unconvert"), + + linter.NewConfig(golinters.NewUnparam()). + WithSince("v1.9.0"). + WithPresets(linter.PresetUnused). + WithLoadForGoAnalysis(). + WithURL("https://github.com/mvdan/unparam"), + linter.NewConfig(golinters.NewUnused(unusedCfg)). WithSince("v1.20.0"). WithLoadForGoAnalysis(). @@ -202,350 +630,41 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { ConsiderSlow(). WithChangeTypes(). WithURL("https://github.com/dominikh/go-tools/tree/master/unused"), - linter.NewConfig(golinters.NewGosimple(gosimpleCfg)). - WithSince("v1.20.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithAlternativeNames(megacheckName). - WithURL("https://github.com/dominikh/go-tools/tree/master/simple"), - linter.NewConfig(golinters.NewStylecheck(stylecheckCfg)). - WithSince("v1.20.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), - linter.NewConfig(golinters.NewGosec(gosecCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/securego/gosec"). - WithAlternativeNames("gas"), - linter.NewConfig(golinters.NewStructcheck()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/opennota/check"), linter.NewConfig(golinters.NewVarcheck()). WithSince("v1.0.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetUnused). WithURL("https://github.com/opennota/check"), - linter.NewConfig(golinters.NewInterfacer()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/mvdan/interfacer"). - Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", ""), - linter.NewConfig(golinters.NewUnconvert()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). 
- WithURL("https://github.com/mdempsky/unconvert"), - linter.NewConfig(golinters.NewIneffassign()). - WithSince("v1.0.0"). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/gordonklaus/ineffassign"), - linter.NewConfig(golinters.NewDupl()). - WithSince("v1.0.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/mibk/dupl"), - linter.NewConfig(golinters.NewGoconst()). - WithSince("v1.0.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/jgautheron/goconst"), - linter.NewConfig(golinters.NewDeadcode()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode"), - linter.NewConfig(golinters.NewGocyclo()). - WithSince("v1.0.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/fzipp/gocyclo"), - linter.NewConfig(golinters.NewCyclop(cyclopCfg)). - WithSince("v1.37.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/bkielbasa/cyclop"), - linter.NewConfig(golinters.NewGocognit()). - WithSince("v1.20.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/uudashr/gocognit"), - linter.NewConfig(golinters.NewTypecheck()). - WithSince("v1.3.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL(""), - linter.NewConfig(golinters.NewAsciicheck()). - WithSince("v1.26.0"). - WithPresets(linter.PresetBugs, linter.PresetStyle). - WithURL("https://github.com/tdakkota/asciicheck"), - linter.NewConfig(golinters.NewGofmt()). - WithSince("v1.0.0"). - WithPresets(linter.PresetFormatting). - WithAutoFix(). - WithURL("https://golang.org/cmd/gofmt/"), - linter.NewConfig(golinters.NewGofumpt()). - WithSince("v1.28.0"). - WithPresets(linter.PresetFormatting). - WithAutoFix(). - WithURL("https://github.com/mvdan/gofumpt"), - linter.NewConfig(golinters.NewGoimports()). - WithSince("v1.20.0"). - WithPresets(linter.PresetFormatting, linter.PresetImport). - WithAutoFix(). - WithURL("https://godoc.org/golang.org/x/tools/cmd/goimports"), - linter.NewConfig(golinters.NewGoHeader()). - WithSince("v1.28.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/denis-tingajkin/go-header"), - linter.NewConfig(golinters.NewGci()). - WithSince("v1.30.0"). - WithPresets(linter.PresetFormatting, linter.PresetImport). - WithAutoFix(). - WithURL("https://github.com/daixiang0/gci"), - linter.NewConfig(golinters.NewMaligned()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetPerformance). - WithURL("https://github.com/mdempsky/maligned"). - Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"), - linter.NewConfig(golinters.NewDepguard()). - WithSince("v1.4.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). - WithURL("https://github.com/OpenPeeDeeP/depguard"), - linter.NewConfig(golinters.NewMisspell()). - WithSince("v1.8.0"). - WithPresets(linter.PresetStyle, linter.PresetComment). - WithAutoFix(). - WithURL("https://github.com/client9/misspell"), - linter.NewConfig(golinters.NewLLL()). - WithSince("v1.8.0"). - WithPresets(linter.PresetStyle), - linter.NewConfig(golinters.NewUnparam()). - WithSince("v1.9.0"). - WithPresets(linter.PresetUnused). - WithLoadForGoAnalysis(). - WithURL("https://github.com/mvdan/unparam"), - linter.NewConfig(golinters.NewDogsled()). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle). 
- WithURL("https://github.com/alexkohler/dogsled"), - linter.NewConfig(golinters.NewNakedret()). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/alexkohler/nakedret"), - linter.NewConfig(golinters.NewPrealloc()). - WithSince("v1.19.0"). - WithPresets(linter.PresetPerformance). - WithURL("https://github.com/alexkohler/prealloc"), - linter.NewConfig(golinters.NewScopelint()). - WithSince("v1.12.0"). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/kyoh86/scopelint"). - Deprecated("The repository of the linter has been deprecated by the owner.", "v1.39.0", "exportloopref"), - linter.NewConfig(golinters.NewGocritic()). - WithSince("v1.12.0"). - WithPresets(linter.PresetStyle, linter.PresetMetaLinter). - WithLoadForGoAnalysis(). - WithURL("https://github.com/go-critic/go-critic"), - linter.NewConfig(golinters.NewGochecknoinits()). - WithSince("v1.12.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/leighmcculloch/gochecknoinits"), - linter.NewConfig(golinters.NewGochecknoglobals()). - WithSince("v1.12.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/leighmcculloch/gochecknoglobals"), - linter.NewConfig(golinters.NewGodox()). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle, linter.PresetComment). - WithURL("https://github.com/matoous/godox"), - linter.NewConfig(golinters.NewFunlen()). - WithSince("v1.18.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/ultraware/funlen"), - linter.NewConfig(golinters.NewWhitespace()). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle). - WithAutoFix(). - WithURL("https://github.com/ultraware/whitespace"), - linter.NewConfig(golinters.NewWSL()). - WithSince("v1.20.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/bombsimon/wsl"), - linter.NewConfig(golinters.NewGoPrintfFuncName()). - WithSince("v1.23.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/jirfag/go-printf-func-name"), - linter.NewConfig(golinters.NewGoMND(m.cfg)). - WithSince("v1.22.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/tommy-muehle/go-mnd"), - linter.NewConfig(golinters.NewGoerr113()). - WithSince("v1.26.0"). - WithPresets(linter.PresetStyle, linter.PresetError). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Djarvur/go-err113"), - linter.NewConfig(golinters.NewGomodguard()). - WithSince("v1.25.0"). - WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). - WithURL("https://github.com/ryancurrah/gomodguard"), - linter.NewConfig(golinters.NewGodot()). - WithSince("v1.25.0"). - WithPresets(linter.PresetStyle, linter.PresetComment). - WithAutoFix(). - WithURL("https://github.com/tetafro/godot"), - linter.NewConfig(golinters.NewTestpackage(testpackageCfg)). - WithSince("v1.25.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithURL("https://github.com/maratori/testpackage"), - linter.NewConfig(golinters.NewNestif()). - WithSince("v1.25.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/nakabonne/nestif"), - linter.NewConfig(golinters.NewExportLoopRef()). - WithSince("v1.28.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/kyoh86/exportloopref"), - linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)). - WithSince(" v1.28.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). 
- WithURL("https://github.com/nishanths/exhaustive"), - linter.NewConfig(golinters.NewSQLCloseCheck()). - WithSince("v1.28.0"). - WithPresets(linter.PresetBugs, linter.PresetSQL). - WithLoadForGoAnalysis(). - WithURL("https://github.com/ryanrolds/sqlclosecheck"), - linter.NewConfig(golinters.NewNLReturn(nlreturnCfg)). - WithSince("v1.30.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/ssgreg/nlreturn"), - linter.NewConfig(golinters.NewWrapcheck(wrapcheckCfg)). - WithSince("v1.32.0"). - WithPresets(linter.PresetStyle, linter.PresetError). - WithLoadForGoAnalysis(). - WithURL("https://github.com/tomarrell/wrapcheck"), - linter.NewConfig(golinters.NewThelper(thelperCfg)). - WithSince("v1.34.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/kulti/thelper"), - linter.NewConfig(golinters.NewTparallel()). - WithSince("v1.32.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithLoadForGoAnalysis(). - WithURL("https://github.com/moricho/tparallel"), - linter.NewConfig(golinters.NewExhaustiveStruct(exhaustiveStructCfg)). - WithSince("v1.32.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithLoadForGoAnalysis(). - WithURL("https://github.com/mbilski/exhaustivestruct"), - linter.NewConfig(golinters.NewErrorLint(errorlintCfg)). - WithSince("v1.32.0"). - WithPresets(linter.PresetBugs, linter.PresetError). - WithLoadForGoAnalysis(). - WithURL("https://github.com/polyfloyd/go-errorlint"), - linter.NewConfig(golinters.NewParallelTest()). - WithSince("v1.33.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithURL("https://github.com/kunwardeep/paralleltest"), - linter.NewConfig(golinters.NewMakezero()). - WithSince("v1.34.0"). - WithPresets(linter.PresetStyle, linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/ashanbrown/makezero"), - linter.NewConfig(golinters.NewForbidigo()). - WithSince("v1.34.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/ashanbrown/forbidigo"), - linter.NewConfig(golinters.NewIfshort(ifshortCfg)). - WithSince("v1.36.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/esimonov/ifshort"), - linter.NewConfig(golinters.NewPredeclared(predeclaredCfg)). - WithSince("v1.35.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/nishanths/predeclared"), - linter.NewConfig(golinters.NewRevive(reviveCfg)). - WithSince("v1.37.0"). - WithPresets(linter.PresetStyle, linter.PresetMetaLinter). - ConsiderSlow(). - WithURL("https://github.com/mgechev/revive"), - linter.NewConfig(golinters.NewDurationCheck()). - WithSince("v1.37.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/charithe/durationcheck"), - linter.NewConfig(golinters.NewWastedAssign()). - WithSince("v1.38.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/sanposhiho/wastedassign"), - linter.NewConfig(golinters.NewImportAs(importAsCfg)). - WithSince("v1.38.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/julz/importas"), - linter.NewConfig(golinters.NewNilErr()). - WithSince("v1.38.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/gostaticanalysis/nilerr"), - linter.NewConfig(golinters.NewForceTypeAssert()). - WithSince("v1.38.0"). - WithPresets(linter.PresetStyle). 
- WithURL("https://github.com/gostaticanalysis/forcetypeassert"), - linter.NewConfig(golinters.NewGoModDirectives(goModDirectivesCfg)). - WithSince("v1.39.0"). - WithPresets(linter.PresetStyle, linter.PresetModule). - WithURL("https://github.com/ldez/gomoddirectives"), - linter.NewConfig(golinters.NewPromlinter()). - WithSince("v1.40.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/yeya24/promlinter"), - linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)). - WithSince("v1.40.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/ldez/tagliatelle"), - linter.NewConfig(golinters.NewErrName()). - WithSince("v1.42.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Antonboom/errname"), - linter.NewConfig(golinters.NewIreturn(ireturnCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/butuzov/ireturn"), - linter.NewConfig(golinters.NewNilNil(nilNilCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Antonboom/nilnil"), - linter.NewConfig(golinters.NewTenv(tenvCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/sivchari/tenv"), - linter.NewConfig(golinters.NewContextCheck()). - WithSince("v1.43.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/sylvia7788/contextcheck"), linter.NewConfig(golinters.NewVarnamelen(varnamelenCfg)). WithSince("v1.43.0"). WithPresets(linter.PresetStyle). WithLoadForGoAnalysis(). WithURL("https://github.com/blizzy78/varnamelen"), - linter.NewConfig(golinters.NewBiDiChkFuncName()). - WithSince("1.43.0"). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/breml/bidichk"), + + linter.NewConfig(golinters.NewWastedAssign()). + WithSince("v1.38.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/sanposhiho/wastedassign"), + + linter.NewConfig(golinters.NewWhitespace()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle). + WithAutoFix(). + WithURL("https://github.com/ultraware/whitespace"), + + linter.NewConfig(golinters.NewWrapcheck(wrapcheckCfg)). + WithSince("v1.32.0"). + WithPresets(linter.PresetStyle, linter.PresetError). + WithLoadForGoAnalysis(). + WithURL("https://github.com/tomarrell/wrapcheck"), + + linter.NewConfig(golinters.NewWSL()). + WithSince("v1.20.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/bombsimon/wsl"), // nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives linter.NewConfig(golinters.NewNoLintLint()). diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go index 47c128930..2f0035185 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go @@ -21,7 +21,7 @@ func (v Validator) validateLintersNames(cfg *config.Linters) error { allNames := append([]string{}, cfg.Enable...) allNames = append(allNames, cfg.Disable...) 
- unknownNames := []string{} + var unknownNames []string for _, name := range allNames { if v.m.GetLinterConfigs(name) == nil { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go index 69852afb9..a393a1d08 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go @@ -84,7 +84,7 @@ func (cl *ContextLoader) buildArgs() []string { if strings.HasPrefix(arg, ".") || filepath.IsAbs(arg) { retArgs = append(retArgs, arg) } else { - // go/packages doesn't work well if we don't have prefix ./ for local packages + // go/packages doesn't work well if we don't have the prefix ./ for local packages retArgs = append(retArgs, fmt.Sprintf(".%c%s", filepath.Separator, arg)) } } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go index 856eec6b6..e1a77c7d6 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go @@ -6,6 +6,7 @@ import ( "runtime/debug" "strings" + "github.com/hashicorp/go-multierror" "github.com/pkg/errors" gopackages "golang.org/x/tools/go/packages" @@ -123,7 +124,7 @@ func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context, // which affects to the next analysis. // To avoid this issue, we clear type information from the packages. // See https://github.com/golangci/golangci-lint/pull/944. - // Currently DoesChangeTypes is true only for `unused`. + // Currently, DoesChangeTypes is true only for `unused`. lintCtx.ClearTypesInPackages() } @@ -192,20 +193,26 @@ func (r Runner) Run(ctx context.Context, linters []*linter.Config, lintCtx *lint sw := timeutils.NewStopwatch("linters", r.Log) defer sw.Print() - var issues []result.Issue + var ( + lintErrors *multierror.Error + issues []result.Issue + ) + for _, lc := range linters { lc := lc sw.TrackStage(lc.Name(), func() { linterIssues, err := r.runLinterSafe(ctx, lintCtx, lc) if err != nil { + lintErrors = multierror.Append(lintErrors, fmt.Errorf("can't run linter %s: %w", lc.Linter.Name(), err)) r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err) + return } issues = append(issues, linterIssues...) }) } - return r.processLintResults(issues), nil + return r.processLintResults(issues), lintErrors.ErrorOrNil() } func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go index b955417a8..57c35c784 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go @@ -17,11 +17,11 @@ const ( // Debug messages, write to debug logs only by logutils.Debug. LogLevelDebug LogLevel = 0 - // Information messages, don't write too much messages, + // Information messages, don't write too many messages, // only useful ones: they are shown when running with -v. LogLevelInfo LogLevel = 1 - // Hidden errors: non critical errors: work can be continued, no need to fail whole program; + // Hidden errors: non-critical errors: work can be continued, no need to fail whole program; // tests will crash if any warning occurred. 
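
The runner.go hunk above stops discarding per-linter failures: each error is still logged, but it is also collected with hashicorp/go-multierror (newly imported there) and Run returns the aggregate via ErrorOrNil. A minimal sketch of that accumulation pattern, with a fake linter standing in for runLinterSafe:

package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/go-multierror"
)

func main() {
	var lintErrors *multierror.Error

	// Collect per-linter failures without aborting the loop,
	// mirroring the Run change above.
	for _, name := range []string{"lintA", "lintB"} {
		if err := runFakeLinter(name); err != nil {
			lintErrors = multierror.Append(lintErrors, fmt.Errorf("can't run linter %s: %w", name, err))
		}
	}

	// ErrorOrNil returns nil when nothing was appended, so callers keep
	// the usual `if err != nil` handling.
	if err := lintErrors.ErrorOrNil(); err != nil {
		fmt.Println(err)
	}
}

// runFakeLinter stands in for runLinterSafe; it always fails here.
func runFakeLinter(name string) error {
	return errors.New("boom")
}
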
LogLevelWarn LogLevel = 2 diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go index c620573b9..72fb8601a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go @@ -9,7 +9,6 @@ import ( "github.com/pkg/errors" ) -//nolint:gomnd func ParseErrorPosition(pos string) (*token.Position, error) { // file:line(:colon) parts := strings.Split(pos, ":") diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go index e4268897f..6a7789ebb 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go @@ -42,7 +42,7 @@ func ExtractErrors(pkg *packages.Package) []packages.Error { continue } - // change pos to local file to properly process it by processors (properly read line etc) + // change pos to local file to properly process it by processors (properly read line etc.) uniqErrors[i].Msg = fmt.Sprintf("%s: %s", uniqErrors[i].Pos, uniqErrors[i].Msg) uniqErrors[i].Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0]) } @@ -65,7 +65,7 @@ func extractErrorsImpl(pkg *packages.Package, seenPackages map[*packages.Package } seenPackages[pkg] = true - if !pkg.IllTyped { // otherwise it may take hours to traverse all deps many times + if !pkg.IllTyped { // otherwise, it may take hours to traverse all deps many times return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go index c5b948a98..bb347bd2b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go @@ -4,10 +4,11 @@ import ( "context" "encoding/xml" "fmt" + "io" + "sort" "github.com/go-xmlfmt/xmlfmt" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -32,13 +33,15 @@ type checkstyleError struct { const defaultCheckstyleSeverity = "error" -type Checkstyle struct{} - -func NewCheckstyle() *Checkstyle { - return &Checkstyle{} +type Checkstyle struct { + w io.Writer } -func (Checkstyle) Print(ctx context.Context, issues []result.Issue) error { +func NewCheckstyle(w io.Writer) *Checkstyle { + return &Checkstyle{w: w} +} + +func (p Checkstyle) Print(ctx context.Context, issues []result.Issue) error { out := checkstyleOutput{ Version: "5.0", } @@ -77,11 +80,19 @@ func (Checkstyle) Print(ctx context.Context, issues []result.Issue) error { out.Files = append(out.Files, file) } + sort.Slice(out.Files, func(i, j int) bool { + return out.Files[i].Name < out.Files[j].Name + }) + data, err := xml.Marshal(&out) if err != nil { return err } - fmt.Fprintf(logutils.StdOut, "%s%s\n", xml.Header, xmlfmt.FormatXML(string(data), "", " ")) + _, err = fmt.Fprintf(p.w, "%s%s\n", xml.Header, xmlfmt.FormatXML(string(data), "", " ")) + if err != nil { + return err + } + return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go index d4e5b5e05..8127632e7 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go @@ -4,8 +4,8 @@ import ( "context" "encoding/json" "fmt" + "io" - 
"github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -24,10 +24,11 @@ type CodeClimateIssue struct { } type CodeClimate struct { + w io.Writer } -func NewCodeClimate() *CodeClimate { - return &CodeClimate{} +func NewCodeClimate(w io.Writer) *CodeClimate { + return &CodeClimate{w: w} } func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { @@ -52,6 +53,9 @@ func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { return err } - fmt.Fprint(logutils.StdOut, string(outputJSON)) + _, err = fmt.Fprint(p.w, string(outputJSON)) + if err != nil { + return err + } return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go index c7186ac27..6a4d05d46 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go @@ -3,20 +3,21 @@ package printers import ( "context" "fmt" + "io" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) type github struct { + w io.Writer } const defaultGithubSeverity = "error" // NewGithub output format outputs issues according to GitHub actions format: // https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message -func NewGithub() Printer { - return &github{} +func NewGithub(w io.Writer) Printer { + return &github{w: w} } // print each line as: ::error file=app.js,line=10,col=15::Something went wrong @@ -35,9 +36,9 @@ func formatIssueAsGithub(issue *result.Issue) string { return ret } -func (g *github) Print(_ context.Context, issues []result.Issue) error { +func (p *github) Print(_ context.Context, issues []result.Issue) error { for ind := range issues { - _, err := fmt.Fprintln(logutils.StdOut, formatIssueAsGithub(&issues[ind])) + _, err := fmt.Fprintln(p.w, formatIssueAsGithub(&issues[ind])) if err != nil { return err } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go index 65ab753bd..3d82d7d8b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go @@ -4,9 +4,9 @@ import ( "context" "fmt" "html/template" + "io" "strings" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -123,13 +123,15 @@ type htmlIssue struct { Code string } -type HTML struct{} - -func NewHTML() *HTML { - return &HTML{} +type HTML struct { + w io.Writer } -func (h HTML) Print(_ context.Context, issues []result.Issue) error { +func NewHTML(w io.Writer) *HTML { + return &HTML{w: w} +} + +func (p HTML) Print(_ context.Context, issues []result.Issue) error { var htmlIssues []htmlIssue for i := range issues { @@ -151,5 +153,5 @@ func (h HTML) Print(_ context.Context, issues []result.Issue) error { return err } - return t.Execute(logutils.StdOut, struct{ Issues []htmlIssue }{Issues: htmlIssues}) + return t.Execute(p.w, struct{ Issues []htmlIssue }{Issues: htmlIssues}) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go index 6ffa996fb..cfef51f58 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go @@ -3,20 +3,21 @@ package printers import ( 
"context" "encoding/json" - "fmt" + "io" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/report" "github.com/golangci/golangci-lint/pkg/result" ) type JSON struct { rd *report.Data + w io.Writer } -func NewJSON(rd *report.Data) *JSON { +func NewJSON(rd *report.Data, w io.Writer) *JSON { return &JSON{ rd: rd, + w: w, } } @@ -30,12 +31,9 @@ func (p JSON) Print(ctx context.Context, issues []result.Issue) error { Issues: issues, Report: p.rd, } - - outputJSON, err := json.Marshal(res) - if err != nil { - return err + if res.Issues == nil { + res.Issues = []result.Issue{} } - fmt.Fprint(logutils.StdOut, string(outputJSON)) - return nil + return json.NewEncoder(p.w).Encode(res) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go index 9277cd66f..0424f78b4 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go @@ -3,9 +3,11 @@ package printers import ( "context" "encoding/xml" + "fmt" + "io" + "sort" "strings" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -31,17 +33,19 @@ type testCaseXML struct { type failureXML struct { Message string `xml:"message,attr"` + Type string `xml:"type,attr"` Content string `xml:",cdata"` } type JunitXML struct { + w io.Writer } -func NewJunitXML() *JunitXML { - return &JunitXML{} +func NewJunitXML(w io.Writer) *JunitXML { + return &JunitXML{w: w} } -func (JunitXML) Print(ctx context.Context, issues []result.Issue) error { +func (p JunitXML) Print(ctx context.Context, issues []result.Issue) error { suites := make(map[string]testSuiteXML) // use a map to group by file for ind := range issues { @@ -56,8 +60,10 @@ func (JunitXML) Print(ctx context.Context, issues []result.Issue) error { Name: i.FromLinter, ClassName: i.Pos.String(), Failure: failureXML{ - Message: i.Text, - Content: strings.Join(i.SourceLines, "\n"), + Type: i.Severity, + Message: i.Pos.String() + ": " + i.Text, + Content: fmt.Sprintf("%s: %s\nCategory: %s\nFile: %s\nLine: %d\nDetails: %s", + i.Severity, i.Text, i.FromLinter, i.Pos.Filename, i.Pos.Line, strings.Join(i.SourceLines, "\n")), }, } @@ -70,7 +76,11 @@ func (JunitXML) Print(ctx context.Context, issues []result.Issue) error { res.TestSuites = append(res.TestSuites, val) } - enc := xml.NewEncoder(logutils.StdOut) + sort.Slice(res.TestSuites, func(i, j int) bool { + return res.TestSuites[i].Suite < res.TestSuites[j].Suite + }) + + enc := xml.NewEncoder(p.w) enc.Indent("", " ") if err := enc.Encode(res); err != nil { return err diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go index d3cdce673..4a126bde6 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go @@ -15,12 +15,14 @@ import ( type Tab struct { printLinterName bool log logutils.Log + w io.Writer } -func NewTab(printLinterName bool, log logutils.Log) *Tab { +func NewTab(printLinterName bool, log logutils.Log, w io.Writer) *Tab { return &Tab{ printLinterName: printLinterName, log: log, + w: w, } } @@ -30,7 +32,7 @@ func (p Tab) SprintfColored(ca color.Attribute, format string, args ...interface } func (p *Tab) Print(ctx context.Context, issues []result.Issue) error { - w := tabwriter.NewWriter(logutils.StdOut, 0, 0, 2, ' ', 0) + w := 
tabwriter.NewWriter(p.w, 0, 0, 2, ' ', 0) for i := range issues { p.printIssue(&issues[i], w) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go index 181452888..c8960e0e9 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go @@ -3,6 +3,7 @@ package printers import ( "context" "fmt" + "io" "strings" "github.com/fatih/color" @@ -17,14 +18,16 @@ type Text struct { printLinterName bool log logutils.Log + w io.Writer } -func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log) *Text { +func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log, w io.Writer) *Text { return &Text{ printIssuedLine: printIssuedLine, useColors: useColors, printLinterName: printLinterName, log: log, + w: w, } } @@ -61,12 +64,12 @@ func (p Text) printIssue(i *result.Issue) { if i.Pos.Column != 0 { pos += fmt.Sprintf(":%d", i.Pos.Column) } - fmt.Fprintf(logutils.StdOut, "%s: %s\n", pos, text) + fmt.Fprintf(p.w, "%s: %s\n", pos, text) } func (p Text) printSourceCode(i *result.Issue) { for _, line := range i.SourceLines { - fmt.Fprintln(logutils.StdOut, line) + fmt.Fprintln(p.w, line) } } @@ -87,5 +90,5 @@ func (p Text) printUnderLinePointer(i *result.Issue) { } } - fmt.Fprintf(logutils.StdOut, "%s%s\n", string(prefixRunes), p.SprintfColored(color.FgYellow, "^")) + fmt.Fprintf(p.w, "%s%s\n", string(prefixRunes), p.SprintfColored(color.FgYellow, "^")) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go index 8576b22db..12ae0fc2d 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go @@ -17,6 +17,7 @@ import ( ) var nolintDebugf = logutils.Debug("nolint") +var nolintRe = regexp.MustCompile(`^nolint( |:|$)`) type ignoredRange struct { linters []string @@ -46,7 +47,7 @@ func (i *ignoredRange) doesMatch(issue *result.Issue) bool { } // handle possible unused nolint directives - // nolintlint generates potential issues for every nolint directive and they are filtered out here + // nolintlint generates potential issues for every nolint directive, and they are filtered out here if issue.FromLinter == golinters.NolintlintName && issue.ExpectNoLint { if issue.ExpectedNoLintLinter != "" { return i.matchedIssueFromLinter[issue.ExpectedNoLintLinter] @@ -234,7 +235,7 @@ func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments . func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange { text = strings.TrimLeft(text, "/ ") - if ok, _ := regexp.MatchString(`^nolint( |:|$)`, text); !ok { + if !nolintRe.MatchString(text) { return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go index e726c3adf..f93059590 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go @@ -9,7 +9,7 @@ import ( ) // Base propose of this functionality to sort results (issues) -// produced by various linters by analyzing code. We achieving this +// produced by various linters by analyzing code. 
We're achieving this // by sorting results.Issues using processor step, and chain based // rules that can compare different properties of the Issues struct. @@ -63,7 +63,6 @@ func (c compareResult) isNeutral() bool { return c == None || c == Equal } -//nolint:exhaustive func (c compareResult) String() string { switch c { case Less: diff --git a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go index b973bbc21..d944dea2e 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go @@ -71,7 +71,7 @@ func (s *Stopwatch) sprintTopStages(n int) string { stageDurations := s.stageDurationsSorted() - stagesStrings := []string{} + var stagesStrings []string for i := 0; i < len(stageDurations) && i < n; i++ { s := stageDurations[i] stagesStrings = append(stagesStrings, fmt.Sprintf("%s: %s", s.name, s.d)) diff --git a/vendor/github.com/google/go-cmp/cmp/compare.go b/vendor/github.com/google/go-cmp/cmp/compare.go index 86d0903b8..2a5446762 100644 --- a/vendor/github.com/google/go-cmp/cmp/compare.go +++ b/vendor/github.com/google/go-cmp/cmp/compare.go @@ -36,7 +36,6 @@ import ( "strings" "github.com/google/go-cmp/cmp/internal/diff" - "github.com/google/go-cmp/cmp/internal/flags" "github.com/google/go-cmp/cmp/internal/function" "github.com/google/go-cmp/cmp/internal/value" ) @@ -319,7 +318,6 @@ func (s *state) tryMethod(t reflect.Type, vx, vy reflect.Value) bool { } func (s *state) callTRFunc(f, v reflect.Value, step Transform) reflect.Value { - v = sanitizeValue(v, f.Type().In(0)) if !s.dynChecker.Next() { return f.Call([]reflect.Value{v})[0] } @@ -343,8 +341,6 @@ func (s *state) callTRFunc(f, v reflect.Value, step Transform) reflect.Value { } func (s *state) callTTBFunc(f, x, y reflect.Value) bool { - x = sanitizeValue(x, f.Type().In(0)) - y = sanitizeValue(y, f.Type().In(1)) if !s.dynChecker.Next() { return f.Call([]reflect.Value{x, y})[0].Bool() } @@ -372,19 +368,6 @@ func detectRaces(c chan<- reflect.Value, f reflect.Value, vs ...reflect.Value) { ret = f.Call(vs)[0] } -// sanitizeValue converts nil interfaces of type T to those of type R, -// assuming that T is assignable to R. -// Otherwise, it returns the input value as is. -func sanitizeValue(v reflect.Value, t reflect.Type) reflect.Value { - // TODO(≥go1.10): Workaround for reflect bug (https://golang.org/issue/22143). - if !flags.AtLeastGo110 { - if v.Kind() == reflect.Interface && v.IsNil() && v.Type() != t { - return reflect.New(t).Elem() - } - } - return v -} - func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) { var addr bool var vax, vay reflect.Value // Addressable versions of vx and vy diff --git a/vendor/github.com/google/go-cmp/cmp/export_panic.go b/vendor/github.com/google/go-cmp/cmp/export_panic.go index 5ff0b4218..ae851fe53 100644 --- a/vendor/github.com/google/go-cmp/cmp/export_panic.go +++ b/vendor/github.com/google/go-cmp/cmp/export_panic.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
+//go:build purego // +build purego package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go index 21eb54858..e2c0f74e8 100644 --- a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go +++ b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build !purego // +build !purego package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go index 1daaaacc5..36062a604 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build !cmp_debug // +build !cmp_debug package diff diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go index 4b91dbcac..a3b97a1ad 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build cmp_debug // +build cmp_debug package diff diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go deleted file mode 100644 index 82d1d7fbf..000000000 --- a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2019, The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !go1.10 - -package flags - -// AtLeastGo110 reports whether the Go toolchain is at least Go 1.10. -const AtLeastGo110 = false diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go deleted file mode 100644 index 8646f0529..000000000 --- a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2019, The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.10 - -package flags - -// AtLeastGo110 reports whether the Go toolchain is at least Go 1.10. -const AtLeastGo110 = true diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/name.go b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go index b6c12cefb..7b498bb2c 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/name.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go @@ -9,6 +9,8 @@ import ( "strconv" ) +var anyType = reflect.TypeOf((*interface{})(nil)).Elem() + // TypeString is nearly identical to reflect.Type.String, // but has an additional option to specify that full type names be used. func TypeString(t reflect.Type, qualified bool) string { @@ -20,6 +22,11 @@ func appendTypeName(b []byte, t reflect.Type, qualified, elideFunc bool) []byte // of the same name and within the same package, // but declared within the namespace of different functions. 
+ // Use the "any" alias instead of "interface{}" for better readability. + if t == anyType { + return append(b, "any"...) + } + // Named type. if t.Name() != "" { if qualified && t.PkgPath() != "" { diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go index 44f4a5afd..1a71bfcbd 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build purego // +build purego package value diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go index a605953d4..16e6860af 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build !purego // +build !purego package value diff --git a/vendor/github.com/google/go-cmp/cmp/path.go b/vendor/github.com/google/go-cmp/cmp/path.go index f01eff318..c71003463 100644 --- a/vendor/github.com/google/go-cmp/cmp/path.go +++ b/vendor/github.com/google/go-cmp/cmp/path.go @@ -178,7 +178,7 @@ type structField struct { unexported bool mayForce bool // Forcibly allow visibility paddr bool // Was parent addressable? - pvx, pvy reflect.Value // Parent values (always addressible) + pvx, pvy reflect.Value // Parent values (always addressable) field reflect.StructField // Field information } diff --git a/vendor/github.com/google/go-cmp/cmp/report_reflect.go b/vendor/github.com/google/go-cmp/cmp/report_reflect.go index 33f03577f..76c04fdbd 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_reflect.go +++ b/vendor/github.com/google/go-cmp/cmp/report_reflect.go @@ -207,9 +207,10 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, // Check whether this is a []byte of text data. if t.Elem() == reflect.TypeOf(byte(0)) { b := v.Bytes() - isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) && unicode.IsSpace(r) } + isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) || unicode.IsSpace(r) } if len(b) > 0 && utf8.Valid(b) && len(bytes.TrimFunc(b, isPrintSpace)) == 0 { out = opts.formatString("", string(b)) + skipType = true return opts.WithTypeMode(emitType).FormatType(t, out) } } diff --git a/vendor/github.com/google/go-cmp/cmp/report_slices.go b/vendor/github.com/google/go-cmp/cmp/report_slices.go index 2ad3bc85b..68b5c1ae1 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_slices.go +++ b/vendor/github.com/google/go-cmp/cmp/report_slices.go @@ -80,7 +80,7 @@ func (opts formatOptions) CanFormatDiffSlice(v *valueNode) bool { } // Use specialized string diffing for longer slices or strings. 
- const minLength = 64 + const minLength = 32 return vx.Len() >= minLength && vy.Len() >= minLength } @@ -563,10 +563,10 @@ func cleanupSurroundingIdentical(groups []diffStats, eq func(i, j int) bool) []d nx := ds.NumIdentical + ds.NumRemoved + ds.NumModified ny := ds.NumIdentical + ds.NumInserted + ds.NumModified var numLeadingIdentical, numTrailingIdentical int - for i := 0; i < nx && i < ny && eq(ix+i, iy+i); i++ { + for j := 0; j < nx && j < ny && eq(ix+j, iy+j); j++ { numLeadingIdentical++ } - for i := 0; i < nx && i < ny && eq(ix+nx-1-i, iy+ny-1-i); i++ { + for j := 0; j < nx && j < ny && eq(ix+nx-1-j, iy+ny-1-j); j++ { numTrailingIdentical++ } if numIdentical := numLeadingIdentical + numTrailingIdentical; numIdentical > 0 { diff --git a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go index 606eb14aa..c7b4fa978 100644 --- a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go +++ b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go @@ -324,8 +324,15 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor { func isZeroInitializer(x ast.Expr) bool { // Assume that a call expression of a single argument is a conversion expression. We can't do better without type information. if c, ok := x.(*ast.CallExpr); ok { - switch c.Fun.(type) { - case *ast.Ident, *ast.SelectorExpr: + fun := c.Fun + if p, ok := fun.(*ast.ParenExpr); ok { + fun = p.X + } + if s, ok := fun.(*ast.StarExpr); ok { + fun = s.X + } + switch fun.(type) { + case *ast.Ident, *ast.SelectorExpr, *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType: default: return false } @@ -342,7 +349,7 @@ func isZeroInitializer(x ast.Expr) bool { return true } case *ast.Ident: - return x.Name == "false" && x.Obj == nil + return (x.Name == "false" || x.Name == "nil") && x.Obj == nil } return false diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/README.md b/vendor/github.com/gostaticanalysis/forcetypeassert/README.md index 517f69400..36a47594e 100644 --- a/vendor/github.com/gostaticanalysis/forcetypeassert/README.md +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/README.md @@ -11,6 +11,17 @@ func f() { } ``` +You need to check if the assertion failed like so: +```go +func f() { + var a interface{} + _, ok := a.(int) + if !ok { // type assertion failed + // handle error + } +} +``` + [godoc]: https://godoc.org/github.com/gostaticanalysis/forcetypeassert [godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go index cdc49e3b5..bb48485d9 100644 --- a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go @@ -2,6 +2,7 @@ package forcetypeassert import ( "go/ast" + "reflect" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -15,53 +16,128 @@ var Analyzer = &analysis.Analyzer{ Requires: []*analysis.Analyzer{ inspect.Analyzer, }, + ResultType: reflect.TypeOf((*Panicable)(nil)), } -const Doc = "forcetypeassert is finds type assertions which did forcely such as below." +// Panicable stores panicable type assertions. 
+type Panicable struct { + m map[ast.Node]bool + nodes []ast.Node +} + +// Check checks whether the node may occur panic or not. +func (p *Panicable) Check(n ast.Node) bool { + return p.m[n] +} + +// Len is number of panicable nodes. +func (p *Panicable) Len() int { + return len(p.nodes) +} + +// At returns the i-th panicable node. +func (p *Panicable) At(i int) ast.Node { + return p.nodes[i] +} + +const Doc = "forcetypeassert is finds type assertions which did forcely" func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + inspect, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + result := &Panicable{m: make(map[ast.Node]bool)} nodeFilter := []ast.Node{ (*ast.AssignStmt)(nil), + (*ast.ValueSpec)(nil), + (*ast.TypeAssertExpr)(nil), } - inspect.Preorder(nodeFilter, func(n ast.Node) { + inspect.Nodes(nodeFilter, func(n ast.Node, push bool) bool { + if !push { + return false + } switch n := n.(type) { case *ast.AssignStmt: - if !hasTypeAssertion(n.Rhs) { - return + return checkAssignStmt(pass, result, n) + case *ast.ValueSpec: + return checkValueSpec(pass, result, n) + case *ast.TypeAssertExpr: + if n.Type != nil { + result.m[n] = true + result.nodes = append(result.nodes, n) + pass.Reportf(n.Pos(), "type assertion must be checked") } - // if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded - if len(n.Rhs) > 1 { - pass.Reportf(n.Pos(), "right hand must be only type assertion") - return - } - if len(n.Lhs) == 2 { - return - } - - tae, ok := n.Rhs[0].(*ast.TypeAssertExpr) - if !ok { - pass.Reportf(n.Pos(), "right hand is not TypeAssertion") - return - } - if tae.Type == nil { - return - } - pass.Reportf(n.Pos(), "type assertion must be checked") + return false } + + return true }) - return nil, nil + return result, nil } -func hasTypeAssertion(exprs []ast.Expr) bool { - for _, node := range exprs { - _, ok := node.(*ast.TypeAssertExpr) - if ok { +func checkAssignStmt(pass *analysis.Pass, result *Panicable, n *ast.AssignStmt) bool { + tae := findTypeAssertion(n.Rhs) + if tae == nil { + return true + } + + switch { + // if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded + case len(n.Rhs) > 1: + pass.Reportf(n.Pos(), "right hand must be only type assertion") + return false + case len(n.Lhs) != 2 && tae.Type != nil: + result.m[n] = true + result.nodes = append(result.nodes, n) + pass.Reportf(n.Pos(), "type assertion must be checked") + return false + case len(n.Lhs) == 2: + return false + } + + return true +} + +func checkValueSpec(pass *analysis.Pass, result *Panicable, n *ast.ValueSpec) bool { + tae := findTypeAssertion(n.Values) + if tae == nil { + return true + } + + switch { + // if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded + case len(n.Values) > 1: + pass.Reportf(n.Pos(), "right hand must be only type assertion") + return false + case len(n.Names) != 2 && tae.Type != nil: + result.m[n] = true + result.nodes = append(result.nodes, n) + pass.Reportf(n.Pos(), "type assertion must be checked") + return false + case len(n.Names) == 2: + return false + } + + return true +} + +func findTypeAssertion(exprs []ast.Expr) *ast.TypeAssertExpr { + for _, expr := range exprs { + var typeAssertExpr *ast.TypeAssertExpr + ast.Inspect(expr, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.FuncLit: + 
return false + case *ast.TypeAssertExpr: + typeAssertExpr = n + return false + } return true + }) + if typeAssertExpr != nil { + return typeAssertExpr } } - return false + return nil } diff --git a/vendor/github.com/hexops/gotextdiff/LICENSE b/vendor/github.com/hexops/gotextdiff/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/hexops/gotextdiff/README.md b/vendor/github.com/hexops/gotextdiff/README.md new file mode 100644 index 000000000..bfd49a0c9 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/README.md @@ -0,0 +1,54 @@ +# gotextdiff - unified text diffing in Go Hexops logo + +This is a copy of the Go text diffing packages that [the official Go language server gopls uses internally](https://github.com/golang/tools/tree/master/internal/lsp/diff) to generate unified diffs. + +If you've previously tried to generate unified text diffs in Go (like the ones you see in Git and on GitHub), you may have found [github.com/sergi/go-diff](https://github.com/sergi/go-diff) which is a Go port of Neil Fraser's google-diff-match-patch code - however it [does not support unified diffs](https://github.com/sergi/go-diff/issues/57). + +This is arguably one of the best (and most maintained) unified text diffing packages in Go as of at least 2020. + +(All credit goes to [the Go authors](http://tip.golang.org/AUTHORS), I am merely re-publishing their work so others can use it.) 
+ +## Example usage + +Import the packages: + +```Go +import ( + "github.com/hexops/gotextdiff" + "github.com/hexops/gotextdiff/myers" +) +``` + +Assuming you want to diff `a.txt` and `b.txt`, whose contents are stored in `aString` and `bString` then: + +```Go +edits := myers.ComputeEdits(span.URIFromPath("a.txt"), aString, bString) +diff := fmt.Sprint(gotextdiff.ToUnified("a.txt", "b.txt", aString, edits)) +``` + +`diff` will be a string like: + +```diff +--- a.txt ++++ b.txt +@@ -1,13 +1,28 @@ +-foo ++bar +``` + +## API compatability + +We will publish a new major version anytime the API changes in a backwards-incompatible way. Because the upstream is not being developed with this being a public package in mind, API breakages may occur more often than in other Go packages (but you can always continue using the old version thanks to Go modules.) + +## Alternatives + +- [github.com/andreyvit/diff](https://github.com/andreyvit/diff): Quick'n'easy string diffing functions for Golang based on github.com/sergi/go-diff. +- [github.com/kylelemons/godebug/diff](https://github.com/kylelemons/godebug/tree/master/diff): implements a linewise diff algorithm ([inactive](https://github.com/kylelemons/godebug/issues/22#issuecomment-524573477)). + +## Contributing + +We will only accept changes made [upstream](https://github.com/golang/tools/tree/master/internal/lsp/diff), please send any contributions to the upstream instead! Compared to the upstream, only import paths will be modified (to be non-`internal` so they are importable.) The only thing we add here is this README. + +## License + +See https://github.com/golang/tools/blob/master/LICENSE diff --git a/vendor/github.com/hexops/gotextdiff/diff.go b/vendor/github.com/hexops/gotextdiff/diff.go new file mode 100644 index 000000000..53e499bc0 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/diff.go @@ -0,0 +1,159 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// package gotextdiff supports a pluggable diff algorithm. +package gotextdiff + +import ( + "sort" + "strings" + + "github.com/hexops/gotextdiff/span" +) + +// TextEdit represents a change to a section of a document. +// The text within the specified span should be replaced by the supplied new text. +type TextEdit struct { + Span span.Span + NewText string +} + +// ComputeEdits is the type for a function that produces a set of edits that +// convert from the before content to the after content. +type ComputeEdits func(uri span.URI, before, after string) []TextEdit + +// SortTextEdits attempts to order all edits by their starting points. +// The sort is stable so that edits with the same starting point will not +// be reordered. +func SortTextEdits(d []TextEdit) { + // Use a stable sort to maintain the order of edits inserted at the same position. + sort.SliceStable(d, func(i int, j int) bool { + return span.Compare(d[i].Span, d[j].Span) < 0 + }) +} + +// ApplyEdits applies the set of edits to the before and returns the resulting +// content. +// It may panic or produce garbage if the edits are not valid for the provided +// before content. 
+func ApplyEdits(before string, edits []TextEdit) string { + // Preconditions: + // - all of the edits apply to before + // - and all the spans for each TextEdit have the same URI + if len(edits) == 0 { + return before + } + _, edits, _ = prepareEdits(before, edits) + after := strings.Builder{} + last := 0 + for _, edit := range edits { + start := edit.Span.Start().Offset() + if start > last { + after.WriteString(before[last:start]) + last = start + } + after.WriteString(edit.NewText) + last = edit.Span.End().Offset() + } + if last < len(before) { + after.WriteString(before[last:]) + } + return after.String() +} + +// LineEdits takes a set of edits and expands and merges them as necessary +// to ensure that there are only full line edits left when it is done. +func LineEdits(before string, edits []TextEdit) []TextEdit { + if len(edits) == 0 { + return nil + } + c, edits, partial := prepareEdits(before, edits) + if partial { + edits = lineEdits(before, c, edits) + } + return edits +} + +// prepareEdits returns a sorted copy of the edits +func prepareEdits(before string, edits []TextEdit) (*span.TokenConverter, []TextEdit, bool) { + partial := false + c := span.NewContentConverter("", []byte(before)) + copied := make([]TextEdit, len(edits)) + for i, edit := range edits { + edit.Span, _ = edit.Span.WithAll(c) + copied[i] = edit + partial = partial || + edit.Span.Start().Offset() >= len(before) || + edit.Span.Start().Column() > 1 || edit.Span.End().Column() > 1 + } + SortTextEdits(copied) + return c, copied, partial +} + +// lineEdits rewrites the edits to always be full line edits +func lineEdits(before string, c *span.TokenConverter, edits []TextEdit) []TextEdit { + adjusted := make([]TextEdit, 0, len(edits)) + current := TextEdit{Span: span.Invalid} + for _, edit := range edits { + if current.Span.IsValid() && edit.Span.Start().Line() <= current.Span.End().Line() { + // overlaps with the current edit, need to combine + // first get the gap from the previous edit + gap := before[current.Span.End().Offset():edit.Span.Start().Offset()] + // now add the text of this edit + current.NewText += gap + edit.NewText + // and then adjust the end position + current.Span = span.New(current.Span.URI(), current.Span.Start(), edit.Span.End()) + } else { + // does not overlap, add previous run (if there is one) + adjusted = addEdit(before, adjusted, current) + // and then remember this edit as the start of the next run + current = edit + } + } + // add the current pending run if there is one + return addEdit(before, adjusted, current) +} + +func addEdit(before string, edits []TextEdit, edit TextEdit) []TextEdit { + if !edit.Span.IsValid() { + return edits + } + // if edit is partial, expand it to full line now + start := edit.Span.Start() + end := edit.Span.End() + if start.Column() > 1 { + // prepend the text and adjust to start of line + delta := start.Column() - 1 + start = span.NewPoint(start.Line(), 1, start.Offset()-delta) + edit.Span = span.New(edit.Span.URI(), start, end) + edit.NewText = before[start.Offset():start.Offset()+delta] + edit.NewText + } + if start.Offset() >= len(before) && start.Line() > 1 && before[len(before)-1] != '\n' { + // after end of file that does not end in eol, so join to last line of file + // to do this we need to know where the start of the last line was + eol := strings.LastIndex(before, "\n") + if eol < 0 { + // file is one non terminated line + eol = 0 + } + delta := len(before) - eol + start = span.NewPoint(start.Line()-1, 1, start.Offset()-delta) + edit.Span = 
span.New(edit.Span.URI(), start, end) + edit.NewText = before[start.Offset():start.Offset()+delta] + edit.NewText + } + if end.Column() > 1 { + remains := before[end.Offset():] + eol := strings.IndexRune(remains, '\n') + if eol < 0 { + eol = len(remains) + } else { + eol++ + } + end = span.NewPoint(end.Line()+1, 1, end.Offset()+eol) + edit.Span = span.New(edit.Span.URI(), start, end) + edit.NewText = edit.NewText + remains[:eol] + } + edits = append(edits, edit) + return edits +} diff --git a/vendor/github.com/hexops/gotextdiff/go.mod b/vendor/github.com/hexops/gotextdiff/go.mod new file mode 100644 index 000000000..e8a357256 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/go.mod @@ -0,0 +1,3 @@ +module github.com/hexops/gotextdiff + +go 1.16 diff --git a/vendor/github.com/hexops/gotextdiff/myers/diff.go b/vendor/github.com/hexops/gotextdiff/myers/diff.go new file mode 100644 index 000000000..5e3e92364 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/myers/diff.go @@ -0,0 +1,205 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package myers implements the Myers diff algorithm. +package myers + +import ( + "strings" + + diff "github.com/hexops/gotextdiff" + "github.com/hexops/gotextdiff/span" +) + +// Sources: +// https://blog.jcoglan.com/2017/02/17/the-myers-diff-algorithm-part-3/ +// https://www.codeproject.com/Articles/42279/%2FArticles%2F42279%2FInvestigating-Myers-diff-algorithm-Part-1-of-2 + +func ComputeEdits(uri span.URI, before, after string) []diff.TextEdit { + ops := operations(splitLines(before), splitLines(after)) + edits := make([]diff.TextEdit, 0, len(ops)) + for _, op := range ops { + s := span.New(uri, span.NewPoint(op.I1+1, 1, 0), span.NewPoint(op.I2+1, 1, 0)) + switch op.Kind { + case diff.Delete: + // Delete: unformatted[i1:i2] is deleted. + edits = append(edits, diff.TextEdit{Span: s}) + case diff.Insert: + // Insert: formatted[j1:j2] is inserted at unformatted[i1:i1]. + if content := strings.Join(op.Content, ""); content != "" { + edits = append(edits, diff.TextEdit{Span: s, NewText: content}) + } + } + } + return edits +} + +type operation struct { + Kind diff.OpKind + Content []string // content from b + I1, I2 int // indices of the line in a + J1 int // indices of the line in b, J2 implied by len(Content) +} + +// operations returns the list of operations to convert a into b, consolidating +// operations for multiple lines and not including equal lines. 
+func operations(a, b []string) []*operation { + if len(a) == 0 && len(b) == 0 { + return nil + } + + trace, offset := shortestEditSequence(a, b) + snakes := backtrack(trace, len(a), len(b), offset) + + M, N := len(a), len(b) + + var i int + solution := make([]*operation, len(a)+len(b)) + + add := func(op *operation, i2, j2 int) { + if op == nil { + return + } + op.I2 = i2 + if op.Kind == diff.Insert { + op.Content = b[op.J1:j2] + } + solution[i] = op + i++ + } + x, y := 0, 0 + for _, snake := range snakes { + if len(snake) < 2 { + continue + } + var op *operation + // delete (horizontal) + for snake[0]-snake[1] > x-y { + if op == nil { + op = &operation{ + Kind: diff.Delete, + I1: x, + J1: y, + } + } + x++ + if x == M { + break + } + } + add(op, x, y) + op = nil + // insert (vertical) + for snake[0]-snake[1] < x-y { + if op == nil { + op = &operation{ + Kind: diff.Insert, + I1: x, + J1: y, + } + } + y++ + } + add(op, x, y) + op = nil + // equal (diagonal) + for x < snake[0] { + x++ + y++ + } + if x >= M && y >= N { + break + } + } + return solution[:i] +} + +// backtrack uses the trace for the edit sequence computation and returns the +// "snakes" that make up the solution. A "snake" is a single deletion or +// insertion followed by zero or diagonals. +func backtrack(trace [][]int, x, y, offset int) [][]int { + snakes := make([][]int, len(trace)) + d := len(trace) - 1 + for ; x > 0 && y > 0 && d > 0; d-- { + V := trace[d] + if len(V) == 0 { + continue + } + snakes[d] = []int{x, y} + + k := x - y + + var kPrev int + if k == -d || (k != d && V[k-1+offset] < V[k+1+offset]) { + kPrev = k + 1 + } else { + kPrev = k - 1 + } + + x = V[kPrev+offset] + y = x - kPrev + } + if x < 0 || y < 0 { + return snakes + } + snakes[d] = []int{x, y} + return snakes +} + +// shortestEditSequence returns the shortest edit sequence that converts a into b. +func shortestEditSequence(a, b []string) ([][]int, int) { + M, N := len(a), len(b) + V := make([]int, 2*(N+M)+1) + offset := N + M + trace := make([][]int, N+M+1) + + // Iterate through the maximum possible length of the SES (N+M). + for d := 0; d <= N+M; d++ { + copyV := make([]int, len(V)) + // k lines are represented by the equation y = x - k. We move in + // increments of 2 because end points for even d are on even k lines. + for k := -d; k <= d; k += 2 { + // At each point, we either go down or to the right. We go down if + // k == -d, and we go to the right if k == d. We also prioritize + // the maximum x value, because we prefer deletions to insertions. + var x int + if k == -d || (k != d && V[k-1+offset] < V[k+1+offset]) { + x = V[k+1+offset] // down + } else { + x = V[k-1+offset] + 1 // right + } + + y := x - k + + // Diagonal moves while we have equal contents. + for x < M && y < N && a[x] == b[y] { + x++ + y++ + } + + V[k+offset] = x + + // Return if we've exceeded the maximum values. + if x == M && y == N { + // Makes sure to save the state of the array before returning. + copy(copyV, V) + trace[d] = copyV + return trace, offset + } + } + + // Save the state of the array. 
+ copy(copyV, V) + trace[d] = copyV + } + return nil, 0 +} + +func splitLines(text string) []string { + lines := strings.SplitAfter(text, "\n") + if lines[len(lines)-1] == "" { + lines = lines[:len(lines)-1] + } + return lines +} diff --git a/vendor/github.com/hexops/gotextdiff/span/parse.go b/vendor/github.com/hexops/gotextdiff/span/parse.go new file mode 100644 index 000000000..aa17c84ec --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/span/parse.go @@ -0,0 +1,100 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package span + +import ( + "strconv" + "strings" + "unicode/utf8" +) + +// Parse returns the location represented by the input. +// Only file paths are accepted, not URIs. +// The returned span will be normalized, and thus if printed may produce a +// different string. +func Parse(input string) Span { + // :0:0#0-0:0#0 + valid := input + var hold, offset int + hadCol := false + suf := rstripSuffix(input) + if suf.sep == "#" { + offset = suf.num + suf = rstripSuffix(suf.remains) + } + if suf.sep == ":" { + valid = suf.remains + hold = suf.num + hadCol = true + suf = rstripSuffix(suf.remains) + } + switch { + case suf.sep == ":": + return New(URIFromPath(suf.remains), NewPoint(suf.num, hold, offset), Point{}) + case suf.sep == "-": + // we have a span, fall out of the case to continue + default: + // separator not valid, rewind to either the : or the start + return New(URIFromPath(valid), NewPoint(hold, 0, offset), Point{}) + } + // only the span form can get here + // at this point we still don't know what the numbers we have mean + // if have not yet seen a : then we might have either a line or a column depending + // on whether start has a column or not + // we build an end point and will fix it later if needed + end := NewPoint(suf.num, hold, offset) + hold, offset = 0, 0 + suf = rstripSuffix(suf.remains) + if suf.sep == "#" { + offset = suf.num + suf = rstripSuffix(suf.remains) + } + if suf.sep != ":" { + // turns out we don't have a span after all, rewind + return New(URIFromPath(valid), end, Point{}) + } + valid = suf.remains + hold = suf.num + suf = rstripSuffix(suf.remains) + if suf.sep != ":" { + // line#offset only + return New(URIFromPath(valid), NewPoint(hold, 0, offset), end) + } + // we have a column, so if end only had one number, it is also the column + if !hadCol { + end = NewPoint(suf.num, end.v.Line, end.v.Offset) + } + return New(URIFromPath(suf.remains), NewPoint(suf.num, hold, offset), end) +} + +type suffix struct { + remains string + sep string + num int +} + +func rstripSuffix(input string) suffix { + if len(input) == 0 { + return suffix{"", "", -1} + } + remains := input + num := -1 + // first see if we have a number at the end + last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' }) + if last >= 0 && last < len(remains)-1 { + number, err := strconv.ParseInt(remains[last+1:], 10, 64) + if err == nil { + num = int(number) + remains = remains[:last+1] + } + } + // now see if we have a trailing separator + r, w := utf8.DecodeLastRuneInString(remains) + if r != ':' && r != '#' && r == '#' { + return suffix{input, "", -1} + } + remains = remains[:len(remains)-w] + return suffix{remains, string(r), num} +} diff --git a/vendor/github.com/hexops/gotextdiff/span/span.go b/vendor/github.com/hexops/gotextdiff/span/span.go new file mode 100644 index 000000000..4d2ad0986 --- /dev/null +++ 
b/vendor/github.com/hexops/gotextdiff/span/span.go @@ -0,0 +1,285 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package span contains support for representing with positions and ranges in +// text files. +package span + +import ( + "encoding/json" + "fmt" + "path" +) + +// Span represents a source code range in standardized form. +type Span struct { + v span +} + +// Point represents a single point within a file. +// In general this should only be used as part of a Span, as on its own it +// does not carry enough information. +type Point struct { + v point +} + +type span struct { + URI URI `json:"uri"` + Start point `json:"start"` + End point `json:"end"` +} + +type point struct { + Line int `json:"line"` + Column int `json:"column"` + Offset int `json:"offset"` +} + +// Invalid is a span that reports false from IsValid +var Invalid = Span{v: span{Start: invalidPoint.v, End: invalidPoint.v}} + +var invalidPoint = Point{v: point{Line: 0, Column: 0, Offset: -1}} + +// Converter is the interface to an object that can convert between line:column +// and offset forms for a single file. +type Converter interface { + //ToPosition converts from an offset to a line:column pair. + ToPosition(offset int) (int, int, error) + //ToOffset converts from a line:column pair to an offset. + ToOffset(line, col int) (int, error) +} + +func New(uri URI, start Point, end Point) Span { + s := Span{v: span{URI: uri, Start: start.v, End: end.v}} + s.v.clean() + return s +} + +func NewPoint(line, col, offset int) Point { + p := Point{v: point{Line: line, Column: col, Offset: offset}} + p.v.clean() + return p +} + +func Compare(a, b Span) int { + if r := CompareURI(a.URI(), b.URI()); r != 0 { + return r + } + if r := comparePoint(a.v.Start, b.v.Start); r != 0 { + return r + } + return comparePoint(a.v.End, b.v.End) +} + +func ComparePoint(a, b Point) int { + return comparePoint(a.v, b.v) +} + +func comparePoint(a, b point) int { + if !a.hasPosition() { + if a.Offset < b.Offset { + return -1 + } + if a.Offset > b.Offset { + return 1 + } + return 0 + } + if a.Line < b.Line { + return -1 + } + if a.Line > b.Line { + return 1 + } + if a.Column < b.Column { + return -1 + } + if a.Column > b.Column { + return 1 + } + return 0 +} + +func (s Span) HasPosition() bool { return s.v.Start.hasPosition() } +func (s Span) HasOffset() bool { return s.v.Start.hasOffset() } +func (s Span) IsValid() bool { return s.v.Start.isValid() } +func (s Span) IsPoint() bool { return s.v.Start == s.v.End } +func (s Span) URI() URI { return s.v.URI } +func (s Span) Start() Point { return Point{s.v.Start} } +func (s Span) End() Point { return Point{s.v.End} } +func (s *Span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) } +func (s *Span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) } + +func (p Point) HasPosition() bool { return p.v.hasPosition() } +func (p Point) HasOffset() bool { return p.v.hasOffset() } +func (p Point) IsValid() bool { return p.v.isValid() } +func (p *Point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) } +func (p *Point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) } +func (p Point) Line() int { + if !p.v.hasPosition() { + panic(fmt.Errorf("position not set in %v", p.v)) + } + return p.v.Line +} +func (p Point) Column() int { + if !p.v.hasPosition() { + panic(fmt.Errorf("position not set in %v", p.v)) + } + return p.v.Column +} +func (p Point) 
Offset() int { + if !p.v.hasOffset() { + panic(fmt.Errorf("offset not set in %v", p.v)) + } + return p.v.Offset +} + +func (p point) hasPosition() bool { return p.Line > 0 } +func (p point) hasOffset() bool { return p.Offset >= 0 } +func (p point) isValid() bool { return p.hasPosition() || p.hasOffset() } +func (p point) isZero() bool { + return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0) +} + +func (s *span) clean() { + //this presumes the points are already clean + if !s.End.isValid() || (s.End == point{}) { + s.End = s.Start + } +} + +func (p *point) clean() { + if p.Line < 0 { + p.Line = 0 + } + if p.Column <= 0 { + if p.Line > 0 { + p.Column = 1 + } else { + p.Column = 0 + } + } + if p.Offset == 0 && (p.Line > 1 || p.Column > 1) { + p.Offset = -1 + } +} + +// Format implements fmt.Formatter to print the Location in a standard form. +// The format produced is one that can be read back in using Parse. +func (s Span) Format(f fmt.State, c rune) { + fullForm := f.Flag('+') + preferOffset := f.Flag('#') + // we should always have a uri, simplify if it is file format + //TODO: make sure the end of the uri is unambiguous + uri := string(s.v.URI) + if c == 'f' { + uri = path.Base(uri) + } else if !fullForm { + uri = s.v.URI.Filename() + } + fmt.Fprint(f, uri) + if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) { + return + } + // see which bits of start to write + printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition()) + printLine := s.HasPosition() && (fullForm || !printOffset) + printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1)) + fmt.Fprint(f, ":") + if printLine { + fmt.Fprintf(f, "%d", s.v.Start.Line) + } + if printColumn { + fmt.Fprintf(f, ":%d", s.v.Start.Column) + } + if printOffset { + fmt.Fprintf(f, "#%d", s.v.Start.Offset) + } + // start is written, do we need end? 
+ if s.IsPoint() { + return + } + // we don't print the line if it did not change + printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line) + fmt.Fprint(f, "-") + if printLine { + fmt.Fprintf(f, "%d", s.v.End.Line) + } + if printColumn { + if printLine { + fmt.Fprint(f, ":") + } + fmt.Fprintf(f, "%d", s.v.End.Column) + } + if printOffset { + fmt.Fprintf(f, "#%d", s.v.End.Offset) + } +} + +func (s Span) WithPosition(c Converter) (Span, error) { + if err := s.update(c, true, false); err != nil { + return Span{}, err + } + return s, nil +} + +func (s Span) WithOffset(c Converter) (Span, error) { + if err := s.update(c, false, true); err != nil { + return Span{}, err + } + return s, nil +} + +func (s Span) WithAll(c Converter) (Span, error) { + if err := s.update(c, true, true); err != nil { + return Span{}, err + } + return s, nil +} + +func (s *Span) update(c Converter, withPos, withOffset bool) error { + if !s.IsValid() { + return fmt.Errorf("cannot add information to an invalid span") + } + if withPos && !s.HasPosition() { + if err := s.v.Start.updatePosition(c); err != nil { + return err + } + if s.v.End.Offset == s.v.Start.Offset { + s.v.End = s.v.Start + } else if err := s.v.End.updatePosition(c); err != nil { + return err + } + } + if withOffset && (!s.HasOffset() || (s.v.End.hasPosition() && !s.v.End.hasOffset())) { + if err := s.v.Start.updateOffset(c); err != nil { + return err + } + if s.v.End.Line == s.v.Start.Line && s.v.End.Column == s.v.Start.Column { + s.v.End.Offset = s.v.Start.Offset + } else if err := s.v.End.updateOffset(c); err != nil { + return err + } + } + return nil +} + +func (p *point) updatePosition(c Converter) error { + line, col, err := c.ToPosition(p.Offset) + if err != nil { + return err + } + p.Line = line + p.Column = col + return nil +} + +func (p *point) updateOffset(c Converter) error { + offset, err := c.ToOffset(p.Line, p.Column) + if err != nil { + return err + } + p.Offset = offset + return nil +} diff --git a/vendor/github.com/hexops/gotextdiff/span/token.go b/vendor/github.com/hexops/gotextdiff/span/token.go new file mode 100644 index 000000000..6f8b9b570 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/span/token.go @@ -0,0 +1,194 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package span + +import ( + "fmt" + "go/token" +) + +// Range represents a source code range in token.Pos form. +// It also carries the FileSet that produced the positions, so that it is +// self contained. +type Range struct { + FileSet *token.FileSet + Start token.Pos + End token.Pos + Converter Converter +} + +type FileConverter struct { + file *token.File +} + +// TokenConverter is a Converter backed by a token file set and file. +// It uses the file set methods to work out the conversions, which +// makes it fast and does not require the file contents. +type TokenConverter struct { + FileConverter + fset *token.FileSet +} + +// NewRange creates a new Range from a FileSet and two positions. +// To represent a point pass a 0 as the end pos. +func NewRange(fset *token.FileSet, start, end token.Pos) Range { + return Range{ + FileSet: fset, + Start: start, + End: end, + } +} + +// NewTokenConverter returns an implementation of Converter backed by a +// token.File. 
+func NewTokenConverter(fset *token.FileSet, f *token.File) *TokenConverter { + return &TokenConverter{fset: fset, FileConverter: FileConverter{file: f}} +} + +// NewContentConverter returns an implementation of Converter for the +// given file content. +func NewContentConverter(filename string, content []byte) *TokenConverter { + fset := token.NewFileSet() + f := fset.AddFile(filename, -1, len(content)) + f.SetLinesForContent(content) + return NewTokenConverter(fset, f) +} + +// IsPoint returns true if the range represents a single point. +func (r Range) IsPoint() bool { + return r.Start == r.End +} + +// Span converts a Range to a Span that represents the Range. +// It will fill in all the members of the Span, calculating the line and column +// information. +func (r Range) Span() (Span, error) { + if !r.Start.IsValid() { + return Span{}, fmt.Errorf("start pos is not valid") + } + f := r.FileSet.File(r.Start) + if f == nil { + return Span{}, fmt.Errorf("file not found in FileSet") + } + return FileSpan(f, r.Converter, r.Start, r.End) +} + +// FileSpan returns a span within tok, using converter to translate between +// offsets and positions. +func FileSpan(tok *token.File, converter Converter, start, end token.Pos) (Span, error) { + var s Span + var err error + var startFilename string + startFilename, s.v.Start.Line, s.v.Start.Column, err = position(tok, start) + if err != nil { + return Span{}, err + } + s.v.URI = URIFromPath(startFilename) + if end.IsValid() { + var endFilename string + endFilename, s.v.End.Line, s.v.End.Column, err = position(tok, end) + if err != nil { + return Span{}, err + } + // In the presence of line directives, a single File can have sections from + // multiple file names. + if endFilename != startFilename { + return Span{}, fmt.Errorf("span begins in file %q but ends in %q", startFilename, endFilename) + } + } + s.v.Start.clean() + s.v.End.clean() + s.v.clean() + if converter != nil { + return s.WithOffset(converter) + } + if startFilename != tok.Name() { + return Span{}, fmt.Errorf("must supply Converter for file %q containing lines from %q", tok.Name(), startFilename) + } + return s.WithOffset(&FileConverter{tok}) +} + +func position(f *token.File, pos token.Pos) (string, int, int, error) { + off, err := offset(f, pos) + if err != nil { + return "", 0, 0, err + } + return positionFromOffset(f, off) +} + +func positionFromOffset(f *token.File, offset int) (string, int, int, error) { + if offset > f.Size() { + return "", 0, 0, fmt.Errorf("offset %v is past the end of the file %v", offset, f.Size()) + } + pos := f.Pos(offset) + p := f.Position(pos) + // TODO(golang/go#41029): Consider returning line, column instead of line+1, 1 if + // the file's last character is not a newline. + if offset == f.Size() { + return p.Filename, p.Line + 1, 1, nil + } + return p.Filename, p.Line, p.Column, nil +} + +// offset is a copy of the Offset function in go/token, but with the adjustment +// that it does not panic on invalid positions. +func offset(f *token.File, pos token.Pos) (int, error) { + if int(pos) < f.Base() || int(pos) > f.Base()+f.Size() { + return 0, fmt.Errorf("invalid pos") + } + return int(pos) - f.Base(), nil +} + +// Range converts a Span to a Range that represents the Span for the supplied +// File. +func (s Span) Range(converter *TokenConverter) (Range, error) { + s, err := s.WithOffset(converter) + if err != nil { + return Range{}, err + } + // go/token will panic if the offset is larger than the file's size, + // so check here to avoid panicking. 
+ if s.Start().Offset() > converter.file.Size() { + return Range{}, fmt.Errorf("start offset %v is past the end of the file %v", s.Start(), converter.file.Size()) + } + if s.End().Offset() > converter.file.Size() { + return Range{}, fmt.Errorf("end offset %v is past the end of the file %v", s.End(), converter.file.Size()) + } + return Range{ + FileSet: converter.fset, + Start: converter.file.Pos(s.Start().Offset()), + End: converter.file.Pos(s.End().Offset()), + Converter: converter, + }, nil +} + +func (l *FileConverter) ToPosition(offset int) (int, int, error) { + _, line, col, err := positionFromOffset(l.file, offset) + return line, col, err +} + +func (l *FileConverter) ToOffset(line, col int) (int, error) { + if line < 0 { + return -1, fmt.Errorf("line is not valid") + } + lineMax := l.file.LineCount() + 1 + if line > lineMax { + return -1, fmt.Errorf("line is beyond end of file %v", lineMax) + } else if line == lineMax { + if col > 1 { + return -1, fmt.Errorf("column is beyond end of file") + } + // at the end of the file, allowing for a trailing eol + return l.file.Size(), nil + } + pos := lineStart(l.file, line) + if !pos.IsValid() { + return -1, fmt.Errorf("line is not in file") + } + // we assume that column is in bytes here, and that the first byte of a + // line is at column 1 + pos += token.Pos(col - 1) + return offset(l.file, pos) +} diff --git a/vendor/github.com/hexops/gotextdiff/span/token111.go b/vendor/github.com/hexops/gotextdiff/span/token111.go new file mode 100644 index 000000000..bf7a5406b --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/span/token111.go @@ -0,0 +1,39 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.12 + +package span + +import ( + "go/token" +) + +// lineStart is the pre-Go 1.12 version of (*token.File).LineStart. For Go +// versions <= 1.11, we borrow logic from the analysisutil package. +// TODO(rstambler): Delete this file when we no longer support Go 1.11. +func lineStart(f *token.File, line int) token.Pos { + // Use binary search to find the start offset of this line. + + min := 0 // inclusive + max := f.Size() // exclusive + for { + offset := (min + max) / 2 + pos := f.Pos(offset) + posn := f.Position(pos) + if posn.Line == line { + return pos - (token.Pos(posn.Column) - 1) + } + + if min+1 >= max { + return token.NoPos + } + + if posn.Line < line { + min = offset + } else { + max = offset + } + } +} diff --git a/vendor/github.com/hexops/gotextdiff/span/token112.go b/vendor/github.com/hexops/gotextdiff/span/token112.go new file mode 100644 index 000000000..017aec9c1 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/span/token112.go @@ -0,0 +1,16 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.12 + +package span + +import ( + "go/token" +) + +// TODO(rstambler): Delete this file when we no longer support Go 1.11. +func lineStart(f *token.File, line int) token.Pos { + return f.LineStart(line) +} diff --git a/vendor/github.com/hexops/gotextdiff/span/uri.go b/vendor/github.com/hexops/gotextdiff/span/uri.go new file mode 100644 index 000000000..250492135 --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/span/uri.go @@ -0,0 +1,169 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package span + +import ( + "fmt" + "net/url" + "os" + "path" + "path/filepath" + "runtime" + "strings" + "unicode" +) + +const fileScheme = "file" + +// URI represents the full URI for a file. +type URI string + +func (uri URI) IsFile() bool { + return strings.HasPrefix(string(uri), "file://") +} + +// Filename returns the file path for the given URI. +// It is an error to call this on a URI that is not a valid filename. +func (uri URI) Filename() string { + filename, err := filename(uri) + if err != nil { + panic(err) + } + return filepath.FromSlash(filename) +} + +func filename(uri URI) (string, error) { + if uri == "" { + return "", nil + } + u, err := url.ParseRequestURI(string(uri)) + if err != nil { + return "", err + } + if u.Scheme != fileScheme { + return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri) + } + // If the URI is a Windows URI, we trim the leading "/" and lowercase + // the drive letter, which will never be case sensitive. + if isWindowsDriveURIPath(u.Path) { + u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:] + } + return u.Path, nil +} + +func URIFromURI(s string) URI { + if !strings.HasPrefix(s, "file://") { + return URI(s) + } + + if !strings.HasPrefix(s, "file:///") { + // VS Code sends URLs with only two slashes, which are invalid. golang/go#39789. + s = "file:///" + s[len("file://"):] + } + // Even though the input is a URI, it may not be in canonical form. VS Code + // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize. + path, err := url.PathUnescape(s[len("file://"):]) + if err != nil { + panic(err) + } + + // File URIs from Windows may have lowercase drive letters. + // Since drive letters are guaranteed to be case insensitive, + // we change them to uppercase to remain consistent. + // For example, file:///c:/x/y/z becomes file:///C:/x/y/z. + if isWindowsDriveURIPath(path) { + path = path[:1] + strings.ToUpper(string(path[1])) + path[2:] + } + u := url.URL{Scheme: fileScheme, Path: path} + return URI(u.String()) +} + +func CompareURI(a, b URI) int { + if equalURI(a, b) { + return 0 + } + if a < b { + return -1 + } + return 1 +} + +func equalURI(a, b URI) bool { + if a == b { + return true + } + // If we have the same URI basename, we may still have the same file URIs. + if !strings.EqualFold(path.Base(string(a)), path.Base(string(b))) { + return false + } + fa, err := filename(a) + if err != nil { + return false + } + fb, err := filename(b) + if err != nil { + return false + } + // Stat the files to check if they are equal. + infoa, err := os.Stat(filepath.FromSlash(fa)) + if err != nil { + return false + } + infob, err := os.Stat(filepath.FromSlash(fb)) + if err != nil { + return false + } + return os.SameFile(infoa, infob) +} + +// URIFromPath returns a span URI for the supplied file path. +// It will always have the file scheme. +func URIFromPath(path string) URI { + if path == "" { + return "" + } + // Handle standard library paths that contain the literal "$GOROOT". + // TODO(rstambler): The go/packages API should allow one to determine a user's $GOROOT. 
+ const prefix = "$GOROOT" + if len(path) >= len(prefix) && strings.EqualFold(prefix, path[:len(prefix)]) { + suffix := path[len(prefix):] + path = runtime.GOROOT() + suffix + } + if !isWindowsDrivePath(path) { + if abs, err := filepath.Abs(path); err == nil { + path = abs + } + } + // Check the file path again, in case it became absolute. + if isWindowsDrivePath(path) { + path = "/" + strings.ToUpper(string(path[0])) + path[1:] + } + path = filepath.ToSlash(path) + u := url.URL{ + Scheme: fileScheme, + Path: path, + } + return URI(u.String()) +} + +// isWindowsDrivePath returns true if the file path is of the form used by +// Windows. We check if the path begins with a drive letter, followed by a ":". +// For example: C:/x/y/z. +func isWindowsDrivePath(path string) bool { + if len(path) < 3 { + return false + } + return unicode.IsLetter(rune(path[0])) && path[1] == ':' +} + +// isWindowsDriveURI returns true if the file URI is of the format used by +// Windows URIs. The url.Parse package does not specially handle Windows paths +// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:"). +func isWindowsDriveURIPath(uri string) bool { + if len(uri) < 4 { + return false + } + return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':' +} diff --git a/vendor/github.com/hexops/gotextdiff/span/utf16.go b/vendor/github.com/hexops/gotextdiff/span/utf16.go new file mode 100644 index 000000000..f06a2468b --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/span/utf16.go @@ -0,0 +1,91 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package span + +import ( + "fmt" + "unicode/utf16" + "unicode/utf8" +) + +// ToUTF16Column calculates the utf16 column expressed by the point given the +// supplied file contents. +// This is used to convert from the native (always in bytes) column +// representation and the utf16 counts used by some editors. +func ToUTF16Column(p Point, content []byte) (int, error) { + if !p.HasPosition() { + return -1, fmt.Errorf("ToUTF16Column: point is missing position") + } + if !p.HasOffset() { + return -1, fmt.Errorf("ToUTF16Column: point is missing offset") + } + offset := p.Offset() // 0-based + colZero := p.Column() - 1 // 0-based + if colZero == 0 { + // 0-based column 0, so it must be chr 1 + return 1, nil + } else if colZero < 0 { + return -1, fmt.Errorf("ToUTF16Column: column is invalid (%v)", colZero) + } + // work out the offset at the start of the line using the column + lineOffset := offset - colZero + if lineOffset < 0 || offset > len(content) { + return -1, fmt.Errorf("ToUTF16Column: offsets %v-%v outside file contents (%v)", lineOffset, offset, len(content)) + } + // Use the offset to pick out the line start. + // This cannot panic: offset > len(content) and lineOffset < offset. + start := content[lineOffset:] + + // Now, truncate down to the supplied column. + start = start[:colZero] + + // and count the number of utf16 characters + // in theory we could do this by hand more efficiently... + return len(utf16.Encode([]rune(string(start)))) + 1, nil +} + +// FromUTF16Column advances the point by the utf16 character offset given the +// supplied line contents. +// This is used to convert from the utf16 counts used by some editors to the +// native (always in bytes) column representation. 
+func FromUTF16Column(p Point, chr int, content []byte) (Point, error) { + if !p.HasOffset() { + return Point{}, fmt.Errorf("FromUTF16Column: point is missing offset") + } + // if chr is 1 then no adjustment needed + if chr <= 1 { + return p, nil + } + if p.Offset() >= len(content) { + return p, fmt.Errorf("FromUTF16Column: offset (%v) greater than length of content (%v)", p.Offset(), len(content)) + } + remains := content[p.Offset():] + // scan forward the specified number of characters + for count := 1; count < chr; count++ { + if len(remains) <= 0 { + return Point{}, fmt.Errorf("FromUTF16Column: chr goes beyond the content") + } + r, w := utf8.DecodeRune(remains) + if r == '\n' { + // Per the LSP spec: + // + // > If the character value is greater than the line length it + // > defaults back to the line length. + break + } + remains = remains[w:] + if r >= 0x10000 { + // a two point rune + count++ + // if we finished in a two point rune, do not advance past the first + if count >= chr { + break + } + } + p.v.Column += w + p.v.Offset += w + } + return p, nil +} diff --git a/vendor/github.com/hexops/gotextdiff/unified.go b/vendor/github.com/hexops/gotextdiff/unified.go new file mode 100644 index 000000000..b7d85cfcc --- /dev/null +++ b/vendor/github.com/hexops/gotextdiff/unified.go @@ -0,0 +1,210 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gotextdiff + +import ( + "fmt" + "strings" +) + +// Unified represents a set of edits as a unified diff. +type Unified struct { + // From is the name of the original file. + From string + // To is the name of the modified file. + To string + // Hunks is the set of edit hunks needed to transform the file content. + Hunks []*Hunk +} + +// Hunk represents a contiguous set of line edits to apply. +type Hunk struct { + // The line in the original source where the hunk starts. + FromLine int + // The line in the original source where the hunk finishes. + ToLine int + // The set of line based edits to apply. + Lines []Line +} + +// Line represents a single line operation to apply as part of a Hunk. +type Line struct { + // Kind is the type of line this represents, deletion, insertion or copy. + Kind OpKind + // Content is the content of this line. + // For deletion it is the line being removed, for all others it is the line + // to put in the output. + Content string +} + +// OpKind is used to denote the type of operation a line represents. +type OpKind int + +const ( + // Delete is the operation kind for a line that is present in the input + // but not in the output. + Delete OpKind = iota + // Insert is the operation kind for a line that is new in the output. + Insert + // Equal is the operation kind for a line that is the same in the input and + // output, often used to provide context around edited lines. + Equal +) + +// String returns a human readable representation of an OpKind. It is not +// intended for machine processing. +func (k OpKind) String() string { + switch k { + case Delete: + return "delete" + case Insert: + return "insert" + case Equal: + return "equal" + default: + panic("unknown operation kind") + } +} + +const ( + edge = 3 + gap = edge * 2 +) + +// ToUnified takes a file contents and a sequence of edits, and calculates +// a unified diff that represents those edits. 
+func ToUnified(from, to string, content string, edits []TextEdit) Unified { + u := Unified{ + From: from, + To: to, + } + if len(edits) == 0 { + return u + } + c, edits, partial := prepareEdits(content, edits) + if partial { + edits = lineEdits(content, c, edits) + } + lines := splitLines(content) + var h *Hunk + last := 0 + toLine := 0 + for _, edit := range edits { + start := edit.Span.Start().Line() - 1 + end := edit.Span.End().Line() - 1 + switch { + case h != nil && start == last: + //direct extension + case h != nil && start <= last+gap: + //within range of previous lines, add the joiners + addEqualLines(h, lines, last, start) + default: + //need to start a new hunk + if h != nil { + // add the edge to the previous hunk + addEqualLines(h, lines, last, last+edge) + u.Hunks = append(u.Hunks, h) + } + toLine += start - last + h = &Hunk{ + FromLine: start + 1, + ToLine: toLine + 1, + } + // add the edge to the new hunk + delta := addEqualLines(h, lines, start-edge, start) + h.FromLine -= delta + h.ToLine -= delta + } + last = start + for i := start; i < end; i++ { + h.Lines = append(h.Lines, Line{Kind: Delete, Content: lines[i]}) + last++ + } + if edit.NewText != "" { + for _, line := range splitLines(edit.NewText) { + h.Lines = append(h.Lines, Line{Kind: Insert, Content: line}) + toLine++ + } + } + } + if h != nil { + // add the edge to the final hunk + addEqualLines(h, lines, last, last+edge) + u.Hunks = append(u.Hunks, h) + } + return u +} + +func splitLines(text string) []string { + lines := strings.SplitAfter(text, "\n") + if lines[len(lines)-1] == "" { + lines = lines[:len(lines)-1] + } + return lines +} + +func addEqualLines(h *Hunk, lines []string, start, end int) int { + delta := 0 + for i := start; i < end; i++ { + if i < 0 { + continue + } + if i >= len(lines) { + return delta + } + h.Lines = append(h.Lines, Line{Kind: Equal, Content: lines[i]}) + delta++ + } + return delta +} + +// Format converts a unified diff to the standard textual form for that diff. +// The output of this function can be passed to tools like patch. +func (u Unified) Format(f fmt.State, r rune) { + if len(u.Hunks) == 0 { + return + } + fmt.Fprintf(f, "--- %s\n", u.From) + fmt.Fprintf(f, "+++ %s\n", u.To) + for _, hunk := range u.Hunks { + fromCount, toCount := 0, 0 + for _, l := range hunk.Lines { + switch l.Kind { + case Delete: + fromCount++ + case Insert: + toCount++ + default: + fromCount++ + toCount++ + } + } + fmt.Fprint(f, "@@") + if fromCount > 1 { + fmt.Fprintf(f, " -%d,%d", hunk.FromLine, fromCount) + } else { + fmt.Fprintf(f, " -%d", hunk.FromLine) + } + if toCount > 1 { + fmt.Fprintf(f, " +%d,%d", hunk.ToLine, toCount) + } else { + fmt.Fprintf(f, " +%d", hunk.ToLine) + } + fmt.Fprint(f, " @@\n") + for _, l := range hunk.Lines { + switch l.Kind { + case Delete: + fmt.Fprintf(f, "-%s", l.Content) + case Insert: + fmt.Fprintf(f, "+%s", l.Content) + default: + fmt.Fprintf(f, " %s", l.Content) + } + if !strings.HasSuffix(l.Content, "\n") { + fmt.Fprintf(f, "\n\\ No newline at end of file\n") + } + } + } +} diff --git a/vendor/github.com/julz/importas/README.md b/vendor/github.com/julz/importas/README.md index 9489fe7d8..1ea7b4fb2 100644 --- a/vendor/github.com/julz/importas/README.md +++ b/vendor/github.com/julz/importas/README.md @@ -31,6 +31,18 @@ importas -no-unaliased \ ./... ~~~~ +### `-no-extra-aliases` option + +By default, importas allows aliases which are not specified by `-alias` flags. +With `-no-extra-aliases` option, importas does not allow any unspecified aliases. 
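+
+As an illustrative sketch (not part of the upstream README), an import whose alias is not declared via any `-alias` flag, such as the hypothetical file below, would be reported by `-no-extra-aliases` with a suggested fix that removes the alias; the invocation enabling the check follows.
+
+~~~~go
+package example
+
+import (
+	// "corev1" is not listed in any -alias flag, so -no-extra-aliases
+	// reports it and suggests dropping the alias (illustrative package choice).
+	corev1 "k8s.io/api/core/v1"
+)
+
+var _ corev1.Pod // referenced only so the aliased import compiles
+~~~~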
+ +~~~~ +importas -no-extra-aliases \ + -alias knative.dev/serving/pkg/apis/autoscaling/v1alpha1:autoscalingv1alpha1 \ + -alias knative.dev/serving/pkg/apis/serving/v1:servingv1 \ + ./... +~~~~ + ### Use regular expression You can specify the package path by regular expression, and alias by regular expression replacement syntax like following snippet. diff --git a/vendor/github.com/julz/importas/analyzer.go b/vendor/github.com/julz/importas/analyzer.go index 4fbe104e5..f19653478 100644 --- a/vendor/github.com/julz/importas/analyzer.go +++ b/vendor/github.com/julz/importas/analyzer.go @@ -81,17 +81,42 @@ func visitImportSpecNode(config *Config, node *ast.ImportSpec, pass *analysis.Pa TextEdits: findEdits(node, pass.TypesInfo.Uses, path, alias, required), }}, }) + } else if !exists && config.DisallowExtraAliases { + pass.Report(analysis.Diagnostic{ + Pos: node.Pos(), + End: node.End(), + Message: fmt.Sprintf("import %q has alias %q which is not part of config", path, alias), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "remove alias", + TextEdits: findEdits(node, pass.TypesInfo.Uses, path, alias, ""), + }}, + }) } } func findEdits(node ast.Node, uses map[*ast.Ident]types.Object, importPath, original, required string) []analysis.TextEdit { // Edit the actual import line. + importLine := strconv.Quote(importPath) + if required != "" { + importLine = required + " " + importLine + } result := []analysis.TextEdit{{ Pos: node.Pos(), End: node.End(), - NewText: []byte(required + " " + strconv.Quote(importPath)), + NewText: []byte(importLine), }} + packageReplacement := required + if required == "" { + packageParts := strings.Split(importPath, "/") + if len(packageParts) != 0 { + packageReplacement = packageParts[len(packageParts)-1] + } else { + // fall back to original + packageReplacement = original + } + } + // Edit all the uses of the alias in the code. 
for use, pkg := range uses { pkgName, ok := pkg.(*types.PkgName) @@ -108,7 +133,7 @@ func findEdits(node ast.Node, uses map[*ast.Ident]types.Object, importPath, orig result = append(result, analysis.TextEdit{ Pos: use.Pos(), End: use.End(), - NewText: []byte(required), + NewText: []byte(packageReplacement), }) } diff --git a/vendor/github.com/julz/importas/config.go b/vendor/github.com/julz/importas/config.go index 2e1c1d887..8c9c76d91 100644 --- a/vendor/github.com/julz/importas/config.go +++ b/vendor/github.com/julz/importas/config.go @@ -7,9 +7,10 @@ import ( ) type Config struct { - RequiredAlias map[string]string - Rules []*Rule - DisallowUnaliased bool + RequiredAlias map[string]string + Rules []*Rule + DisallowUnaliased bool + DisallowExtraAliases bool } func (c *Config) CompileRegexp() error { diff --git a/vendor/github.com/julz/importas/flags.go b/vendor/github.com/julz/importas/flags.go index 22be4af3e..f8107104a 100644 --- a/vendor/github.com/julz/importas/flags.go +++ b/vendor/github.com/julz/importas/flags.go @@ -11,6 +11,7 @@ func flags(config *Config) flag.FlagSet { fs := flag.FlagSet{} fs.Var(stringMap(config.RequiredAlias), "alias", "required import alias in form path:alias") fs.BoolVar(&config.DisallowUnaliased, "no-unaliased", false, "do not allow unaliased imports of aliased packages") + fs.BoolVar(&config.DisallowExtraAliases, "no-extra-aliases", false, "do not allow non-required aliases") return fs } diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go index 2f8dba957..fd4bfddb2 100644 --- a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go @@ -144,13 +144,15 @@ func (t thelper) run(pass *analysis.Pass) (interface{}, error) { fd.Body = n.Body fd.Name = n.Name case *ast.CallExpr: - tbRunSubtestExpr := extractSubtestExp(pass, n, tCheckOpts.tbRun) - if tbRunSubtestExpr == nil { - tbRunSubtestExpr = extractSubtestExp(pass, n, bCheckOpts.tbRun) + tbRunSubtestExprs := extractSubtestExp(pass, n, tCheckOpts.tbRun, tCheckOpts.tbTestFuncType) + if len(tbRunSubtestExprs) == 0 { + tbRunSubtestExprs = extractSubtestExp(pass, n, bCheckOpts.tbRun, bCheckOpts.tbTestFuncType) } - if tbRunSubtestExpr != nil { - reports.Filter(funcDefPosition(pass, tbRunSubtestExpr)) + if len(tbRunSubtestExprs) > 0 { + for _, expr := range tbRunSubtestExprs { + reports.Filter(funcDefPosition(pass, expr)) + } } else { reports.NoFilter(funcDefPosition(pass, n.Fun)) } @@ -170,15 +172,16 @@ func (t thelper) run(pass *analysis.Pass) (interface{}, error) { } type checkFuncOpts struct { - skipPrefix string - varName string - tbHelper types.Object - tbRun types.Object - tbType types.Type - ctxType types.Type - checkBegin bool - checkFirst bool - checkName bool + skipPrefix string + varName string + tbHelper types.Object + tbRun types.Object + tbTestFuncType types.Type + tbType types.Type + ctxType types.Type + checkBegin bool + checkFirst bool + checkName bool } func (t thelper) buildCheckFuncOpts(pass *analysis.Pass) (checkFuncOpts, checkFuncOpts, checkFuncOpts, bool) { @@ -222,16 +225,19 @@ func (t thelper) buildTestCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) return checkFuncOpts{}, false } + tbType := types.NewPointer(tObj.Type()) + tVar := types.NewVar(token.NoPos, nil, "t", tbType) return checkFuncOpts{ - skipPrefix: "Test", - varName: "t", - tbHelper: tHelper, - tbRun: tRun, - tbType: types.NewPointer(tObj.Type()), - ctxType: ctxType, - checkBegin: 
t.enabledChecks.Enabled(checkTBegin), - checkFirst: t.enabledChecks.Enabled(checkTFirst), - checkName: t.enabledChecks.Enabled(checkTName), + skipPrefix: "Test", + varName: "t", + tbHelper: tHelper, + tbRun: tRun, + tbType: tbType, + tbTestFuncType: types.NewSignature(nil, types.NewTuple(tVar), nil, false), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkTBegin), + checkFirst: t.enabledChecks.Enabled(checkTFirst), + checkName: t.enabledChecks.Enabled(checkTName), }, true } @@ -251,16 +257,19 @@ func (t thelper) buildBenchmarkCheckFuncOpts(pass *analysis.Pass, ctxType types. return checkFuncOpts{}, false } + tbType := types.NewPointer(bObj.Type()) + bVar := types.NewVar(token.NoPos, nil, "b", tbType) return checkFuncOpts{ - skipPrefix: "Benchmark", - varName: "b", - tbHelper: bHelper, - tbRun: bRun, - tbType: types.NewPointer(bObj.Type()), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkBBegin), - checkFirst: t.enabledChecks.Enabled(checkBFirst), - checkName: t.enabledChecks.Enabled(checkBName), + skipPrefix: "Benchmark", + varName: "b", + tbHelper: bHelper, + tbRun: bRun, + tbType: types.NewPointer(bObj.Type()), + tbTestFuncType: types.NewSignature(nil, types.NewTuple(bVar), nil, false), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkBBegin), + checkFirst: t.enabledChecks.Enabled(checkBFirst), + checkName: t.enabledChecks.Enabled(checkBName), }, true } @@ -333,20 +342,18 @@ func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts ch } } +// searchFuncParam search a function param with desired type. +// It returns the param field, its position, and true if something is found. func searchFuncParam(pass *analysis.Pass, f funcDecl, p types.Type) (*ast.Field, int, bool) { for i, f := range f.Type.Params.List { - typeInfo, ok := pass.TypesInfo.Types[f.Type] - if !ok { - continue - } - - if types.Identical(typeInfo.Type, p) { + if isExprHasType(pass, f.Type, p) { return f, i, true } } return nil, 0, false } +// isTHelperCall returns true if provided statement 's' is t.Helper() or b.Helper() call. func isTHelperCall(pass *analysis.Pass, s ast.Stmt, tHelper types.Object) bool { exprStmt, ok := s.(*ast.ExprStmt) if !ok { @@ -366,7 +373,11 @@ func isTHelperCall(pass *analysis.Pass, s ast.Stmt, tHelper types.Object) bool { return isSelectorCall(pass, selExpr, tHelper) } -func extractSubtestExp(pass *analysis.Pass, e *ast.CallExpr, tbRun types.Object) ast.Expr { +// extractSubtestExp analyzes that call expresion 'e' is t.Run or b.Run +// and returns subtest function. +func extractSubtestExp( + pass *analysis.Pass, e *ast.CallExpr, tbRun types.Object, testFuncType types.Type, +) []ast.Expr { selExpr, ok := e.Fun.(*ast.SelectorExpr) if !ok { return nil @@ -380,9 +391,70 @@ func extractSubtestExp(pass *analysis.Pass, e *ast.CallExpr, tbRun types.Object) return nil } - return e.Args[1] + if funcs := unwrapTestingFunctionBuilding(pass, e.Args[1], testFuncType); funcs != nil { + return funcs + } + + return []ast.Expr{e.Args[1]} } +// unwrapTestingFunctionConstruction checks that expresion is build testing functions +// and returns the result of building. 
+func unwrapTestingFunctionBuilding(pass *analysis.Pass, expr ast.Expr, testFuncType types.Type) []ast.Expr { + callExpr, ok := expr.(*ast.CallExpr) + if !ok { + return nil + } + + var funcDecl funcDecl + switch f := callExpr.Fun.(type) { + case *ast.FuncLit: + funcDecl.Body = f.Body + funcDecl.Type = f.Type + case *ast.Ident: + funObjDecl := findFunctionDeclaration(pass, f) + if funObjDecl == nil { + return nil + } + + funcDecl.Body = funObjDecl.Body + funcDecl.Type = funObjDecl.Type + case *ast.SelectorExpr: + fd := findSelectorDeclaration(pass, f) + if fd == nil { + return nil + } + + funcDecl.Body = fd.Body + funcDecl.Type = fd.Type + default: + return nil + } + + results := funcDecl.Type.Results.List + if len(results) != 1 || !isExprHasType(pass, results[0].Type, testFuncType) { + return nil + } + + var funcs []ast.Expr + ast.Inspect(funcDecl.Body, func(n ast.Node) bool { + if n == nil { + return false + } + + if retStmt, ok := n.(*ast.ReturnStmt); ok { + if len(retStmt.Results) == 1 { + funcs = append(funcs, retStmt.Results[0]) + } + } + return true + }) + + return funcs +} + +// funcDefPosition returns a function's position. +// It works with anonymous functions as well with function names. func funcDefPosition(pass *analysis.Pass, e ast.Expr) token.Pos { anonFunLit, ok := e.(*ast.FuncLit) if ok { @@ -406,6 +478,8 @@ func funcDefPosition(pass *analysis.Pass, e ast.Expr) token.Pos { return funDef.Pos() } +// isSelectorCall checks is selExpr is a call expresion on specific callObj. +// Useful to check Run() call for t.Run or b.Run. func isSelectorCall(pass *analysis.Pass, selExpr *ast.SelectorExpr, callObj types.Object) bool { sel, ok := pass.TypesInfo.Selections[selExpr] if !ok { @@ -414,3 +488,77 @@ func isSelectorCall(pass *analysis.Pass, selExpr *ast.SelectorExpr, callObj type return sel.Obj() == callObj } + +// isExprHasType returns true if expr has expected type. +func isExprHasType(pass *analysis.Pass, expr ast.Expr, expType types.Type) bool { + typeInfo, ok := pass.TypesInfo.Types[expr] + if !ok { + return false + } + + return types.Identical(typeInfo.Type, expType) +} + +// findSelectorDeclaration returns function declaration called by selector expression. +func findSelectorDeclaration(pass *analysis.Pass, expr *ast.SelectorExpr) *ast.FuncDecl { + xsel, ok := pass.TypesInfo.Selections[expr] + if !ok { + return nil + } + + for _, file := range pass.Files { + for _, decl := range file.Decls { + fd, ok := decl.(*ast.FuncDecl) + if ok && fd.Recv != nil && len(fd.Recv.List) == 1 { + recvType, ok := fd.Recv.List[0].Type.(*ast.Ident) + if !ok { + continue + } + + recvObj, ok := pass.TypesInfo.Uses[recvType] + if !ok { + continue + } + + if !(types.Identical(recvObj.Type(), xsel.Recv())) { + continue + } + + if fd.Name.Name == expr.Sel.Name { + return fd + } + } + } + } + + return nil +} + +// findFunctionDeclaration returns function declaration called by identity. 
+func findFunctionDeclaration(pass *analysis.Pass, ident *ast.Ident) *ast.FuncDecl { + if ident.Obj != nil { + if funObjDecl, ok := ident.Obj.Decl.(*ast.FuncDecl); ok { + return funObjDecl + } + } + + obj := pass.TypesInfo.ObjectOf(ident) + if obj == nil { + return nil + } + + for _, file := range pass.Files { + for _, decl := range file.Decls { + funcDecl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + + if funcDecl.Name.Pos() == obj.Pos() { + return funcDecl + } + } + } + + return nil +} diff --git a/vendor/github.com/ldez/tagliatelle/.golangci.yml b/vendor/github.com/ldez/tagliatelle/.golangci.yml index b897103e9..53313e308 100644 --- a/vendor/github.com/ldez/tagliatelle/.golangci.yml +++ b/vendor/github.com/ldez/tagliatelle/.golangci.yml @@ -10,8 +10,6 @@ linters-settings: - fieldalignment gocyclo: min-complexity: 15 - maligned: - suggest-new: true goconst: min-len: 5 min-occurrences: 3 @@ -48,6 +46,7 @@ linters-settings: linters: enable-all: true disable: + - golint # deprecated - maligned # deprecated - interfacer # deprecated - scopelint # deprecated @@ -69,6 +68,9 @@ linters: - prealloc - ifshort - forcetypeassert + - varnamelen + - nilnil + - errchkjson issues: exclude-use-default: false diff --git a/vendor/github.com/ldez/tagliatelle/Makefile b/vendor/github.com/ldez/tagliatelle/Makefile index f66a39804..196f70c02 100644 --- a/vendor/github.com/ldez/tagliatelle/Makefile +++ b/vendor/github.com/ldez/tagliatelle/Makefile @@ -12,4 +12,4 @@ check: golangci-lint run build: - go build -v -ldflags "-s -w" -trimpath ./cmd/tagliatelle/ + go build -ldflags "-s -w" -trimpath ./cmd/tagliatelle/ diff --git a/vendor/github.com/ldez/tagliatelle/readme.md b/vendor/github.com/ldez/tagliatelle/readme.md index 846767b2c..85849eab4 100644 --- a/vendor/github.com/ldez/tagliatelle/readme.md +++ b/vendor/github.com/ldez/tagliatelle/readme.md @@ -10,14 +10,67 @@ Supported string casing: - `camel` - `pascal` - `kebab` -- `smake` -- `goCamel` -- `goPascal` -- `goKebab` -- `goSmake` +- `snake` +- `goCamel` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). +- `goPascal` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). +- `goKebab` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). +- `goSnake` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). 
- `upper` - `lower` +| Source | Camel Case | Go Camel Case | +|----------------|----------------|----------------| +| GooID | gooId | gooID | +| HTTPStatusCode | httpStatusCode | httpStatusCode | +| FooBAR | fooBar | fooBar | +| URL | url | url | +| ID | id | id | +| hostIP | hostIp | hostIP | +| JSON | json | json | +| JSONName | jsonName | jsonName | +| NameJSON | nameJson | nameJSON | +| UneTête | uneTête | uneTête | + +| Source | Pascal Case | Go Pascal Case | +|----------------|----------------|----------------| +| GooID | GooId | GooID | +| HTTPStatusCode | HttpStatusCode | HTTPStatusCode | +| FooBAR | FooBar | FooBar | +| URL | Url | URL | +| ID | Id | ID | +| hostIP | HostIp | HostIP | +| JSON | Json | JSON | +| JSONName | JsonName | JSONName | +| NameJSON | NameJson | NameJSON | +| UneTête | UneTête | UneTête | + +| Source | Snake Case | Go Snake Case | +|----------------|------------------|------------------| +| GooID | goo_id | goo_ID | +| HTTPStatusCode | http_status_code | HTTP_status_code | +| FooBAR | foo_bar | foo_bar | +| URL | url | URL | +| ID | id | ID | +| hostIP | host_ip | host_IP | +| JSON | json | JSON | +| JSONName | json_name | JSON_name | +| NameJSON | name_json | name_JSON | +| UneTête | une_tête | une_tête | + +| Source | Kebab Case | Go KebabCase | +|----------------|------------------|------------------| +| GooID | goo-id | goo-ID | +| HTTPStatusCode | http-status-code | HTTP-status-code | +| FooBAR | foo-bar | foo-bar | +| URL | url | URL | +| ID | id | ID | +| hostIP | host-ip | host-IP | +| JSON | json | JSON | +| JSONName | json-name | JSON-name | +| NameJSON | name-json | name-JSON | +| UneTête | une-tête | une-tête | + + ## Examples ```go diff --git a/vendor/github.com/ldez/tagliatelle/tagliatelle.go b/vendor/github.com/ldez/tagliatelle/tagliatelle.go index dfb302b12..53e77d1cb 100644 --- a/vendor/github.com/ldez/tagliatelle/tagliatelle.go +++ b/vendor/github.com/ldez/tagliatelle/tagliatelle.go @@ -85,7 +85,7 @@ func analyze(pass *analysis.Pass, config Config, n *ast.StructType, field *ast.F continue } - value, ok := lookupTagValue(field.Tag, key) + value, flags, ok := lookupTagValue(field.Tag, key) if !ok { // skip when no struct tag for the key continue @@ -96,11 +96,19 @@ func analyze(pass *analysis.Pass, config Config, n *ast.StructType, field *ast.F continue } - if value == "" { - // skip empty value, it can change in the future + // TODO(ldez): need to be rethink. + // This is an exception because of a bug. + // https://github.com/ldez/tagliatelle/issues/8 + // For now, tagliatelle should try to remain neutral in terms of format. 
+ if hasTagFlag(flags, "inline") { + // skip for inline children (no name to lint) continue } + if value == "" { + value = fieldName + } + converter, err := getConverter(convName) if err != nil { pass.Reportf(n.Pos(), "%s(%s): %v", key, convName, err) @@ -143,25 +151,35 @@ func getTypeName(exp ast.Expr) (string, error) { return getTypeName(typ.Sel) default: bytes, _ := json.Marshal(exp) - return "", fmt.Errorf("unexpected eror: type %T: %s", typ, string(bytes)) + return "", fmt.Errorf("unexpected error: type %T: %s", typ, string(bytes)) } } -func lookupTagValue(tag *ast.BasicLit, key string) (string, bool) { +func lookupTagValue(tag *ast.BasicLit, key string) (name string, flags []string, ok bool) { raw := strings.Trim(tag.Value, "`") value, ok := reflect.StructTag(raw).Lookup(key) if !ok { - return value, ok + return value, nil, ok } values := strings.Split(value, ",") if len(values) < 1 { - return "", true + return "", nil, true } - return values[0], true + return values[0], values[1:], true +} + +func hasTagFlag(flags []string, query string) bool { + for _, flag := range flags { + if flag == query { + return true + } + } + + return false } func getConverter(c string) (func(s string) string, error) { diff --git a/vendor/github.com/leonklingele/grouper/LICENSE b/vendor/github.com/leonklingele/grouper/LICENSE new file mode 100644 index 000000000..15bc112be --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. 
+ + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. 
diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..9852c7838 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go @@ -0,0 +1,89 @@ +package analyzer + +import ( + "fmt" + "go/ast" + + "github.com/leonklingele/grouper/pkg/analyzer/consts" + "github.com/leonklingele/grouper/pkg/analyzer/imports" + "github.com/leonklingele/grouper/pkg/analyzer/types" + "github.com/leonklingele/grouper/pkg/analyzer/vars" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" +) + +const ( + Name = "grouper" + Doc = `expression group analyzer: require 'import', 'const', 'var' and/or 'type' declaration groups` +) + +func New() *analysis.Analyzer { + return &analysis.Analyzer{ //nolint:exhaustivestruct // we do not need all fields + Name: Name, + Doc: Doc, + Flags: Flags(), + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + } +} + +func run(p *analysis.Pass) (interface{}, error) { + flagLookupBool := func(name string) bool { + return p.Analyzer.Flags.Lookup(name).Value.String() == "true" + } + + c := &Config{ + ConstsConfig: &consts.Config{ + RequireSingleConst: flagLookupBool(FlagNameConstRequireSingleConst), + RequireGrouping: flagLookupBool(FlagNameConstRequireGrouping), + }, + + ImportsConfig: &imports.Config{ + RequireSingleImport: flagLookupBool(FlagNameImportRequireSingleImport), + RequireGrouping: flagLookupBool(FlagNameImportRequireGrouping), + }, + + TypesConfig: &types.Config{ + RequireSingleType: flagLookupBool(FlagNameTypeRequireSingleType), + RequireGrouping: flagLookupBool(FlagNameTypeRequireGrouping), + }, + + VarsConfig: &vars.Config{ + RequireSingleVar: flagLookupBool(FlagNameVarRequireSingleVar), + RequireGrouping: flagLookupBool(FlagNameVarRequireGrouping), + }, + } + + return nil, pass(c, p) +} + +func pass(c *Config, p *analysis.Pass) error { + for _, f := range p.Files { + if err := filepass(c, p, f); err != nil { + return err + } + } + + return nil +} + +func filepass(c *Config, p *analysis.Pass, f *ast.File) error { + if err := consts.Filepass(c.ConstsConfig, p, f); err != nil { + return fmt.Errorf("failed to consts.Filepass: %w", err) + } + + if err := imports.Filepass(c.ImportsConfig, p, f); err != nil { + return fmt.Errorf("failed to imports.Filepass: %w", err) + } + + if err := types.Filepass(c.TypesConfig, p, f); err != nil { + return fmt.Errorf("failed to types.Filepass: %w", err) + } + + if err := vars.Filepass(c.VarsConfig, p, f); err != nil { + return fmt.Errorf("failed to vars.Filepass: %w", err) + } + + return nil +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go new file mode 100644 index 000000000..b00595f9a --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go @@ -0,0 +1,15 @@ +package analyzer + +import ( + "github.com/leonklingele/grouper/pkg/analyzer/consts" + "github.com/leonklingele/grouper/pkg/analyzer/imports" + "github.com/leonklingele/grouper/pkg/analyzer/types" + "github.com/leonklingele/grouper/pkg/analyzer/vars" +) + +type Config struct { + ConstsConfig *consts.Config + ImportsConfig *imports.Config + TypesConfig *types.Config + VarsConfig *vars.Config +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go new file mode 100644 index 
000000000..e4e04c127 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go @@ -0,0 +1,19 @@ +package consts + +import ( + "go/ast" + "go/token" + + "github.com/leonklingele/grouper/pkg/analyzer/globals" + + "golang.org/x/tools/go/analysis" +) + +// https://go.dev/ref/spec#Constant_declarations + +func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { + return globals.Filepass( + p, f, + token.CONST, c.RequireSingleConst, c.RequireGrouping, + ) +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go new file mode 100644 index 000000000..aeeab40c7 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go @@ -0,0 +1,6 @@ +package consts + +type Config struct { + RequireSingleConst bool // Require the use of a single global 'const' declaration only + RequireGrouping bool // Require the use of grouped global 'const' declarations +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go new file mode 100644 index 000000000..42447cbef --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go @@ -0,0 +1,37 @@ +package analyzer + +import ( + "flag" +) + +const ( + FlagNameConstRequireSingleConst = "const-require-single-const" + FlagNameConstRequireGrouping = "const-require-grouping" + + FlagNameImportRequireSingleImport = "import-require-single-import" + FlagNameImportRequireGrouping = "import-require-grouping" + + FlagNameTypeRequireSingleType = "type-require-single-type" + FlagNameTypeRequireGrouping = "type-require-grouping" + + FlagNameVarRequireSingleVar = "var-require-single-var" + FlagNameVarRequireGrouping = "var-require-grouping" +) + +func Flags() flag.FlagSet { + fs := flag.NewFlagSet(Name, flag.ExitOnError) + + fs.Bool(FlagNameConstRequireSingleConst, false, "require the use of a single global 'const' declaration only") + fs.Bool(FlagNameConstRequireGrouping, false, "require the use of grouped global 'const' declarations") + + fs.Bool(FlagNameImportRequireSingleImport, false, "require the use of a single 'import' declaration only") + fs.Bool(FlagNameImportRequireGrouping, false, "require the use of grouped 'import' declarations") + + fs.Bool(FlagNameTypeRequireSingleType, false, "require the use of a single global 'type' declaration only") + fs.Bool(FlagNameTypeRequireGrouping, false, "require the use of grouped global 'type' declarations") + + fs.Bool(FlagNameVarRequireSingleVar, false, "require the use of a single global 'var' declaration only") + fs.Bool(FlagNameVarRequireGrouping, false, "require the use of grouped global 'var' declarations") + + return *fs +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go new file mode 100644 index 000000000..15940a480 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go @@ -0,0 +1,105 @@ +package globals + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +type Global struct { + Decl *ast.GenDecl + IsGroup bool +} + +func Filepass( + p *analysis.Pass, f *ast.File, + tkn token.Token, requireSingle, requireGrouping bool, +) error { + var globals []*Global + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + + if genDecl.Tok == tkn { + globals = 
append(globals, &Global{ + Decl: genDecl, + IsGroup: genDecl.Lparen != 0, + }) + } + } + + numGlobals := len(globals) + if numGlobals == 0 { + // Bail out early + return nil + } + + if requireSingle && numGlobals > 1 { + msg := fmt.Sprintf("should only use a single global '%s' declaration, %d found", tkn.String(), numGlobals) + dups := globals[1:] + firstdup := dups[0] + decl := firstdup.Decl + + report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields + Pos: decl.Pos(), + End: decl.End(), + Message: msg, + // TODO(leon): Suggest fix + } + + if len(dups) > 1 { + report.Related = toRelated(dups[1:]) + } + + p.Report(report) + } + + if requireGrouping { + var ungrouped []*Global + for _, g := range globals { + if !g.IsGroup { + ungrouped = append(ungrouped, g) + } + } + + if numUngrouped := len(ungrouped); numUngrouped != 0 { + msg := fmt.Sprintf("should only use grouped global '%s' declarations", tkn.String()) + firstmatch := ungrouped[0] + decl := firstmatch.Decl + + report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields + Pos: decl.Pos(), + End: decl.End(), + Message: msg, + // TODO(leon): Suggest fix + } + + if numUngrouped > 1 { + report.Related = toRelated(ungrouped[1:]) + } + + p.Report(report) + } + } + + return nil +} + +func toRelated(globals []*Global) []analysis.RelatedInformation { + related := make([]analysis.RelatedInformation, 0, len(globals)) + for _, g := range globals { + decl := g.Decl + + related = append(related, analysis.RelatedInformation{ + Pos: decl.Pos(), + End: decl.End(), + Message: "found here", + }) + } + + return related +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go new file mode 100644 index 000000000..b545f00c0 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go @@ -0,0 +1,103 @@ +package imports + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +// https://go.dev/ref/spec#Import_declarations + +type Import struct { + Decl *ast.GenDecl + IsGroup bool +} + +func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { + var imports []*Import + ast.Inspect(f, func(n ast.Node) bool { + if decl, ok := n.(*ast.GenDecl); ok { + if decl.Tok == token.IMPORT { + imports = append(imports, &Import{ + Decl: decl, + IsGroup: decl.Lparen != 0, + }) + } + } + + return true + }) + + numImports := len(imports) + if numImports == 0 { + // Bail out early + return nil + } + + if c.RequireSingleImport && numImports > 1 { + msg := fmt.Sprintf("should only use a single 'import' declaration, %d found", numImports) + dups := imports[1:] + firstdup := dups[0] + decl := firstdup.Decl + + report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields + Pos: decl.Pos(), + End: decl.End(), + Message: msg, + // TODO(leon): Suggest fix + } + + if len(dups) > 1 { + report.Related = toRelated(dups[1:]) + } + + p.Report(report) + } + + if c.RequireGrouping { + var ungroupedImports []*Import + for _, imp := range imports { + if !imp.IsGroup { + ungroupedImports = append(ungroupedImports, imp) + } + } + + if numUngroupedImports := len(ungroupedImports); numUngroupedImports != 0 { + msg := "should only use grouped 'import' declarations" + firstmatch := ungroupedImports[0] + decl := firstmatch.Decl + + report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields + Pos: decl.Pos(), + End: decl.End(), 
+ Message: msg, + // TODO(leon): Suggest fix + } + + if numUngroupedImports > 1 { + report.Related = toRelated(ungroupedImports[1:]) + } + + p.Report(report) + } + } + + return nil +} + +func toRelated(imports []*Import) []analysis.RelatedInformation { + related := make([]analysis.RelatedInformation, 0, len(imports)) + for _, imp := range imports { + decl := imp.Decl + + related = append(related, analysis.RelatedInformation{ + Pos: decl.Pos(), + End: decl.End(), + Message: "found here", + }) + } + + return related +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go new file mode 100644 index 000000000..6a6971b4a --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go @@ -0,0 +1,6 @@ +package imports + +type Config struct { + RequireSingleImport bool // Require the use of a single 'import' declaration only + RequireGrouping bool // Require the use of grouped 'import' declarations +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go new file mode 100644 index 000000000..63bbab33b --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go @@ -0,0 +1,19 @@ +package types + +import ( + "go/ast" + "go/token" + + "github.com/leonklingele/grouper/pkg/analyzer/globals" + + "golang.org/x/tools/go/analysis" +) + +// https://go.dev/ref/spec#Type_declarations + +func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { + return globals.Filepass( + p, f, + token.TYPE, c.RequireSingleType, c.RequireGrouping, + ) +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go new file mode 100644 index 000000000..e24cef9da --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go @@ -0,0 +1,6 @@ +package types + +type Config struct { + RequireSingleType bool // Require the use of a single global 'type' declaration only + RequireGrouping bool // Require the use of grouped global 'type' declarations +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go new file mode 100644 index 000000000..20c781223 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go @@ -0,0 +1,19 @@ +package vars + +import ( + "go/ast" + "go/token" + + "github.com/leonklingele/grouper/pkg/analyzer/globals" + + "golang.org/x/tools/go/analysis" +) + +// https://go.dev/ref/spec#Variable_declarations + +func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { + return globals.Filepass( + p, f, + token.VAR, c.RequireSingleVar, c.RequireGrouping, + ) +} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go new file mode 100644 index 000000000..4c7c1d838 --- /dev/null +++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go @@ -0,0 +1,6 @@ +package vars + +type Config struct { + RequireSingleVar bool // Require the use of a single global 'var' declaration only + RequireGrouping bool // Require the use of grouped global 'var' declarations +} diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go index 3df68f360..05d6f74bf 100644 --- 
a/vendor/github.com/mattn/go-colorable/noncolorable.go +++ b/vendor/github.com/mattn/go-colorable/noncolorable.go @@ -42,7 +42,6 @@ loop: continue } - var buf bytes.Buffer for { c, err := er.ReadByte() if err != nil { @@ -51,7 +50,6 @@ loop: if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { break } - buf.Write([]byte(string(c))) } } diff --git a/vendor/github.com/mattn/go-sqlite3/README.md b/vendor/github.com/mattn/go-sqlite3/README.md index 746621f9f..e455133fc 100644 --- a/vendor/github.com/mattn/go-sqlite3/README.md +++ b/vendor/github.com/mattn/go-sqlite3/README.md @@ -7,17 +7,17 @@ go-sqlite3 [![codecov](https://codecov.io/gh/mattn/go-sqlite3/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-sqlite3) [![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-sqlite3)](https://goreportcard.com/report/github.com/mattn/go-sqlite3) -Latest stable version is v1.14 or later not v2. +Latest stable version is v1.14 or later, not v2. ~~**NOTE:** The increase to v2 was an accident. There were no major changes or features.~~ # Description -sqlite3 driver conforming to the built-in database/sql interface +A sqlite3 driver that conforms to the built-in database/sql interface. -Supported Golang version: See [.github/workflows/go.yaml](./.github/workflows/go.yaml) +Supported Golang version: See [.github/workflows/go.yaml](./.github/workflows/go.yaml). -[This package follows the official Golang Release Policy.](https://golang.org/doc/devel/release.html#policy) +This package follows the official [Golang Release Policy](https://golang.org/doc/devel/release.html#policy). ### Overview @@ -64,7 +64,7 @@ Supported Golang version: See [.github/workflows/go.yaml](./.github/workflows/go # Installation -This package can be installed with the go get command: +This package can be installed with the `go get` command: go get github.com/mattn/go-sqlite3 @@ -72,28 +72,28 @@ _go-sqlite3_ is *cgo* package. If you want to build your app using go-sqlite3, you need gcc. However, after you have built and installed _go-sqlite3_ with `go install github.com/mattn/go-sqlite3` (which requires gcc), you can build your app without relying on gcc in future. -***Important: because this is a `CGO` enabled package you are required to set the environment variable `CGO_ENABLED=1` and have a `gcc` compile present within your path.*** +***Important: because this is a `CGO` enabled package, you are required to set the environment variable `CGO_ENABLED=1` and have a `gcc` compile present within your path.*** # API Reference -API documentation can be found here: http://godoc.org/github.com/mattn/go-sqlite3 +API documentation can be found [here](http://godoc.org/github.com/mattn/go-sqlite3). -Examples can be found under the [examples](./_example) directory +Examples can be found under the [examples](./_example) directory. # Connection String When creating a new SQLite database or connection to an existing one, with the file name additional options can be given. -This is also known as a DSN string. (Data Source Name). +This is also known as a DSN (Data Source Name) string. Options are append after the filename of the SQLite database. -The database filename and options are seperated by an `?` (Question Mark). +The database filename and options are separated by an `?` (Question Mark). Options should be URL-encoded (see [url.QueryEscape](https://golang.org/pkg/net/url/#QueryEscape)). This also applies when using an in-memory database instead of a file. 
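[Editorial note, not part of the vendored file] As an aside to the connection-string description above: a minimal sketch of how such a DSN is consumed from Go, using the `file:test.db?cache=shared&mode=memory` example string that appears in this README. The file name and option values are illustrative only.

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver (needs CGO_ENABLED=1 and gcc)
)

func main() {
	// DSN = database file name, then '?', then '&'-separated, URL-encoded options.
	db, err := sql.Open("sqlite3", "file:test.db?cache=shared&mode=memory")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Ping forces a real connection so DSN problems surface immediately.
	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
}
```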
Options can be given using the following format: `KEYWORD=VALUE` and multiple options can be combined with the `&` ampersand. -This library supports dsn options of SQLite itself and provides additional options. +This library supports DSN options of SQLite itself and provides additional options. Boolean values can be one of: * `0` `no` `false` `off` @@ -138,19 +138,18 @@ file:test.db?cache=shared&mode=memory This package allows additional configuration of features available within SQLite3 to be enabled or disabled by golang build constraints also known as build `tags`. -[Click here for more information about build tags / constraints.](https://golang.org/pkg/go/build/#hdr-Build_Constraints) +Click [here](https://golang.org/pkg/go/build/#hdr-Build_Constraints) for more information about build tags / constraints. ### Usage -If you wish to build this library with additional extensions / features. -Use the following command. +If you wish to build this library with additional extensions / features, use the following command: ```bash go build --tags "" ``` -For available features see the extension list. -When using multiple build tags, all the different tags should be space delimted. +For available features, see the extension list. +When using multiple build tags, all the different tags should be space delimited. Example: @@ -181,9 +180,9 @@ go build --tags "icu json1 fts5 secure_delete" # Compilation -This package requires `CGO_ENABLED=1` ennvironment variable if not set by default, and the presence of the `gcc` compiler. +This package requires the `CGO_ENABLED=1` ennvironment variable if not set by default, and the presence of the `gcc` compiler. -If you need to add additional CFLAGS or LDFLAGS to the build command, and do not want to modify this package. Then this can be achieved by using the `CGO_CFLAGS` and `CGO_LDFLAGS` environment variables. +If you need to add additional CFLAGS or LDFLAGS to the build command, and do not want to modify this package, then this can be achieved by using the `CGO_CFLAGS` and `CGO_LDFLAGS` environment variables. ## Android @@ -198,7 +197,7 @@ For more information see [#201](https://github.com/mattn/go-sqlite3/issues/201) # ARM -To compile for `ARM` use the following environment. +To compile for `ARM` use the following environment: ```bash env CC=arm-linux-gnueabihf-gcc CXX=arm-linux-gnueabihf-g++ \ @@ -234,7 +233,7 @@ Please work only with compiled final binaries. ## Linux -To compile this package on Linux you must install the development tools for your linux distribution. +To compile this package on Linux, you must install the development tools for your linux distribution. To compile under linux use the build tag `linux`. @@ -250,7 +249,7 @@ go build --tags "libsqlite3 linux" ### Alpine -When building in an `alpine` container run the following command before building. +When building in an `alpine` container run the following command before building: ``` apk add --update gcc musl-dev @@ -270,29 +269,29 @@ sudo apt-get install build-essential ## Mac OSX -OSX should have all the tools present to compile this package, if not install XCode this will add all the developers tools. +OSX should have all the tools present to compile this package. If not, install XCode to add all the developers tools. -Required dependency +Required dependency: ```bash brew install sqlite3 ``` -For OSX there is an additional package install which is required if you wish to build the `icu` extension. 
+For OSX, there is an additional package to install which is required if you wish to build the `icu` extension. -This additional package can be installed with `homebrew`. +This additional package can be installed with `homebrew`: ```bash brew upgrade icu4c ``` -To compile for Mac OSX. +To compile for Mac OSX: ```bash go build --tags "darwin" ``` -If you wish to link directly to libsqlite3 then you can use the `libsqlite3` build tag. +If you wish to link directly to libsqlite3, use the `libsqlite3` build tag: ``` go build --tags "libsqlite3 darwin" @@ -304,14 +303,14 @@ Additional information: ## Windows -To compile this package on Windows OS you must have the `gcc` compiler installed. +To compile this package on Windows, you must have the `gcc` compiler installed. 1) Install a Windows `gcc` toolchain. -2) Add the `bin` folders to the Windows path if the installer did not do this by default. -3) Open a terminal for the TDM-GCC toolchain, can be found in the Windows Start menu. +2) Add the `bin` folder to the Windows path, if the installer did not do this by default. +3) Open a terminal for the TDM-GCC toolchain, which can be found in the Windows Start menu. 4) Navigate to your project folder and run the `go build ...` command for this package. -For example the TDM-GCC Toolchain can be found [here](https://sourceforge.net/projects/tdm-gcc/). +For example the TDM-GCC Toolchain can be found [here](https://jmeubank.github.io/tdm-gcc/). ## Errors @@ -349,28 +348,28 @@ This package supports the SQLite User Authentication module. ## Compile -To use the User authentication module the package has to be compiled with the tag `sqlite_userauth`. See [Features](#features). +To use the User authentication module, the package has to be compiled with the tag `sqlite_userauth`. See [Features](#features). ## Usage ### Create protected database -To create a database protected by user authentication provide the following argument to the connection string `_auth`. +To create a database protected by user authentication, provide the following argument to the connection string `_auth`. This will enable user authentication within the database. This option however requires two additional arguments: - `_auth_user` - `_auth_pass` -When `_auth` is present on the connection string user authentication will be enabled and the provided user will be created +When `_auth` is present in the connection string user authentication will be enabled and the provided user will be created as an `admin` user. After initial creation, the parameter `_auth` has no effect anymore and can be omitted from the connection string. -Example connection string: +Example connection strings: -Create an user authentication database with user `admin` and password `admin`. +Create an user authentication database with user `admin` and password `admin`: `file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin` -Create an user authentication database with user `admin` and password `admin` and use `SHA1` for the password encoding. +Create an user authentication database with user `admin` and password `admin` and use `SHA1` for the password encoding: `file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin&_auth_crypt=sha1` @@ -396,11 +395,11 @@ salt this can be configured with `_auth_salt`. ### Restrictions -Operations on the database regarding to user management can only be preformed by an administrator user. +Operations on the database regarding user management can only be preformed by an administrator user. 
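[Editorial note, not part of the vendored file] To tie the user-authentication options above together, a minimal sketch assuming the package was built with the `sqlite_userauth` tag; the credentials and file name simply reuse the admin/admin example connection string from this README.

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // build with: go build --tags "sqlite_userauth"
)

func main() {
	// On first open, _auth creates the database with "admin" as an admin user;
	// after creation _auth has no further effect and the credentials authenticate the connection.
	dsn := "file:test.s3db?_auth&_auth_user=admin&_auth_pass=admin"

	db, err := sql.Open("sqlite3", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil {
		log.Fatal(err) // expected to fail with an auth error (SQLITE_AUTH, 23) on bad credentials
	}
}
```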
### Support -The user authentication supports two kinds of users +The user authentication supports two kinds of users: - administrators - regular users @@ -411,7 +410,7 @@ User management can be done by directly using the `*SQLiteConn` or by SQL. #### SQL -The following sql functions are available for user management. +The following sql functions are available for user management: | Function | Arguments | Description | |----------|-----------|-------------| @@ -420,7 +419,7 @@ The following sql functions are available for user management. | `auth_user_change` | username `string`, password `string`, admin `int` | Function to modify an user. Users can change their own password, but only an administrator can change the administrator flag. | | `authUserDelete` | username `string` | Delete an user from the database. Can only be used by an administrator. The current logged in administrator cannot be deleted. This is to make sure their is always an administrator remaining. | -These functions will return an integer. +These functions will return an integer: - 0 (SQLITE_OK) - 23 (SQLITE_AUTH) Failed to perform due to authentication or insufficient privileges @@ -441,7 +440,7 @@ SELECT user_delete('user'); #### *SQLiteConn -The following functions are available for User authentication from the `*SQLiteConn`. +The following functions are available for User authentication from the `*SQLiteConn`: | Function | Description | |----------|-------------| @@ -452,16 +451,16 @@ The following functions are available for User authentication from the `*SQLiteC ### Attached database -When using attached databases. SQLite will use the authentication from the `main` database for the attached database(s). +When using attached databases, SQLite will use the authentication from the `main` database for the attached database(s). # Extensions -If you want your own extension to be listed here or you want to add a reference to an extension; please submit an Issue for this. +If you want your own extension to be listed here, or you want to add a reference to an extension; please submit an Issue for this. ## Spatialite Spatialite is available as an extension to SQLite, and can be used in combination with this repository. -For an example see [shaxbee/go-spatialite](https://github.com/shaxbee/go-spatialite). +For an example, see [shaxbee/go-spatialite](https://github.com/shaxbee/go-spatialite). ## extension-functions.c from SQLite3 Contrib @@ -471,7 +470,7 @@ extension-functions.c is available as an extension to SQLite, and provides the f - String: replicate, charindex, leftstr, rightstr, ltrim, rtrim, trim, replace, reverse, proper, padl, padr, padc, strfilter. - Aggregate: stdev, variance, mode, median, lower_quartile, upper_quartile -For an example see [dinedal/go-sqlite3-extension-functions](https://github.com/dinedal/go-sqlite3-extension-functions). +For an example, see [dinedal/go-sqlite3-extension-functions](https://github.com/dinedal/go-sqlite3-extension-functions). # FAQ @@ -491,7 +490,7 @@ For an example see [dinedal/go-sqlite3-extension-functions](https://github.com/d - Can I use this in multiple routines concurrently? - Yes for readonly. But, No for writable. See [#50](https://github.com/mattn/go-sqlite3/issues/50), [#51](https://github.com/mattn/go-sqlite3/issues/51), [#209](https://github.com/mattn/go-sqlite3/issues/209), [#274](https://github.com/mattn/go-sqlite3/issues/274). + Yes for readonly. But not for writable. 
See [#50](https://github.com/mattn/go-sqlite3/issues/50), [#51](https://github.com/mattn/go-sqlite3/issues/51), [#209](https://github.com/mattn/go-sqlite3/issues/209), [#274](https://github.com/mattn/go-sqlite3/issues/274). - Why I'm getting `no such table` error? @@ -505,7 +504,7 @@ For an example see [dinedal/go-sqlite3-extension-functions](https://github.com/d Note that if the last database connection in the pool closes, the in-memory database is deleted. Make sure the [max idle connection limit](https://golang.org/pkg/database/sql/#DB.SetMaxIdleConns) is > 0, and the [connection lifetime](https://golang.org/pkg/database/sql/#DB.SetConnMaxLifetime) is infinite. - For more information see + For more information see: * [#204](https://github.com/mattn/go-sqlite3/issues/204) * [#511](https://github.com/mattn/go-sqlite3/issues/511) * https://www.sqlite.org/sharedcache.html#shared_cache_and_in_memory_databases @@ -515,20 +514,20 @@ For an example see [dinedal/go-sqlite3-extension-functions](https://github.com/d OS X limits OS-wide to not have more than 1000 files open simultaneously by default. - For more information see [#289](https://github.com/mattn/go-sqlite3/issues/289) + For more information, see [#289](https://github.com/mattn/go-sqlite3/issues/289) - Trying to execute a `.` (dot) command throws an error. Error: `Error: near ".": syntax error` - Dot command are part of SQLite3 CLI not of this library. + Dot command are part of SQLite3 CLI, not of this library. You need to implement the feature or call the sqlite3 cli. - More information see [#305](https://github.com/mattn/go-sqlite3/issues/305) + More information see [#305](https://github.com/mattn/go-sqlite3/issues/305). - Error: `database is locked` - When you get a database is locked. Please use the following options. + When you get a database is locked, please use the following options. Add to DSN: `cache=shared` @@ -537,24 +536,24 @@ For an example see [dinedal/go-sqlite3-extension-functions](https://github.com/d db, err := sql.Open("sqlite3", "file:locked.sqlite?cache=shared") ``` - Second please set the database connections of the SQL package to 1. + Next, please set the database connections of the SQL package to 1: ```go db.SetMaxOpenConns(1) ``` - More information see [#209](https://github.com/mattn/go-sqlite3/issues/209) + For more information, see [#209](https://github.com/mattn/go-sqlite3/issues/209). ## Contributors ### Code Contributors -This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. +This project exists thanks to all the people who [[contribute](CONTRIBUTING.md)]. ### Financial Contributors -Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/mattn-go-sqlite3/contribute)] +Become a financial contributor and help us sustain our community. [[Contribute here](https://opencollective.com/mattn-go-sqlite3/contribute)]. 
#### Individuals diff --git a/vendor/github.com/mattn/go-sqlite3/backup.go b/vendor/github.com/mattn/go-sqlite3/backup.go index e222cc888..ecbb46974 100644 --- a/vendor/github.com/mattn/go-sqlite3/backup.go +++ b/vendor/github.com/mattn/go-sqlite3/backup.go @@ -7,7 +7,7 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/callback.go b/vendor/github.com/mattn/go-sqlite3/callback.go index c3ce75207..b020fe37c 100644 --- a/vendor/github.com/mattn/go-sqlite3/callback.go +++ b/vendor/github.com/mattn/go-sqlite3/callback.go @@ -12,7 +12,7 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/error.go b/vendor/github.com/mattn/go-sqlite3/error.go index 696281c73..58ab252e6 100644 --- a/vendor/github.com/mattn/go-sqlite3/error.go +++ b/vendor/github.com/mattn/go-sqlite3/error.go @@ -7,7 +7,7 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c index ac5d0b7d7..bb9dc50ec 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c @@ -1,7 +1,7 @@ #ifndef USE_LIBSQLITE3 /****************************************************************************** ** This file is an amalgamation of many separate C source files from SQLite -** version 3.36.0. By combining all the individual C code files into this +** version 3.37.0. By combining all the individual C code files into this ** single large file, the entire code can be compiled as a single translation ** unit. This allows many compilers to do optimizations that would not be ** possible if the files were compiled separately. Performance improvements @@ -23,793 +23,6 @@ #ifndef SQLITE_PRIVATE # define SQLITE_PRIVATE static #endif -/************** Begin file ctime.c *******************************************/ -/* -** 2010 February 23 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -************************************************************************* -** -** This file implements routines used to report what compile-time options -** SQLite was built with. -*/ - -#ifndef SQLITE_OMIT_COMPILEOPTION_DIAGS /* IMP: R-16824-07538 */ - -/* -** Include the configuration header output by 'configure' if we're using the -** autoconf-based build -*/ -#if defined(_HAVE_SQLITE_CONFIG_H) && !defined(SQLITECONFIG_H) -#include "config.h" -#define SQLITECONFIG_H 1 -#endif - -/* These macros are provided to "stringify" the value of the define -** for those options in which the value is meaningful. */ -#define CTIMEOPT_VAL_(opt) #opt -#define CTIMEOPT_VAL(opt) CTIMEOPT_VAL_(opt) - -/* Like CTIMEOPT_VAL, but especially for SQLITE_DEFAULT_LOOKASIDE. This -** option requires a separate macro because legal values contain a single -** comma. e.g. (-DSQLITE_DEFAULT_LOOKASIDE="100,100") */ -#define CTIMEOPT_VAL2_(opt1,opt2) #opt1 "," #opt2 -#define CTIMEOPT_VAL2(opt) CTIMEOPT_VAL2_(opt) - -/* -** An array of names of all compile-time options. This array should -** be sorted A-Z. 
-** -** This array looks large, but in a typical installation actually uses -** only a handful of compile-time options, so most times this array is usually -** rather short and uses little memory space. -*/ -static const char * const sqlite3azCompileOpt[] = { - -/* -** BEGIN CODE GENERATED BY tool/mkctime.tcl -*/ -#if SQLITE_32BIT_ROWID - "32BIT_ROWID", -#endif -#if SQLITE_4_BYTE_ALIGNED_MALLOC - "4_BYTE_ALIGNED_MALLOC", -#endif -#if SQLITE_64BIT_STATS - "64BIT_STATS", -#endif -#ifdef SQLITE_ALLOW_COVERING_INDEX_SCAN -# if SQLITE_ALLOW_COVERING_INDEX_SCAN != 1 - "ALLOW_COVERING_INDEX_SCAN=" CTIMEOPT_VAL(SQLITE_ALLOW_COVERING_INDEX_SCAN), -# endif -#endif -#if SQLITE_ALLOW_URI_AUTHORITY - "ALLOW_URI_AUTHORITY", -#endif -#ifdef SQLITE_BITMASK_TYPE - "BITMASK_TYPE=" CTIMEOPT_VAL(SQLITE_BITMASK_TYPE), -#endif -#if SQLITE_BUG_COMPATIBLE_20160819 - "BUG_COMPATIBLE_20160819", -#endif -#if SQLITE_CASE_SENSITIVE_LIKE - "CASE_SENSITIVE_LIKE", -#endif -#if SQLITE_CHECK_PAGES - "CHECK_PAGES", -#endif -#if defined(__clang__) && defined(__clang_major__) - "COMPILER=clang-" CTIMEOPT_VAL(__clang_major__) "." - CTIMEOPT_VAL(__clang_minor__) "." - CTIMEOPT_VAL(__clang_patchlevel__), -#elif defined(_MSC_VER) - "COMPILER=msvc-" CTIMEOPT_VAL(_MSC_VER), -#elif defined(__GNUC__) && defined(__VERSION__) - "COMPILER=gcc-" __VERSION__, -#endif -#if SQLITE_COVERAGE_TEST - "COVERAGE_TEST", -#endif -#if SQLITE_DEBUG - "DEBUG", -#endif -#if SQLITE_DEFAULT_AUTOMATIC_INDEX - "DEFAULT_AUTOMATIC_INDEX", -#endif -#if SQLITE_DEFAULT_AUTOVACUUM - "DEFAULT_AUTOVACUUM", -#endif -#ifdef SQLITE_DEFAULT_CACHE_SIZE - "DEFAULT_CACHE_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_CACHE_SIZE), -#endif -#if SQLITE_DEFAULT_CKPTFULLFSYNC - "DEFAULT_CKPTFULLFSYNC", -#endif -#ifdef SQLITE_DEFAULT_FILE_FORMAT - "DEFAULT_FILE_FORMAT=" CTIMEOPT_VAL(SQLITE_DEFAULT_FILE_FORMAT), -#endif -#ifdef SQLITE_DEFAULT_FILE_PERMISSIONS - "DEFAULT_FILE_PERMISSIONS=" CTIMEOPT_VAL(SQLITE_DEFAULT_FILE_PERMISSIONS), -#endif -#if SQLITE_DEFAULT_FOREIGN_KEYS - "DEFAULT_FOREIGN_KEYS", -#endif -#ifdef SQLITE_DEFAULT_JOURNAL_SIZE_LIMIT - "DEFAULT_JOURNAL_SIZE_LIMIT=" CTIMEOPT_VAL(SQLITE_DEFAULT_JOURNAL_SIZE_LIMIT), -#endif -#ifdef SQLITE_DEFAULT_LOCKING_MODE - "DEFAULT_LOCKING_MODE=" CTIMEOPT_VAL(SQLITE_DEFAULT_LOCKING_MODE), -#endif -#ifdef SQLITE_DEFAULT_LOOKASIDE - "DEFAULT_LOOKASIDE=" CTIMEOPT_VAL2(SQLITE_DEFAULT_LOOKASIDE), -#endif -#ifdef SQLITE_DEFAULT_MEMSTATUS -# if SQLITE_DEFAULT_MEMSTATUS != 1 - "DEFAULT_MEMSTATUS=" CTIMEOPT_VAL(SQLITE_DEFAULT_MEMSTATUS), -# endif -#endif -#ifdef SQLITE_DEFAULT_MMAP_SIZE - "DEFAULT_MMAP_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_MMAP_SIZE), -#endif -#ifdef SQLITE_DEFAULT_PAGE_SIZE - "DEFAULT_PAGE_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_PAGE_SIZE), -#endif -#ifdef SQLITE_DEFAULT_PCACHE_INITSZ - "DEFAULT_PCACHE_INITSZ=" CTIMEOPT_VAL(SQLITE_DEFAULT_PCACHE_INITSZ), -#endif -#ifdef SQLITE_DEFAULT_PROXYDIR_PERMISSIONS - "DEFAULT_PROXYDIR_PERMISSIONS=" CTIMEOPT_VAL(SQLITE_DEFAULT_PROXYDIR_PERMISSIONS), -#endif -#if SQLITE_DEFAULT_RECURSIVE_TRIGGERS - "DEFAULT_RECURSIVE_TRIGGERS", -#endif -#ifdef SQLITE_DEFAULT_ROWEST - "DEFAULT_ROWEST=" CTIMEOPT_VAL(SQLITE_DEFAULT_ROWEST), -#endif -#ifdef SQLITE_DEFAULT_SECTOR_SIZE - "DEFAULT_SECTOR_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_SECTOR_SIZE), -#endif -#ifdef SQLITE_DEFAULT_SYNCHRONOUS - "DEFAULT_SYNCHRONOUS=" CTIMEOPT_VAL(SQLITE_DEFAULT_SYNCHRONOUS), -#endif -#ifdef SQLITE_DEFAULT_WAL_AUTOCHECKPOINT - "DEFAULT_WAL_AUTOCHECKPOINT=" CTIMEOPT_VAL(SQLITE_DEFAULT_WAL_AUTOCHECKPOINT), -#endif -#ifdef 
SQLITE_DEFAULT_WAL_SYNCHRONOUS - "DEFAULT_WAL_SYNCHRONOUS=" CTIMEOPT_VAL(SQLITE_DEFAULT_WAL_SYNCHRONOUS), -#endif -#ifdef SQLITE_DEFAULT_WORKER_THREADS - "DEFAULT_WORKER_THREADS=" CTIMEOPT_VAL(SQLITE_DEFAULT_WORKER_THREADS), -#endif -#if SQLITE_DIRECT_OVERFLOW_READ - "DIRECT_OVERFLOW_READ", -#endif -#if SQLITE_DISABLE_DIRSYNC - "DISABLE_DIRSYNC", -#endif -#if SQLITE_DISABLE_FTS3_UNICODE - "DISABLE_FTS3_UNICODE", -#endif -#if SQLITE_DISABLE_FTS4_DEFERRED - "DISABLE_FTS4_DEFERRED", -#endif -#if SQLITE_DISABLE_INTRINSIC - "DISABLE_INTRINSIC", -#endif -#if SQLITE_DISABLE_LFS - "DISABLE_LFS", -#endif -#if SQLITE_DISABLE_PAGECACHE_OVERFLOW_STATS - "DISABLE_PAGECACHE_OVERFLOW_STATS", -#endif -#if SQLITE_DISABLE_SKIPAHEAD_DISTINCT - "DISABLE_SKIPAHEAD_DISTINCT", -#endif -#ifdef SQLITE_ENABLE_8_3_NAMES - "ENABLE_8_3_NAMES=" CTIMEOPT_VAL(SQLITE_ENABLE_8_3_NAMES), -#endif -#if SQLITE_ENABLE_API_ARMOR - "ENABLE_API_ARMOR", -#endif -#if SQLITE_ENABLE_ATOMIC_WRITE - "ENABLE_ATOMIC_WRITE", -#endif -#if SQLITE_ENABLE_BATCH_ATOMIC_WRITE - "ENABLE_BATCH_ATOMIC_WRITE", -#endif -#if SQLITE_ENABLE_BYTECODE_VTAB - "ENABLE_BYTECODE_VTAB", -#endif -#ifdef SQLITE_ENABLE_CEROD - "ENABLE_CEROD=" CTIMEOPT_VAL(SQLITE_ENABLE_CEROD), -#endif -#if SQLITE_ENABLE_COLUMN_METADATA - "ENABLE_COLUMN_METADATA", -#endif -#if SQLITE_ENABLE_COLUMN_USED_MASK - "ENABLE_COLUMN_USED_MASK", -#endif -#if SQLITE_ENABLE_COSTMULT - "ENABLE_COSTMULT", -#endif -#if SQLITE_ENABLE_CURSOR_HINTS - "ENABLE_CURSOR_HINTS", -#endif -#if SQLITE_ENABLE_DBPAGE_VTAB - "ENABLE_DBPAGE_VTAB", -#endif -#if SQLITE_ENABLE_DBSTAT_VTAB - "ENABLE_DBSTAT_VTAB", -#endif -#if SQLITE_ENABLE_EXPENSIVE_ASSERT - "ENABLE_EXPENSIVE_ASSERT", -#endif -#if SQLITE_ENABLE_EXPLAIN_COMMENTS - "ENABLE_EXPLAIN_COMMENTS", -#endif -#if SQLITE_ENABLE_FTS3 - "ENABLE_FTS3", -#endif -#if SQLITE_ENABLE_FTS3_PARENTHESIS - "ENABLE_FTS3_PARENTHESIS", -#endif -#if SQLITE_ENABLE_FTS3_TOKENIZER - "ENABLE_FTS3_TOKENIZER", -#endif -#if SQLITE_ENABLE_FTS4 - "ENABLE_FTS4", -#endif -#if SQLITE_ENABLE_FTS5 - "ENABLE_FTS5", -#endif -#if SQLITE_ENABLE_GEOPOLY - "ENABLE_GEOPOLY", -#endif -#if SQLITE_ENABLE_HIDDEN_COLUMNS - "ENABLE_HIDDEN_COLUMNS", -#endif -#if SQLITE_ENABLE_ICU - "ENABLE_ICU", -#endif -#if SQLITE_ENABLE_IOTRACE - "ENABLE_IOTRACE", -#endif -#if SQLITE_ENABLE_JSON1 - "ENABLE_JSON1", -#endif -#if SQLITE_ENABLE_LOAD_EXTENSION - "ENABLE_LOAD_EXTENSION", -#endif -#ifdef SQLITE_ENABLE_LOCKING_STYLE - "ENABLE_LOCKING_STYLE=" CTIMEOPT_VAL(SQLITE_ENABLE_LOCKING_STYLE), -#endif -#if SQLITE_ENABLE_MATH_FUNCTIONS - "ENABLE_MATH_FUNCTIONS", -#endif -#if SQLITE_ENABLE_MEMORY_MANAGEMENT - "ENABLE_MEMORY_MANAGEMENT", -#endif -#if SQLITE_ENABLE_MEMSYS3 - "ENABLE_MEMSYS3", -#endif -#if SQLITE_ENABLE_MEMSYS5 - "ENABLE_MEMSYS5", -#endif -#if SQLITE_ENABLE_MULTIPLEX - "ENABLE_MULTIPLEX", -#endif -#if SQLITE_ENABLE_NORMALIZE - "ENABLE_NORMALIZE", -#endif -#if SQLITE_ENABLE_NULL_TRIM - "ENABLE_NULL_TRIM", -#endif -#if SQLITE_ENABLE_OFFSET_SQL_FUNC - "ENABLE_OFFSET_SQL_FUNC", -#endif -#if SQLITE_ENABLE_OVERSIZE_CELL_CHECK - "ENABLE_OVERSIZE_CELL_CHECK", -#endif -#if SQLITE_ENABLE_PREUPDATE_HOOK - "ENABLE_PREUPDATE_HOOK", -#endif -#if SQLITE_ENABLE_QPSG - "ENABLE_QPSG", -#endif -#if SQLITE_ENABLE_RBU - "ENABLE_RBU", -#endif -#if SQLITE_ENABLE_RTREE - "ENABLE_RTREE", -#endif -#if SQLITE_ENABLE_SELECTTRACE - "ENABLE_SELECTTRACE", -#endif -#if SQLITE_ENABLE_SESSION - "ENABLE_SESSION", -#endif -#if SQLITE_ENABLE_SNAPSHOT - "ENABLE_SNAPSHOT", -#endif -#if SQLITE_ENABLE_SORTER_REFERENCES - 
"ENABLE_SORTER_REFERENCES", -#endif -#if SQLITE_ENABLE_SQLLOG - "ENABLE_SQLLOG", -#endif -#if SQLITE_ENABLE_STAT4 - "ENABLE_STAT4", -#endif -#if SQLITE_ENABLE_STMTVTAB - "ENABLE_STMTVTAB", -#endif -#if SQLITE_ENABLE_STMT_SCANSTATUS - "ENABLE_STMT_SCANSTATUS", -#endif -#if SQLITE_ENABLE_UNKNOWN_SQL_FUNCTION - "ENABLE_UNKNOWN_SQL_FUNCTION", -#endif -#if SQLITE_ENABLE_UNLOCK_NOTIFY - "ENABLE_UNLOCK_NOTIFY", -#endif -#if SQLITE_ENABLE_UPDATE_DELETE_LIMIT - "ENABLE_UPDATE_DELETE_LIMIT", -#endif -#if SQLITE_ENABLE_URI_00_ERROR - "ENABLE_URI_00_ERROR", -#endif -#if SQLITE_ENABLE_VFSTRACE - "ENABLE_VFSTRACE", -#endif -#if SQLITE_ENABLE_WHERETRACE - "ENABLE_WHERETRACE", -#endif -#if SQLITE_ENABLE_ZIPVFS - "ENABLE_ZIPVFS", -#endif -#if SQLITE_EXPLAIN_ESTIMATED_ROWS - "EXPLAIN_ESTIMATED_ROWS", -#endif -#if SQLITE_EXTRA_IFNULLROW - "EXTRA_IFNULLROW", -#endif -#ifdef SQLITE_EXTRA_INIT - "EXTRA_INIT=" CTIMEOPT_VAL(SQLITE_EXTRA_INIT), -#endif -#ifdef SQLITE_EXTRA_SHUTDOWN - "EXTRA_SHUTDOWN=" CTIMEOPT_VAL(SQLITE_EXTRA_SHUTDOWN), -#endif -#ifdef SQLITE_FTS3_MAX_EXPR_DEPTH - "FTS3_MAX_EXPR_DEPTH=" CTIMEOPT_VAL(SQLITE_FTS3_MAX_EXPR_DEPTH), -#endif -#if SQLITE_FTS5_ENABLE_TEST_MI - "FTS5_ENABLE_TEST_MI", -#endif -#if SQLITE_FTS5_NO_WITHOUT_ROWID - "FTS5_NO_WITHOUT_ROWID", -#endif -#if HAVE_ISNAN || SQLITE_HAVE_ISNAN - "HAVE_ISNAN", -#endif -#ifdef SQLITE_HOMEGROWN_RECURSIVE_MUTEX -# if SQLITE_HOMEGROWN_RECURSIVE_MUTEX != 1 - "HOMEGROWN_RECURSIVE_MUTEX=" CTIMEOPT_VAL(SQLITE_HOMEGROWN_RECURSIVE_MUTEX), -# endif -#endif -#if SQLITE_IGNORE_AFP_LOCK_ERRORS - "IGNORE_AFP_LOCK_ERRORS", -#endif -#if SQLITE_IGNORE_FLOCK_LOCK_ERRORS - "IGNORE_FLOCK_LOCK_ERRORS", -#endif -#if SQLITE_INLINE_MEMCPY - "INLINE_MEMCPY", -#endif -#if SQLITE_INT64_TYPE - "INT64_TYPE", -#endif -#ifdef SQLITE_INTEGRITY_CHECK_ERROR_MAX - "INTEGRITY_CHECK_ERROR_MAX=" CTIMEOPT_VAL(SQLITE_INTEGRITY_CHECK_ERROR_MAX), -#endif -#if SQLITE_LIKE_DOESNT_MATCH_BLOBS - "LIKE_DOESNT_MATCH_BLOBS", -#endif -#if SQLITE_LOCK_TRACE - "LOCK_TRACE", -#endif -#if SQLITE_LOG_CACHE_SPILL - "LOG_CACHE_SPILL", -#endif -#ifdef SQLITE_MALLOC_SOFT_LIMIT - "MALLOC_SOFT_LIMIT=" CTIMEOPT_VAL(SQLITE_MALLOC_SOFT_LIMIT), -#endif -#ifdef SQLITE_MAX_ATTACHED - "MAX_ATTACHED=" CTIMEOPT_VAL(SQLITE_MAX_ATTACHED), -#endif -#ifdef SQLITE_MAX_COLUMN - "MAX_COLUMN=" CTIMEOPT_VAL(SQLITE_MAX_COLUMN), -#endif -#ifdef SQLITE_MAX_COMPOUND_SELECT - "MAX_COMPOUND_SELECT=" CTIMEOPT_VAL(SQLITE_MAX_COMPOUND_SELECT), -#endif -#ifdef SQLITE_MAX_DEFAULT_PAGE_SIZE - "MAX_DEFAULT_PAGE_SIZE=" CTIMEOPT_VAL(SQLITE_MAX_DEFAULT_PAGE_SIZE), -#endif -#ifdef SQLITE_MAX_EXPR_DEPTH - "MAX_EXPR_DEPTH=" CTIMEOPT_VAL(SQLITE_MAX_EXPR_DEPTH), -#endif -#ifdef SQLITE_MAX_FUNCTION_ARG - "MAX_FUNCTION_ARG=" CTIMEOPT_VAL(SQLITE_MAX_FUNCTION_ARG), -#endif -#ifdef SQLITE_MAX_LENGTH - "MAX_LENGTH=" CTIMEOPT_VAL(SQLITE_MAX_LENGTH), -#endif -#ifdef SQLITE_MAX_LIKE_PATTERN_LENGTH - "MAX_LIKE_PATTERN_LENGTH=" CTIMEOPT_VAL(SQLITE_MAX_LIKE_PATTERN_LENGTH), -#endif -#ifdef SQLITE_MAX_MEMORY - "MAX_MEMORY=" CTIMEOPT_VAL(SQLITE_MAX_MEMORY), -#endif -#ifdef SQLITE_MAX_MMAP_SIZE - "MAX_MMAP_SIZE=" CTIMEOPT_VAL(SQLITE_MAX_MMAP_SIZE), -#endif -#ifdef SQLITE_MAX_MMAP_SIZE_ - "MAX_MMAP_SIZE_=" CTIMEOPT_VAL(SQLITE_MAX_MMAP_SIZE_), -#endif -#ifdef SQLITE_MAX_PAGE_COUNT - "MAX_PAGE_COUNT=" CTIMEOPT_VAL(SQLITE_MAX_PAGE_COUNT), -#endif -#ifdef SQLITE_MAX_PAGE_SIZE - "MAX_PAGE_SIZE=" CTIMEOPT_VAL(SQLITE_MAX_PAGE_SIZE), -#endif -#ifdef SQLITE_MAX_SCHEMA_RETRY - "MAX_SCHEMA_RETRY=" CTIMEOPT_VAL(SQLITE_MAX_SCHEMA_RETRY), -#endif -#ifdef 
SQLITE_MAX_SQL_LENGTH - "MAX_SQL_LENGTH=" CTIMEOPT_VAL(SQLITE_MAX_SQL_LENGTH), -#endif -#ifdef SQLITE_MAX_TRIGGER_DEPTH - "MAX_TRIGGER_DEPTH=" CTIMEOPT_VAL(SQLITE_MAX_TRIGGER_DEPTH), -#endif -#ifdef SQLITE_MAX_VARIABLE_NUMBER - "MAX_VARIABLE_NUMBER=" CTIMEOPT_VAL(SQLITE_MAX_VARIABLE_NUMBER), -#endif -#ifdef SQLITE_MAX_VDBE_OP - "MAX_VDBE_OP=" CTIMEOPT_VAL(SQLITE_MAX_VDBE_OP), -#endif -#ifdef SQLITE_MAX_WORKER_THREADS - "MAX_WORKER_THREADS=" CTIMEOPT_VAL(SQLITE_MAX_WORKER_THREADS), -#endif -#if SQLITE_MEMDEBUG - "MEMDEBUG", -#endif -#if SQLITE_MIXED_ENDIAN_64BIT_FLOAT - "MIXED_ENDIAN_64BIT_FLOAT", -#endif -#if SQLITE_MMAP_READWRITE - "MMAP_READWRITE", -#endif -#if SQLITE_MUTEX_NOOP - "MUTEX_NOOP", -#endif -#if SQLITE_MUTEX_OMIT - "MUTEX_OMIT", -#endif -#if SQLITE_MUTEX_PTHREADS - "MUTEX_PTHREADS", -#endif -#if SQLITE_MUTEX_W32 - "MUTEX_W32", -#endif -#if SQLITE_NEED_ERR_NAME - "NEED_ERR_NAME", -#endif -#if SQLITE_NOINLINE - "NOINLINE", -#endif -#if SQLITE_NO_SYNC - "NO_SYNC", -#endif -#if SQLITE_OMIT_ALTERTABLE - "OMIT_ALTERTABLE", -#endif -#if SQLITE_OMIT_ANALYZE - "OMIT_ANALYZE", -#endif -#if SQLITE_OMIT_ATTACH - "OMIT_ATTACH", -#endif -#if SQLITE_OMIT_AUTHORIZATION - "OMIT_AUTHORIZATION", -#endif -#if SQLITE_OMIT_AUTOINCREMENT - "OMIT_AUTOINCREMENT", -#endif -#if SQLITE_OMIT_AUTOINIT - "OMIT_AUTOINIT", -#endif -#if SQLITE_OMIT_AUTOMATIC_INDEX - "OMIT_AUTOMATIC_INDEX", -#endif -#if SQLITE_OMIT_AUTORESET - "OMIT_AUTORESET", -#endif -#if SQLITE_OMIT_AUTOVACUUM - "OMIT_AUTOVACUUM", -#endif -#if SQLITE_OMIT_BETWEEN_OPTIMIZATION - "OMIT_BETWEEN_OPTIMIZATION", -#endif -#if SQLITE_OMIT_BLOB_LITERAL - "OMIT_BLOB_LITERAL", -#endif -#if SQLITE_OMIT_CAST - "OMIT_CAST", -#endif -#if SQLITE_OMIT_CHECK - "OMIT_CHECK", -#endif -#if SQLITE_OMIT_COMPLETE - "OMIT_COMPLETE", -#endif -#if SQLITE_OMIT_COMPOUND_SELECT - "OMIT_COMPOUND_SELECT", -#endif -#if SQLITE_OMIT_CONFLICT_CLAUSE - "OMIT_CONFLICT_CLAUSE", -#endif -#if SQLITE_OMIT_CTE - "OMIT_CTE", -#endif -#if defined(SQLITE_OMIT_DATETIME_FUNCS) || defined(SQLITE_OMIT_FLOATING_POINT) - "OMIT_DATETIME_FUNCS", -#endif -#if SQLITE_OMIT_DECLTYPE - "OMIT_DECLTYPE", -#endif -#if SQLITE_OMIT_DEPRECATED - "OMIT_DEPRECATED", -#endif -#if SQLITE_OMIT_DESERIALIZE - "OMIT_DESERIALIZE", -#endif -#if SQLITE_OMIT_DISKIO - "OMIT_DISKIO", -#endif -#if SQLITE_OMIT_EXPLAIN - "OMIT_EXPLAIN", -#endif -#if SQLITE_OMIT_FLAG_PRAGMAS - "OMIT_FLAG_PRAGMAS", -#endif -#if SQLITE_OMIT_FLOATING_POINT - "OMIT_FLOATING_POINT", -#endif -#if SQLITE_OMIT_FOREIGN_KEY - "OMIT_FOREIGN_KEY", -#endif -#if SQLITE_OMIT_GET_TABLE - "OMIT_GET_TABLE", -#endif -#if SQLITE_OMIT_HEX_INTEGER - "OMIT_HEX_INTEGER", -#endif -#if SQLITE_OMIT_INCRBLOB - "OMIT_INCRBLOB", -#endif -#if SQLITE_OMIT_INTEGRITY_CHECK - "OMIT_INTEGRITY_CHECK", -#endif -#if SQLITE_OMIT_INTROSPECTION_PRAGMAS - "OMIT_INTROSPECTION_PRAGMAS", -#endif -#if SQLITE_OMIT_LIKE_OPTIMIZATION - "OMIT_LIKE_OPTIMIZATION", -#endif -#if SQLITE_OMIT_LOAD_EXTENSION - "OMIT_LOAD_EXTENSION", -#endif -#if SQLITE_OMIT_LOCALTIME - "OMIT_LOCALTIME", -#endif -#if SQLITE_OMIT_LOOKASIDE - "OMIT_LOOKASIDE", -#endif -#if SQLITE_OMIT_MEMORYDB - "OMIT_MEMORYDB", -#endif -#if SQLITE_OMIT_OR_OPTIMIZATION - "OMIT_OR_OPTIMIZATION", -#endif -#if SQLITE_OMIT_PAGER_PRAGMAS - "OMIT_PAGER_PRAGMAS", -#endif -#if SQLITE_OMIT_PARSER_TRACE - "OMIT_PARSER_TRACE", -#endif -#if SQLITE_OMIT_POPEN - "OMIT_POPEN", -#endif -#if SQLITE_OMIT_PRAGMA - "OMIT_PRAGMA", -#endif -#if SQLITE_OMIT_PROGRESS_CALLBACK - "OMIT_PROGRESS_CALLBACK", -#endif -#if SQLITE_OMIT_QUICKBALANCE - 
"OMIT_QUICKBALANCE", -#endif -#if SQLITE_OMIT_REINDEX - "OMIT_REINDEX", -#endif -#if SQLITE_OMIT_SCHEMA_PRAGMAS - "OMIT_SCHEMA_PRAGMAS", -#endif -#if SQLITE_OMIT_SCHEMA_VERSION_PRAGMAS - "OMIT_SCHEMA_VERSION_PRAGMAS", -#endif -#if SQLITE_OMIT_SHARED_CACHE - "OMIT_SHARED_CACHE", -#endif -#if SQLITE_OMIT_SHUTDOWN_DIRECTORIES - "OMIT_SHUTDOWN_DIRECTORIES", -#endif -#if SQLITE_OMIT_SUBQUERY - "OMIT_SUBQUERY", -#endif -#if SQLITE_OMIT_TCL_VARIABLE - "OMIT_TCL_VARIABLE", -#endif -#if SQLITE_OMIT_TEMPDB - "OMIT_TEMPDB", -#endif -#if SQLITE_OMIT_TEST_CONTROL - "OMIT_TEST_CONTROL", -#endif -#ifdef SQLITE_OMIT_TRACE -# if SQLITE_OMIT_TRACE != 1 - "OMIT_TRACE=" CTIMEOPT_VAL(SQLITE_OMIT_TRACE), -# endif -#endif -#if SQLITE_OMIT_TRIGGER - "OMIT_TRIGGER", -#endif -#if SQLITE_OMIT_TRUNCATE_OPTIMIZATION - "OMIT_TRUNCATE_OPTIMIZATION", -#endif -#if SQLITE_OMIT_UTF16 - "OMIT_UTF16", -#endif -#if SQLITE_OMIT_VACUUM - "OMIT_VACUUM", -#endif -#if SQLITE_OMIT_VIEW - "OMIT_VIEW", -#endif -#if SQLITE_OMIT_VIRTUALTABLE - "OMIT_VIRTUALTABLE", -#endif -#if SQLITE_OMIT_WAL - "OMIT_WAL", -#endif -#if SQLITE_OMIT_WSD - "OMIT_WSD", -#endif -#if SQLITE_OMIT_XFER_OPT - "OMIT_XFER_OPT", -#endif -#if SQLITE_PCACHE_SEPARATE_HEADER - "PCACHE_SEPARATE_HEADER", -#endif -#if SQLITE_PERFORMANCE_TRACE - "PERFORMANCE_TRACE", -#endif -#ifdef SQLITE_POWERSAFE_OVERWRITE -# if SQLITE_POWERSAFE_OVERWRITE != 1 - "POWERSAFE_OVERWRITE=" CTIMEOPT_VAL(SQLITE_POWERSAFE_OVERWRITE), -# endif -#endif -#if SQLITE_PREFER_PROXY_LOCKING - "PREFER_PROXY_LOCKING", -#endif -#if SQLITE_PROXY_DEBUG - "PROXY_DEBUG", -#endif -#if SQLITE_REVERSE_UNORDERED_SELECTS - "REVERSE_UNORDERED_SELECTS", -#endif -#if SQLITE_RTREE_INT_ONLY - "RTREE_INT_ONLY", -#endif -#if SQLITE_SECURE_DELETE - "SECURE_DELETE", -#endif -#if SQLITE_SMALL_STACK - "SMALL_STACK", -#endif -#ifdef SQLITE_SORTER_PMASZ - "SORTER_PMASZ=" CTIMEOPT_VAL(SQLITE_SORTER_PMASZ), -#endif -#if SQLITE_SOUNDEX - "SOUNDEX", -#endif -#ifdef SQLITE_STAT4_SAMPLES - "STAT4_SAMPLES=" CTIMEOPT_VAL(SQLITE_STAT4_SAMPLES), -#endif -#ifdef SQLITE_STMTJRNL_SPILL - "STMTJRNL_SPILL=" CTIMEOPT_VAL(SQLITE_STMTJRNL_SPILL), -#endif -#if SQLITE_SUBSTR_COMPATIBILITY - "SUBSTR_COMPATIBILITY", -#endif -#if (!defined(SQLITE_WIN32_MALLOC) \ - && !defined(SQLITE_ZERO_MALLOC) \ - && !defined(SQLITE_MEMDEBUG) \ - ) || defined(SQLITE_SYSTEM_MALLOC) - "SYSTEM_MALLOC", -#endif -#if SQLITE_TCL - "TCL", -#endif -#ifdef SQLITE_TEMP_STORE - "TEMP_STORE=" CTIMEOPT_VAL(SQLITE_TEMP_STORE), -#endif -#if SQLITE_TEST - "TEST", -#endif -#if defined(SQLITE_THREADSAFE) - "THREADSAFE=" CTIMEOPT_VAL(SQLITE_THREADSAFE), -#elif defined(THREADSAFE) - "THREADSAFE=" CTIMEOPT_VAL(THREADSAFE), -#else - "THREADSAFE=1", -#endif -#if SQLITE_UNLINK_AFTER_CLOSE - "UNLINK_AFTER_CLOSE", -#endif -#if SQLITE_UNTESTABLE - "UNTESTABLE", -#endif -#if SQLITE_USER_AUTHENTICATION - "USER_AUTHENTICATION", -#endif -#if SQLITE_USE_ALLOCA - "USE_ALLOCA", -#endif -#if SQLITE_USE_FCNTL_TRACE - "USE_FCNTL_TRACE", -#endif -#if SQLITE_USE_URI - "USE_URI", -#endif -#if SQLITE_VDBE_COVERAGE - "VDBE_COVERAGE", -#endif -#if SQLITE_WIN32_MALLOC - "WIN32_MALLOC", -#endif -#if SQLITE_ZERO_MALLOC - "ZERO_MALLOC", -#endif -/* -** END CODE GENERATED BY tool/mkctime.tcl -*/ -}; - -SQLITE_PRIVATE const char **sqlite3CompileOptions(int *pnOpt){ - *pnOpt = sizeof(sqlite3azCompileOpt) / sizeof(sqlite3azCompileOpt[0]); - return (const char**)sqlite3azCompileOpt; -} - -#endif /* SQLITE_OMIT_COMPILEOPTION_DIAGS */ - -/************** End of ctime.c ***********************************************/ 
/************** Begin file sqliteInt.h ***************************************/ /* ** 2001 September 15 @@ -1075,6 +288,17 @@ SQLITE_PRIVATE const char **sqlite3CompileOptions(int *pnOpt){ # define _USE_32BIT_TIME_T #endif +/* Optionally #include a user-defined header, whereby compilation options +** may be set prior to where they take effect, but after platform setup. +** If SQLITE_CUSTOM_INCLUDE=? is defined, its value names the #include +** file. +*/ +#ifdef SQLITE_CUSTOM_INCLUDE +# define INC_STRINGIFY_(f) #f +# define INC_STRINGIFY(f) INC_STRINGIFY_(f) +# include INC_STRINGIFY(SQLITE_CUSTOM_INCLUDE) +#endif + /* The public SQLite interface. The _FILE_OFFSET_BITS macro must appear ** first in QNX. Also, the _USE_32BIT_TIME_T macro must appear first for ** MinGW. @@ -1126,7 +350,30 @@ extern "C" { /* -** Provide the ability to override linkage features of the interface. +** Facilitate override of interface linkage and calling conventions. +** Be aware that these macros may not be used within this particular +** translation of the amalgamation and its associated header file. +** +** The SQLITE_EXTERN and SQLITE_API macros are used to instruct the +** compiler that the target identifier should have external linkage. +** +** The SQLITE_CDECL macro is used to set the calling convention for +** public functions that accept a variable number of arguments. +** +** The SQLITE_APICALL macro is used to set the calling convention for +** public functions that accept a fixed number of arguments. +** +** The SQLITE_STDCALL macro is no longer used and is now deprecated. +** +** The SQLITE_CALLBACK macro is used to set the calling convention for +** function pointers. +** +** The SQLITE_SYSAPI macro is used to set the calling convention for +** functions provided by the operating system. +** +** Currently, the SQLITE_CDECL, SQLITE_APICALL, SQLITE_CALLBACK, and +** SQLITE_SYSAPI macros are used only when building for environments +** that require non-default calling conventions. */ #ifndef SQLITE_EXTERN # define SQLITE_EXTERN extern @@ -1206,9 +453,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.36.0" -#define SQLITE_VERSION_NUMBER 3036000 -#define SQLITE_SOURCE_ID "2021-06-18 18:36:39 5c9a6c06871cb9fe42814af9c039eb6da5427a6ec28f187af7ebfb62eafa66e5" +#define SQLITE_VERSION "3.37.0" +#define SQLITE_VERSION_NUMBER 3037000 +#define SQLITE_SOURCE_ID "2021-11-27 14:13:22 bd41822c7424d393a30e92ff6cb254d25c26769889c1499a18a0b9339f5d6c8a" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -1620,6 +867,7 @@ SQLITE_API int sqlite3_exec( #define SQLITE_CONSTRAINT_VTAB (SQLITE_CONSTRAINT | (9<<8)) #define SQLITE_CONSTRAINT_ROWID (SQLITE_CONSTRAINT |(10<<8)) #define SQLITE_CONSTRAINT_PINNED (SQLITE_CONSTRAINT |(11<<8)) +#define SQLITE_CONSTRAINT_DATATYPE (SQLITE_CONSTRAINT |(12<<8)) #define SQLITE_NOTICE_RECOVER_WAL (SQLITE_NOTICE | (1<<8)) #define SQLITE_NOTICE_RECOVER_ROLLBACK (SQLITE_NOTICE | (2<<8)) #define SQLITE_WARNING_AUTOINDEX (SQLITE_WARNING | (1<<8)) @@ -1633,6 +881,19 @@ SQLITE_API int sqlite3_exec( ** These bit values are intended for use in the ** 3rd parameter to the [sqlite3_open_v2()] interface and ** in the 4th parameter to the [sqlite3_vfs.xOpen] method. +** +** Only those flags marked as "Ok for sqlite3_open_v2()" may be +** used as the third argument to the [sqlite3_open_v2()] interface. 
+** The other flags have historically been ignored by sqlite3_open_v2(), +** though future versions of SQLite might change so that an error is +** raised if any of the disallowed bits are passed into sqlite3_open_v2(). +** Applications should not depend on the historical behavior. +** +** Note in particular that passing the SQLITE_OPEN_EXCLUSIVE flag into +** [sqlite3_open_v2()] does *not* cause the underlying database file +** to be opened using O_EXCL. Passing SQLITE_OPEN_EXCLUSIVE into +** [sqlite3_open_v2()] has historically be a no-op and might become an +** error in future versions of SQLite. */ #define SQLITE_OPEN_READONLY 0x00000001 /* Ok for sqlite3_open_v2() */ #define SQLITE_OPEN_READWRITE 0x00000002 /* Ok for sqlite3_open_v2() */ @@ -1655,6 +916,7 @@ SQLITE_API int sqlite3_exec( #define SQLITE_OPEN_PRIVATECACHE 0x00040000 /* Ok for sqlite3_open_v2() */ #define SQLITE_OPEN_WAL 0x00080000 /* VFS only */ #define SQLITE_OPEN_NOFOLLOW 0x01000000 /* Ok for sqlite3_open_v2() */ +#define SQLITE_OPEN_EXRESCODE 0x02000000 /* Extended result codes */ /* Reserved: 0x00F00000 */ /* Legacy compatibility: */ @@ -3547,11 +2809,14 @@ SQLITE_API void sqlite3_set_last_insert_rowid(sqlite3*,sqlite3_int64); ** CAPI3REF: Count The Number Of Rows Modified ** METHOD: sqlite3 ** -** ^This function returns the number of rows modified, inserted or +** ^These functions return the number of rows modified, inserted or ** deleted by the most recently completed INSERT, UPDATE or DELETE ** statement on the database connection specified by the only parameter. -** ^Executing any other type of SQL statement does not modify the value -** returned by this function. +** The two functions are identical except for the type of the return value +** and that if the number of rows modified by the most recent INSERT, UPDATE +** or DELETE is greater than the maximum value supported by type "int", then +** the return value of sqlite3_changes() is undefined. ^Executing any other +** type of SQL statement does not modify the value returned by these functions. ** ** ^Only changes made directly by the INSERT, UPDATE or DELETE statement are ** considered - auxiliary changes caused by [CREATE TRIGGER | triggers], @@ -3600,16 +2865,21 @@ SQLITE_API void sqlite3_set_last_insert_rowid(sqlite3*,sqlite3_int64); ** */ SQLITE_API int sqlite3_changes(sqlite3*); +SQLITE_API sqlite3_int64 sqlite3_changes64(sqlite3*); /* ** CAPI3REF: Total Number Of Rows Modified ** METHOD: sqlite3 ** -** ^This function returns the total number of rows inserted, modified or +** ^These functions return the total number of rows inserted, modified or ** deleted by all [INSERT], [UPDATE] or [DELETE] statements completed ** since the database connection was opened, including those executed as -** part of trigger programs. ^Executing any other type of SQL statement -** does not affect the value returned by sqlite3_total_changes(). +** part of trigger programs. The two functions are identical except for the +** type of the return value and that if the number of rows modified by the +** connection exceeds the maximum value supported by type "int", then +** the return value of sqlite3_total_changes() is undefined. ^Executing +** any other type of SQL statement does not affect the value returned by +** sqlite3_total_changes(). 
** ** ^Changes made as part of [foreign key actions] are included in the ** count, but those made as part of REPLACE constraint resolution are @@ -3637,6 +2907,7 @@ SQLITE_API int sqlite3_changes(sqlite3*); ** */ SQLITE_API int sqlite3_total_changes(sqlite3*); +SQLITE_API sqlite3_int64 sqlite3_total_changes64(sqlite3*); /* ** CAPI3REF: Interrupt A Long-Running Query @@ -4466,6 +3737,14 @@ SQLITE_API void sqlite3_progress_handler(sqlite3*, int, int(*)(void*), void*); ** the default shared cache setting provided by ** [sqlite3_enable_shared_cache()].)^ ** +** [[OPEN_EXRESCODE]] ^(
[SQLITE_OPEN_EXRESCODE]
+**
The database connection comes up in "extended result code mode". +** In other words, the database behaves as if +** [sqlite3_extended_result_codes(db,1)] were called on the database +** connection as soon as the connection is created. In addition to setting +** the extended result code mode, this flag also causes [sqlite3_open_v2()] +** to return an extended result code.
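A brief usage sketch, not taken from the patch itself (the helper name and filename are illustrative): the new SQLITE_OPEN_EXRESCODE bit is simply OR-ed into the usual sqlite3_open_v2() flags, after which the connection reports extended result codes everywhere.

    #include <stdio.h>
    #include "sqlite3.h"

    /* Open a database so that all APIs report extended result codes. */
    static int open_with_extended_codes(const char *zFilename, sqlite3 **ppDb){
      int rc = sqlite3_open_v2(zFilename, ppDb,
                               SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE |
                               SQLITE_OPEN_EXRESCODE, 0);
      if( rc!=SQLITE_OK ){
        /* rc may already be an extended code such as SQLITE_CANTOPEN_ISDIR */
        fprintf(stderr, "open failed: %s\n", sqlite3_errstr(rc));
      }
      return rc;
    }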
+** ** [[OPEN_NOFOLLOW]] ^(
[SQLITE_OPEN_NOFOLLOW]
**
The database filename is not allowed to be a symbolic link
** )^ @@ -4473,7 +3752,15 @@ SQLITE_API void sqlite3_progress_handler(sqlite3*, int, int(*)(void*), void*); ** If the 3rd parameter to sqlite3_open_v2() is not one of the ** required combinations shown above optionally combined with other ** [SQLITE_OPEN_READONLY | SQLITE_OPEN_* bits] -** then the behavior is undefined. +** then the behavior is undefined. Historic versions of SQLite +** have silently ignored surplus bits in the flags parameter to +** sqlite3_open_v2(), however that behavior might not be carried through +** into future versions of SQLite and so applications should not rely +** upon it. Note in particular that the SQLITE_OPEN_EXCLUSIVE flag is a no-op +** for sqlite3_open_v2(). The SQLITE_OPEN_EXCLUSIVE does *not* cause +** the open to fail if the database already exists. The SQLITE_OPEN_EXCLUSIVE +** flag is intended for use by the [sqlite3_vfs|VFS interface] only, and not +** by sqlite3_open_v2(). ** ** ^The fourth parameter to sqlite3_open_v2() is the name of the ** [sqlite3_vfs] object that defines the operating system interface that @@ -5241,12 +4528,17 @@ SQLITE_API int sqlite3_prepare16_v3( ** are managed by SQLite and are automatically freed when the prepared ** statement is finalized. ** ^The string returned by sqlite3_expanded_sql(P), on the other hand, -** is obtained from [sqlite3_malloc()] and must be free by the application +** is obtained from [sqlite3_malloc()] and must be freed by the application ** by passing it to [sqlite3_free()]. +** +** ^The sqlite3_normalized_sql() interface is only available if +** the [SQLITE_ENABLE_NORMALIZE] compile-time option is defined. */ SQLITE_API const char *sqlite3_sql(sqlite3_stmt *pStmt); SQLITE_API char *sqlite3_expanded_sql(sqlite3_stmt *pStmt); +#ifdef SQLITE_ENABLE_NORMALIZE SQLITE_API const char *sqlite3_normalized_sql(sqlite3_stmt *pStmt); +#endif /* ** CAPI3REF: Determine If An SQL Statement Writes The Database @@ -7430,6 +6722,72 @@ SQLITE_API sqlite3_stmt *sqlite3_next_stmt(sqlite3 *pDb, sqlite3_stmt *pStmt); SQLITE_API void *sqlite3_commit_hook(sqlite3*, int(*)(void*), void*); SQLITE_API void *sqlite3_rollback_hook(sqlite3*, void(*)(void *), void*); +/* +** CAPI3REF: Autovacuum Compaction Amount Callback +** METHOD: sqlite3 +** +** ^The sqlite3_autovacuum_pages(D,C,P,X) interface registers a callback +** function C that is invoked prior to each autovacuum of the database +** file. ^The callback is passed a copy of the generic data pointer (P), +** the schema-name of the attached database that is being autovacuumed, +** the the size of the database file in pages, the number of free pages, +** and the number of bytes per page, respectively. The callback should +** return the number of free pages that should be removed by the +** autovacuum. ^If the callback returns zero, then no autovacuum happens. +** ^If the value returned is greater than or equal to the number of +** free pages, then a complete autovacuum happens. +** +**

^If there are multiple ATTACH-ed database files that are being +** modified as part of a transaction commit, then the autovacuum pages +** callback is invoked separately for each file. +** +**

The callback is not reentrant. The callback function should +** not attempt to invoke any other SQLite interface. If it does, bad +** things may happen, including segmentation faults and corrupt database +** files. The callback function should be a simple function that +** does some arithmetic on its input parameters and returns a result. +** +** ^The X parameter to sqlite3_autovacuum_pages(D,C,P,X) is an optional +** destructor for the P parameter. ^If X is not NULL, then X(P) is +** invoked whenever the database connection closes or when the callback +** is overwritten by another invocation of sqlite3_autovacuum_pages(). +** +**

^There is only one autovacuum pages callback per database connection. +** ^Each call to the sqlite3_autovacuum_pages() interface overrides all +** previous invocations for that database connection. ^If the callback +** argument (C) to sqlite3_autovacuum_pages(D,C,P,X) is a NULL pointer, +** then the autovacuum steps callback is cancelled. The return value +** from sqlite3_autovacuum_pages() is normally SQLITE_OK, but might +** be some other error code if something goes wrong. The current +** implementation will only return SQLITE_OK or SQLITE_MISUSE, but other +** return codes might be added in future releases. +** +**

If no autovacuum pages callback is specified (the usual case) or +** a NULL pointer is provided for the callback, +** then the default behavior is to vacuum all free pages. So, in other +** words, the default behavior is the same as if the callback function +** were something like this: +** +**

+**     unsigned int demonstration_autovac_pages_callback(
+**       void *pClientData,
+**       const char *zSchema,
+**       unsigned int nDbPage,
+**       unsigned int nFreePage,
+**       unsigned int nBytePerPage
+**     ){
+**       return nFreePage;
+**     }
+** 
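For completeness, a registration sketch (not from the patch; the retention policy and function names are illustrative) showing how a callback like the one above is installed with the sqlite3_autovacuum_pages() interface declared just below:

    #include "sqlite3.h"

    /* Keep up to 100 free pages as slack; let autovacuum reclaim the rest. */
    static unsigned int keep_some_slack(void *pClientData, const char *zSchema,
                                        unsigned int nDbPage,
                                        unsigned int nFreePage,
                                        unsigned int nBytePerPage){
      (void)pClientData; (void)zSchema; (void)nDbPage; (void)nBytePerPage;
      return nFreePage>100 ? nFreePage-100 : 0;
    }

    /* Install the policy on an open connection; no client data or destructor. */
    static int enable_partial_autovacuum(sqlite3 *db){
      return sqlite3_autovacuum_pages(db, keep_some_slack, 0, 0);
    }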
+*/ +SQLITE_API int sqlite3_autovacuum_pages( + sqlite3 *db, + unsigned int(*)(void*,const char*,unsigned int,unsigned int,unsigned int), + void*, + void(*)(void*) +); + + /* ** CAPI3REF: Data Change Notification Callbacks ** METHOD: sqlite3 @@ -10093,8 +9451,9 @@ SQLITE_API void sqlite3_log(int iErrCode, const char *zFormat, ...); ** ** A single database handle may have at most a single write-ahead log callback ** registered at one time. ^Calling [sqlite3_wal_hook()] replaces any -** previously registered write-ahead log callback. ^Note that the -** [sqlite3_wal_autocheckpoint()] interface and the +** previously registered write-ahead log callback. ^The return value is +** a copy of the third parameter from the previous call, if any, or 0. +** ^Note that the [sqlite3_wal_autocheckpoint()] interface and the ** [wal_autocheckpoint pragma] both invoke [sqlite3_wal_hook()] and will ** overwrite any prior [sqlite3_wal_hook()] settings. */ @@ -10961,6 +10320,10 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** +** It is not possible to deserialized into the TEMP database. If the +** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the +** function returns SQLITE_ERROR. +** ** If sqlite3_deserialize(D,S,P,N,M,F) fails for any reason and if the ** SQLITE_DESERIALIZE_FREEONCLOSE bit is set in argument F, then ** [sqlite3_free()] is invoked on argument P prior to returning. @@ -13443,7 +12806,7 @@ struct fts5_api { ** autoconf-based build */ #if defined(_HAVE_SQLITE_CONFIG_H) && !defined(SQLITECONFIG_H) -/* #include "config.h" */ +#include "config.h" #define SQLITECONFIG_H 1 #endif @@ -13679,11 +13042,12 @@ struct fts5_api { #ifndef __has_extension # define __has_extension(x) 0 /* compatibility with non-clang compilers */ #endif -#if GCC_VERSION>=4007000 || \ - (__has_extension(c_atomic) && __has_extension(c_atomic_store_n)) +#if GCC_VERSION>=4007000 || __has_extension(c_atomic) +# define SQLITE_ATOMIC_INTRINSICS 1 # define AtomicLoad(PTR) __atomic_load_n((PTR),__ATOMIC_RELAXED) # define AtomicStore(PTR,VAL) __atomic_store_n((PTR),(VAL),__ATOMIC_RELAXED) #else +# define SQLITE_ATOMIC_INTRINSICS 0 # define AtomicLoad(PTR) (*(PTR)) # define AtomicStore(PTR,VAL) (*(PTR) = (VAL)) #endif @@ -13888,11 +13252,12 @@ struct fts5_api { ** is significant and used at least once. On switch statements ** where multiple cases go to the same block of code, testcase() ** can insure that all cases are evaluated. -** */ -#ifdef SQLITE_COVERAGE_TEST -SQLITE_PRIVATE void sqlite3Coverage(int); -# define testcase(X) if( X ){ sqlite3Coverage(__LINE__); } +#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_DEBUG) +# ifndef SQLITE_AMALGAMATION + extern unsigned int sqlite3CoverageCounter; +# endif +# define testcase(X) if( X ){ sqlite3CoverageCounter += (unsigned)__LINE__; } #else # define testcase(X) #endif @@ -13922,6 +13287,14 @@ SQLITE_PRIVATE void sqlite3Coverage(int); # define VVA_ONLY(X) #endif +/* +** Disable ALWAYS() and NEVER() (make them pass-throughs) for coverage +** and mutation testing +*/ +#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_MUTATION_TEST) +# define SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS 1 +#endif + /* ** The ALWAYS and NEVER macros surround boolean expressions which ** are intended to always be true or false, respectively. 
Such @@ -13937,7 +13310,7 @@ SQLITE_PRIVATE void sqlite3Coverage(int); ** be true and false so that the unreachable code they specify will ** not be counted as untested code. */ -#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_MUTATION_TEST) +#if defined(SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS) # define ALWAYS(X) (1) # define NEVER(X) (0) #elif !defined(NDEBUG) @@ -13948,26 +13321,6 @@ SQLITE_PRIVATE void sqlite3Coverage(int); # define NEVER(X) (X) #endif -/* -** The harmless(X) macro indicates that expression X is usually false -** but can be true without causing any problems, but we don't know of -** any way to cause X to be true. -** -** In debugging and testing builds, this macro will abort if X is ever -** true. In this way, developers are alerted to a possible test case -** that causes X to be true. If a harmless macro ever fails, that is -** an opportunity to change the macro into a testcase() and add a new -** test case to the test suite. -** -** For normal production builds, harmless(X) is a no-op, since it does -** not matter whether expression X is true or false. -*/ -#ifdef SQLITE_DEBUG -# define harmless(X) assert(!(X)); -#else -# define harmless(X) -#endif - /* ** Some conditionals are optimizations only. In other words, if the ** conditionals are replaced with a constant 1 (true) or 0 (false) then @@ -14031,6 +13384,13 @@ SQLITE_PRIVATE void sqlite3Coverage(int); # undef SQLITE_ENABLE_EXPLAIN_COMMENTS #endif +/* +** SQLITE_OMIT_VIRTUALTABLE implies SQLITE_OMIT_ALTERTABLE +*/ +#if defined(SQLITE_OMIT_VIRTUALTABLE) && !defined(SQLITE_OMIT_ALTERTABLE) +# define SQLITE_OMIT_ALTERTABLE +#endif + /* ** Return true (non-zero) if the input is an integer that is too large ** to fit in 32-bits. This macro is used inside of various testcase() @@ -14143,7 +13503,7 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash*); /* ** Number of entries in a hash table */ -/* #define sqliteHashCount(H) ((H)->count) // NOT USED */ +#define sqliteHashCount(H) ((H)->count) #endif /* SQLITE_HASH_H */ @@ -14175,8 +13535,8 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash*); #define TK_LP 22 #define TK_RP 23 #define TK_AS 24 -#define TK_WITHOUT 25 -#define TK_COMMA 26 +#define TK_COMMA 25 +#define TK_WITHOUT 26 #define TK_ABORT 27 #define TK_ACTION 28 #define TK_AFTER 29 @@ -14439,7 +13799,7 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash*); ** number of pages. A negative number N translations means that a buffer ** of -1024*N bytes is allocated and used for as many pages as it will hold. ** -** The default value of "20" was choosen to minimize the run-time of the +** The default value of "20" was chosen to minimize the run-time of the ** speedtest1 test program with options: --shrink-memory --reprepare */ #ifndef SQLITE_DEFAULT_PCACHE_INITSZ @@ -14601,6 +13961,7 @@ typedef INT16_TYPE LogEst; # define SQLITE_PTRSIZE __SIZEOF_POINTER__ # elif defined(i386) || defined(__i386__) || defined(_M_IX86) || \ defined(_M_ARM) || defined(__arm__) || defined(__x86) || \ + (defined(__APPLE__) && defined(__POWERPC__)) || \ (defined(__TOS_AIX__) && !defined(__64BIT__)) # define SQLITE_PTRSIZE 4 # else @@ -14795,11 +14156,25 @@ struct BusyHandler { /* ** Name of table that holds the database schema. +** +** The PREFERRED names are used whereever possible. But LEGACY is also +** used for backwards compatibility. +** +** 1. Queries can use either the PREFERRED or the LEGACY names +** 2. The sqlite3_set_authorizer() callback uses the LEGACY name +** 3. 
The PRAGMA table_list statement uses the PREFERRED name +** +** The LEGACY names are stored in the internal symbol hash table +** in support of (2). Names are translated using sqlite3PreferredTableName() +** for (3). The sqlite3FindTable() function takes care of translating +** names for (1). +** +** Note that "sqlite_temp_schema" can also be called "temp.sqlite_schema". */ -#define DFLT_SCHEMA_TABLE "sqlite_master" -#define DFLT_TEMP_SCHEMA_TABLE "sqlite_temp_master" -#define ALT_SCHEMA_TABLE "sqlite_schema" -#define ALT_TEMP_SCHEMA_TABLE "sqlite_temp_schema" +#define LEGACY_SCHEMA_TABLE "sqlite_master" +#define LEGACY_TEMP_SCHEMA_TABLE "sqlite_temp_master" +#define PREFERRED_SCHEMA_TABLE "sqlite_schema" +#define PREFERRED_TEMP_SCHEMA_TABLE "sqlite_temp_schema" /* @@ -14811,7 +14186,7 @@ struct BusyHandler { ** The name of the schema table. The name is different for TEMP. */ #define SCHEMA_TABLE(x) \ - ((!OMIT_TEMPDB)&&(x==1)?DFLT_TEMP_SCHEMA_TABLE:DFLT_SCHEMA_TABLE) + ((!OMIT_TEMPDB)&&(x==1)?LEGACY_TEMP_SCHEMA_TABLE:LEGACY_SCHEMA_TABLE) /* ** A convenience macro that returns the number of elements in @@ -15352,7 +14727,7 @@ SQLITE_PRIVATE int sqlite3BtreeIncrVacuum(Btree *); #define BTREE_BLOBKEY 2 /* Table has keys only - no data */ SQLITE_PRIVATE int sqlite3BtreeDropTable(Btree*, int, int*); -SQLITE_PRIVATE int sqlite3BtreeClearTable(Btree*, int, int*); +SQLITE_PRIVATE int sqlite3BtreeClearTable(Btree*, int, i64*); SQLITE_PRIVATE int sqlite3BtreeClearTableOfCursor(BtCursor*); SQLITE_PRIVATE int sqlite3BtreeTripAllCursors(Btree*, int, int); @@ -15476,13 +14851,17 @@ SQLITE_PRIVATE void sqlite3BtreeCursorHint(BtCursor*, int, ...); #endif SQLITE_PRIVATE int sqlite3BtreeCloseCursor(BtCursor*); -SQLITE_PRIVATE int sqlite3BtreeMovetoUnpacked( +SQLITE_PRIVATE int sqlite3BtreeTableMoveto( BtCursor*, - UnpackedRecord *pUnKey, i64 intKey, int bias, int *pRes ); +SQLITE_PRIVATE int sqlite3BtreeIndexMoveto( + BtCursor*, + UnpackedRecord *pUnKey, + int *pRes +); SQLITE_PRIVATE int sqlite3BtreeCursorHasMoved(BtCursor*); SQLITE_PRIVATE int sqlite3BtreeCursorRestore(BtCursor*, int*); SQLITE_PRIVATE int sqlite3BtreeDelete(BtCursor*, u8 flags); @@ -15833,35 +15212,35 @@ typedef struct VdbeOpList VdbeOpList; #define OP_If 18 /* jump */ #define OP_Not 19 /* same as TK_NOT, synopsis: r[P2]= !r[P1] */ #define OP_IfNot 20 /* jump */ -#define OP_IfNullRow 21 /* jump, synopsis: if P1.nullRow then r[P3]=NULL, goto P2 */ -#define OP_SeekLT 22 /* jump, synopsis: key=r[P3@P4] */ -#define OP_SeekLE 23 /* jump, synopsis: key=r[P3@P4] */ -#define OP_SeekGE 24 /* jump, synopsis: key=r[P3@P4] */ -#define OP_SeekGT 25 /* jump, synopsis: key=r[P3@P4] */ -#define OP_IfNotOpen 26 /* jump, synopsis: if( !csr[P1] ) goto P2 */ -#define OP_IfNoHope 27 /* jump, synopsis: key=r[P3@P4] */ -#define OP_NoConflict 28 /* jump, synopsis: key=r[P3@P4] */ -#define OP_NotFound 29 /* jump, synopsis: key=r[P3@P4] */ -#define OP_Found 30 /* jump, synopsis: key=r[P3@P4] */ -#define OP_SeekRowid 31 /* jump, synopsis: intkey=r[P3] */ -#define OP_NotExists 32 /* jump, synopsis: intkey=r[P3] */ -#define OP_Last 33 /* jump */ -#define OP_IfSmaller 34 /* jump */ -#define OP_SorterSort 35 /* jump */ -#define OP_Sort 36 /* jump */ -#define OP_Rewind 37 /* jump */ -#define OP_IdxLE 38 /* jump, synopsis: key=r[P3@P4] */ -#define OP_IdxGT 39 /* jump, synopsis: key=r[P3@P4] */ -#define OP_IdxLT 40 /* jump, synopsis: key=r[P3@P4] */ -#define OP_IdxGE 41 /* jump, synopsis: key=r[P3@P4] */ -#define OP_RowSetRead 42 /* jump, synopsis: r[P3]=rowset(P1) */ 
+#define OP_IsNullOrType 21 /* jump, synopsis: if typeof(r[P1]) IN (P3,5) goto P2 */ +#define OP_IfNullRow 22 /* jump, synopsis: if P1.nullRow then r[P3]=NULL, goto P2 */ +#define OP_SeekLT 23 /* jump, synopsis: key=r[P3@P4] */ +#define OP_SeekLE 24 /* jump, synopsis: key=r[P3@P4] */ +#define OP_SeekGE 25 /* jump, synopsis: key=r[P3@P4] */ +#define OP_SeekGT 26 /* jump, synopsis: key=r[P3@P4] */ +#define OP_IfNotOpen 27 /* jump, synopsis: if( !csr[P1] ) goto P2 */ +#define OP_IfNoHope 28 /* jump, synopsis: key=r[P3@P4] */ +#define OP_NoConflict 29 /* jump, synopsis: key=r[P3@P4] */ +#define OP_NotFound 30 /* jump, synopsis: key=r[P3@P4] */ +#define OP_Found 31 /* jump, synopsis: key=r[P3@P4] */ +#define OP_SeekRowid 32 /* jump, synopsis: intkey=r[P3] */ +#define OP_NotExists 33 /* jump, synopsis: intkey=r[P3] */ +#define OP_Last 34 /* jump */ +#define OP_IfSmaller 35 /* jump */ +#define OP_SorterSort 36 /* jump */ +#define OP_Sort 37 /* jump */ +#define OP_Rewind 38 /* jump */ +#define OP_IdxLE 39 /* jump, synopsis: key=r[P3@P4] */ +#define OP_IdxGT 40 /* jump, synopsis: key=r[P3@P4] */ +#define OP_IdxLT 41 /* jump, synopsis: key=r[P3@P4] */ +#define OP_IdxGE 42 /* jump, synopsis: key=r[P3@P4] */ #define OP_Or 43 /* same as TK_OR, synopsis: r[P3]=(r[P1] || r[P2]) */ #define OP_And 44 /* same as TK_AND, synopsis: r[P3]=(r[P1] && r[P2]) */ -#define OP_RowSetTest 45 /* jump, synopsis: if r[P3] in rowset(P1) goto P2 */ -#define OP_Program 46 /* jump */ -#define OP_FkIfZero 47 /* jump, synopsis: if fkctr[P1]==0 goto P2 */ -#define OP_IfPos 48 /* jump, synopsis: if r[P1]>0 then r[P1]-=P3, goto P2 */ -#define OP_IfNotZero 49 /* jump, synopsis: if r[P1]!=0 then r[P1]--, goto P2 */ +#define OP_RowSetRead 45 /* jump, synopsis: r[P3]=rowset(P1) */ +#define OP_RowSetTest 46 /* jump, synopsis: if r[P3] in rowset(P1) goto P2 */ +#define OP_Program 47 /* jump */ +#define OP_FkIfZero 48 /* jump, synopsis: if fkctr[P1]==0 goto P2 */ +#define OP_IfPos 49 /* jump, synopsis: if r[P1]>0 then r[P1]-=P3, goto P2 */ #define OP_IsNull 50 /* jump, same as TK_ISNULL, synopsis: if r[P1]==NULL goto P2 */ #define OP_NotNull 51 /* jump, same as TK_NOTNULL, synopsis: if r[P1]!=NULL goto P2 */ #define OP_Ne 52 /* jump, same as TK_NE, synopsis: IF r[P3]!=r[P1] */ @@ -15871,49 +15250,49 @@ typedef struct VdbeOpList VdbeOpList; #define OP_Lt 56 /* jump, same as TK_LT, synopsis: IF r[P3]=r[P1] */ #define OP_ElseEq 58 /* jump, same as TK_ESCAPE */ -#define OP_DecrJumpZero 59 /* jump, synopsis: if (--r[P1])==0 goto P2 */ -#define OP_IncrVacuum 60 /* jump */ -#define OP_VNext 61 /* jump */ -#define OP_Init 62 /* jump, synopsis: Start at P2 */ -#define OP_PureFunc 63 /* synopsis: r[P3]=func(r[P2@NP]) */ -#define OP_Function 64 /* synopsis: r[P3]=func(r[P2@NP]) */ -#define OP_Return 65 -#define OP_EndCoroutine 66 -#define OP_HaltIfNull 67 /* synopsis: if r[P3]=null halt */ -#define OP_Halt 68 -#define OP_Integer 69 /* synopsis: r[P2]=P1 */ -#define OP_Int64 70 /* synopsis: r[P2]=P4 */ -#define OP_String 71 /* synopsis: r[P2]='P4' (len=P1) */ -#define OP_Null 72 /* synopsis: r[P2..P3]=NULL */ -#define OP_SoftNull 73 /* synopsis: r[P1]=NULL */ -#define OP_Blob 74 /* synopsis: r[P2]=P4 (len=P1) */ -#define OP_Variable 75 /* synopsis: r[P2]=parameter(P1,P4) */ -#define OP_Move 76 /* synopsis: r[P2@P3]=r[P1@P3] */ -#define OP_Copy 77 /* synopsis: r[P2@P3+1]=r[P1@P3+1] */ -#define OP_SCopy 78 /* synopsis: r[P2]=r[P1] */ -#define OP_IntCopy 79 /* synopsis: r[P2]=r[P1] */ -#define OP_ChngCntRow 80 /* synopsis: output=r[P1] */ -#define 
OP_ResultRow 81 /* synopsis: output=r[P1@P2] */ -#define OP_CollSeq 82 -#define OP_AddImm 83 /* synopsis: r[P1]=r[P1]+P2 */ -#define OP_RealAffinity 84 -#define OP_Cast 85 /* synopsis: affinity(r[P1]) */ -#define OP_Permutation 86 -#define OP_Compare 87 /* synopsis: r[P1@P3] <-> r[P2@P3] */ -#define OP_IsTrue 88 /* synopsis: r[P2] = coalesce(r[P1]==TRUE,P3) ^ P4 */ -#define OP_ZeroOrNull 89 /* synopsis: r[P2] = 0 OR NULL */ -#define OP_Offset 90 /* synopsis: r[P3] = sqlite_offset(P1) */ -#define OP_Column 91 /* synopsis: r[P3]=PX */ -#define OP_Affinity 92 /* synopsis: affinity(r[P1@P2]) */ -#define OP_MakeRecord 93 /* synopsis: r[P3]=mkrec(r[P1@P2]) */ -#define OP_Count 94 /* synopsis: r[P2]=count() */ -#define OP_ReadCookie 95 -#define OP_SetCookie 96 -#define OP_ReopenIdx 97 /* synopsis: root=P2 iDb=P3 */ -#define OP_OpenRead 98 /* synopsis: root=P2 iDb=P3 */ -#define OP_OpenWrite 99 /* synopsis: root=P2 iDb=P3 */ -#define OP_OpenDup 100 -#define OP_OpenAutoindex 101 /* synopsis: nColumn=P2 */ +#define OP_IfNotZero 59 /* jump, synopsis: if r[P1]!=0 then r[P1]--, goto P2 */ +#define OP_DecrJumpZero 60 /* jump, synopsis: if (--r[P1])==0 goto P2 */ +#define OP_IncrVacuum 61 /* jump */ +#define OP_VNext 62 /* jump */ +#define OP_Init 63 /* jump, synopsis: Start at P2 */ +#define OP_PureFunc 64 /* synopsis: r[P3]=func(r[P2@NP]) */ +#define OP_Function 65 /* synopsis: r[P3]=func(r[P2@NP]) */ +#define OP_Return 66 +#define OP_EndCoroutine 67 +#define OP_HaltIfNull 68 /* synopsis: if r[P3]=null halt */ +#define OP_Halt 69 +#define OP_Integer 70 /* synopsis: r[P2]=P1 */ +#define OP_Int64 71 /* synopsis: r[P2]=P4 */ +#define OP_String 72 /* synopsis: r[P2]='P4' (len=P1) */ +#define OP_Null 73 /* synopsis: r[P2..P3]=NULL */ +#define OP_SoftNull 74 /* synopsis: r[P1]=NULL */ +#define OP_Blob 75 /* synopsis: r[P2]=P4 (len=P1) */ +#define OP_Variable 76 /* synopsis: r[P2]=parameter(P1,P4) */ +#define OP_Move 77 /* synopsis: r[P2@P3]=r[P1@P3] */ +#define OP_Copy 78 /* synopsis: r[P2@P3+1]=r[P1@P3+1] */ +#define OP_SCopy 79 /* synopsis: r[P2]=r[P1] */ +#define OP_IntCopy 80 /* synopsis: r[P2]=r[P1] */ +#define OP_ChngCntRow 81 /* synopsis: output=r[P1] */ +#define OP_ResultRow 82 /* synopsis: output=r[P1@P2] */ +#define OP_CollSeq 83 +#define OP_AddImm 84 /* synopsis: r[P1]=r[P1]+P2 */ +#define OP_RealAffinity 85 +#define OP_Cast 86 /* synopsis: affinity(r[P1]) */ +#define OP_Permutation 87 +#define OP_Compare 88 /* synopsis: r[P1@P3] <-> r[P2@P3] */ +#define OP_IsTrue 89 /* synopsis: r[P2] = coalesce(r[P1]==TRUE,P3) ^ P4 */ +#define OP_ZeroOrNull 90 /* synopsis: r[P2] = 0 OR NULL */ +#define OP_Offset 91 /* synopsis: r[P3] = sqlite_offset(P1) */ +#define OP_Column 92 /* synopsis: r[P3]=PX */ +#define OP_TypeCheck 93 /* synopsis: typecheck(r[P1@P2]) */ +#define OP_Affinity 94 /* synopsis: affinity(r[P1@P2]) */ +#define OP_MakeRecord 95 /* synopsis: r[P3]=mkrec(r[P1@P2]) */ +#define OP_Count 96 /* synopsis: r[P2]=count() */ +#define OP_ReadCookie 97 +#define OP_SetCookie 98 +#define OP_ReopenIdx 99 /* synopsis: root=P2 iDb=P3 */ +#define OP_OpenRead 100 /* synopsis: root=P2 iDb=P3 */ +#define OP_OpenWrite 101 /* synopsis: root=P2 iDb=P3 */ #define OP_BitAnd 102 /* same as TK_BITAND, synopsis: r[P3]=r[P1]&r[P2] */ #define OP_BitOr 103 /* same as TK_BITOR, synopsis: r[P3]=r[P1]|r[P2] */ #define OP_ShiftLeft 104 /* same as TK_LSHIFT, synopsis: r[P3]=r[P2]<0 then r[P2]=r[P1]+max(0,r[P3]) else r[P2]=(-1) */ -#define OP_AggInverse 157 /* synopsis: accum=r[P3] inverse(r[P2@P5]) */ -#define OP_AggStep 158 /* 
synopsis: accum=r[P3] step(r[P2@P5]) */ -#define OP_AggStep1 159 /* synopsis: accum=r[P3] step(r[P2@P5]) */ -#define OP_AggValue 160 /* synopsis: r[P3]=value N=P2 */ -#define OP_AggFinal 161 /* synopsis: accum=r[P1] N=P2 */ -#define OP_Expire 162 -#define OP_CursorLock 163 -#define OP_CursorUnlock 164 -#define OP_TableLock 165 /* synopsis: iDb=P1 root=P2 write=P3 */ -#define OP_VBegin 166 -#define OP_VCreate 167 -#define OP_VDestroy 168 -#define OP_VOpen 169 -#define OP_VColumn 170 /* synopsis: r[P3]=vcolumn(P2) */ -#define OP_VRename 171 -#define OP_Pagecount 172 -#define OP_MaxPgcnt 173 -#define OP_Trace 174 -#define OP_CursorHint 175 -#define OP_ReleaseReg 176 /* synopsis: release r[P1@P2] mask P3 */ -#define OP_Noop 177 -#define OP_Explain 178 -#define OP_Abortable 179 +#define OP_IntegrityCk 153 +#define OP_RowSetAdd 154 /* synopsis: rowset(P1)=r[P2] */ +#define OP_Param 155 +#define OP_FkCounter 156 /* synopsis: fkctr[P1]+=P2 */ +#define OP_MemMax 157 /* synopsis: r[P1]=max(r[P1],r[P2]) */ +#define OP_OffsetLimit 158 /* synopsis: if r[P1]>0 then r[P2]=r[P1]+max(0,r[P3]) else r[P2]=(-1) */ +#define OP_AggInverse 159 /* synopsis: accum=r[P3] inverse(r[P2@P5]) */ +#define OP_AggStep 160 /* synopsis: accum=r[P3] step(r[P2@P5]) */ +#define OP_AggStep1 161 /* synopsis: accum=r[P3] step(r[P2@P5]) */ +#define OP_AggValue 162 /* synopsis: r[P3]=value N=P2 */ +#define OP_AggFinal 163 /* synopsis: accum=r[P1] N=P2 */ +#define OP_Expire 164 +#define OP_CursorLock 165 +#define OP_CursorUnlock 166 +#define OP_TableLock 167 /* synopsis: iDb=P1 root=P2 write=P3 */ +#define OP_VBegin 168 +#define OP_VCreate 169 +#define OP_VDestroy 170 +#define OP_VOpen 171 +#define OP_VColumn 172 /* synopsis: r[P3]=vcolumn(P2) */ +#define OP_VRename 173 +#define OP_Pagecount 174 +#define OP_MaxPgcnt 175 +#define OP_Trace 176 +#define OP_CursorHint 177 +#define OP_ReleaseReg 178 /* synopsis: release r[P1@P2] mask P3 */ +#define OP_Noop 179 +#define OP_Explain 180 +#define OP_Abortable 181 /* Properties such as "out2" or "jump" that are specified in ** comments following the "case" for each opcode in the vdbe.c @@ -16006,27 +15387,27 @@ typedef struct VdbeOpList VdbeOpList; #define OPFLG_INITIALIZER {\ /* 0 */ 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x00, 0x10,\ /* 8 */ 0x00, 0x01, 0x00, 0x01, 0x01, 0x01, 0x03, 0x03,\ -/* 16 */ 0x01, 0x01, 0x03, 0x12, 0x03, 0x01, 0x09, 0x09,\ -/* 24 */ 0x09, 0x09, 0x01, 0x09, 0x09, 0x09, 0x09, 0x09,\ -/* 32 */ 0x09, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,\ -/* 40 */ 0x01, 0x01, 0x23, 0x26, 0x26, 0x0b, 0x01, 0x01,\ -/* 48 */ 0x03, 0x03, 0x03, 0x03, 0x0b, 0x0b, 0x0b, 0x0b,\ -/* 56 */ 0x0b, 0x0b, 0x01, 0x03, 0x01, 0x01, 0x01, 0x00,\ -/* 64 */ 0x00, 0x02, 0x02, 0x08, 0x00, 0x10, 0x10, 0x10,\ -/* 72 */ 0x10, 0x00, 0x10, 0x10, 0x00, 0x00, 0x10, 0x10,\ -/* 80 */ 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x00, 0x00,\ -/* 88 */ 0x12, 0x1e, 0x20, 0x00, 0x00, 0x00, 0x10, 0x10,\ -/* 96 */ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x26,\ +/* 16 */ 0x01, 0x01, 0x03, 0x12, 0x03, 0x03, 0x01, 0x09,\ +/* 24 */ 0x09, 0x09, 0x09, 0x01, 0x09, 0x09, 0x09, 0x09,\ +/* 32 */ 0x09, 0x09, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,\ +/* 40 */ 0x01, 0x01, 0x01, 0x26, 0x26, 0x23, 0x0b, 0x01,\ +/* 48 */ 0x01, 0x03, 0x03, 0x03, 0x0b, 0x0b, 0x0b, 0x0b,\ +/* 56 */ 0x0b, 0x0b, 0x01, 0x03, 0x03, 0x01, 0x01, 0x01,\ +/* 64 */ 0x00, 0x00, 0x02, 0x02, 0x08, 0x00, 0x10, 0x10,\ +/* 72 */ 0x10, 0x10, 0x00, 0x10, 0x10, 0x00, 0x00, 0x10,\ +/* 80 */ 0x10, 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x00,\ +/* 88 */ 0x00, 0x12, 0x1e, 0x20, 0x00, 0x00, 
0x00, 0x00,\ +/* 96 */ 0x10, 0x10, 0x00, 0x00, 0x00, 0x00, 0x26, 0x26,\ /* 104 */ 0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x26,\ /* 112 */ 0x00, 0x12, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,\ -/* 120 */ 0x00, 0x00, 0x10, 0x10, 0x00, 0x00, 0x00, 0x00,\ -/* 128 */ 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x04, 0x04,\ -/* 136 */ 0x00, 0x00, 0x10, 0x00, 0x10, 0x00, 0x00, 0x10,\ -/* 144 */ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06,\ -/* 152 */ 0x10, 0x10, 0x00, 0x04, 0x1a, 0x00, 0x00, 0x00,\ +/* 120 */ 0x00, 0x00, 0x00, 0x00, 0x10, 0x10, 0x00, 0x00,\ +/* 128 */ 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,\ +/* 136 */ 0x04, 0x04, 0x00, 0x00, 0x10, 0x00, 0x10, 0x00,\ +/* 144 */ 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\ +/* 152 */ 0x10, 0x00, 0x06, 0x10, 0x00, 0x04, 0x1a, 0x00,\ /* 160 */ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\ -/* 168 */ 0x00, 0x00, 0x00, 0x00, 0x10, 0x10, 0x00, 0x00,\ -/* 176 */ 0x00, 0x00, 0x00, 0x00,} +/* 168 */ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x10,\ +/* 176 */ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,} /* The resolve3P2Values() routine is able to run faster if it knows ** the value of the largest JUMP opcode. The smaller the maximum @@ -16034,7 +15415,7 @@ typedef struct VdbeOpList VdbeOpList; ** generated this include file strives to group all JUMP opcodes ** together near the beginning of the list. */ -#define SQLITE_MX_JUMP_OPCODE 62 /* Maximum JUMP opcode */ +#define SQLITE_MX_JUMP_OPCODE 63 /* Maximum JUMP opcode */ /************** End of opcodes.h *********************************************/ /************** Continuing where we left off in vdbe.h ***********************/ @@ -17111,10 +16492,10 @@ struct sqlite3 { u8 mTrace; /* zero or more SQLITE_TRACE flags */ u8 noSharedCache; /* True if no shared-cache backends */ u8 nSqlExec; /* Number of pending OP_SqlExec opcodes */ + u8 eOpenState; /* Current condition of the connection */ int nextPagesize; /* Pagesize after VACUUM if >0 */ - u32 magic; /* Magic number for detect library misuse */ - int nChange; /* Value returned by sqlite3_changes() */ - int nTotalChange; /* Value returned by sqlite3_total_changes() */ + i64 nChange; /* Value returned by sqlite3_changes() */ + i64 nTotalChange; /* Value returned by sqlite3_total_changes() */ int aLimit[SQLITE_N_LIMIT]; /* Limits */ int nMaxSorterMmap; /* Maximum size of regions mapped by sorter */ struct sqlite3InitInfo { /* Information used during initialization */ @@ -17124,7 +16505,7 @@ struct sqlite3 { unsigned orphanTrigger : 1; /* Last statement is orphaned TEMP trigger */ unsigned imposterTable : 1; /* Building an imposter table */ unsigned reopenMemdb : 1; /* ATTACH is really a reopen using MemDB */ - char **azInit; /* "type", "name", and "tbl_name" columns */ + const char **azInit; /* "type", "name", and "tbl_name" columns */ } init; int nVdbeActive; /* Number of VDBEs currently running */ int nVdbeRead; /* Number of active VDBEs that read or write */ @@ -17134,10 +16515,10 @@ struct sqlite3 { int nExtension; /* Number of loaded extensions */ void **aExtension; /* Array of shared library handles */ union { - void (*xLegacy)(void*,const char*); /* Legacy trace function */ - int (*xV2)(u32,void*,void*,void*); /* V2 Trace function */ + void (*xLegacy)(void*,const char*); /* mTrace==SQLITE_TRACE_LEGACY */ + int (*xV2)(u32,void*,void*,void*); /* All other mTrace values */ } trace; - void *pTraceArg; /* Argument to the trace function */ + void *pTraceArg; /* Argument to the trace function */ #ifndef SQLITE_OMIT_DEPRECATED void (*xProfile)(void*,const 
char*,u64); /* Profiling function */ void *pProfileArg; /* Argument to profile function */ @@ -17148,6 +16529,9 @@ struct sqlite3 { void (*xRollbackCallback)(void*); /* Invoked at every commit. */ void *pUpdateArg; void (*xUpdateCallback)(void*,int, const char*,const char*,sqlite_int64); + void *pAutovacPagesArg; /* Client argument to autovac_pages */ + void (*xAutovacDestr)(void*); /* Destructor for pAutovacPAgesArg */ + unsigned int (*xAutovacPages)(void*,const char*,u32,u32,u32); Parse *pParse; /* Current parse */ #ifdef SQLITE_ENABLE_PREUPDATE_HOOK void *pPreUpdateArg; /* First argument to xPreUpdateCallback */ @@ -17277,6 +16661,7 @@ struct sqlite3 { #define SQLITE_CountRows HI(0x00001) /* Count rows changed by INSERT, */ /* DELETE, or UPDATE and return */ /* the count using a callback. */ +#define SQLITE_CorruptRdOnly HI(0x00002) /* Prohibit writes due to error */ /* Flags used only if debugging */ #ifdef SQLITE_DEBUG @@ -17323,6 +16708,8 @@ struct sqlite3 { #define SQLITE_PropagateConst 0x00008000 /* The constant propagation opt */ #define SQLITE_MinMaxOpt 0x00010000 /* The min/max optimization */ #define SQLITE_SeekScan 0x00020000 /* The OP_SeekScan optimization */ +#define SQLITE_OmitOrderBy 0x00040000 /* Omit pointless ORDER BY */ + /* TH3 expects this value ^^^^^^^^^^ to be 0x40000. Coordinate any change */ #define SQLITE_AllOpts 0xffffffff /* All optimizations */ /* @@ -17337,17 +16724,16 @@ struct sqlite3 { */ #define ConstFactorOk(P) ((P)->okConstFactor) -/* -** Possible values for the sqlite.magic field. -** The numbers are obtained at random and have no special meaning, other -** than being distinct from one another. +/* Possible values for the sqlite3.eOpenState field. +** The numbers are randomly selected such that a minimum of three bits must +** change to convert any number to another or to zero */ -#define SQLITE_MAGIC_OPEN 0xa029a697 /* Database is open */ -#define SQLITE_MAGIC_CLOSED 0x9f3c2d33 /* Database is closed */ -#define SQLITE_MAGIC_SICK 0x4b771290 /* Error and awaiting close */ -#define SQLITE_MAGIC_BUSY 0xf03b7906 /* Database currently in use */ -#define SQLITE_MAGIC_ERROR 0xb5357930 /* An SQLITE_MISUSE error occurred */ -#define SQLITE_MAGIC_ZOMBIE 0x64cffc7f /* Close with last statement close */ +#define SQLITE_STATE_OPEN 0x76 /* Database is open */ +#define SQLITE_STATE_CLOSED 0xce /* Database is closed */ +#define SQLITE_STATE_SICK 0xba /* Error and awaiting close */ +#define SQLITE_STATE_BUSY 0x6d /* Database currently in use */ +#define SQLITE_STATE_ERROR 0xd5 /* An SQLITE_MISUSE error occurred */ +#define SQLITE_STATE_ZOMBIE 0xa7 /* Close with last statement close */ /* ** Each SQL function is defined by an instance of the following @@ -17372,7 +16758,7 @@ struct FuncDef { union { FuncDef *pHash; /* Next with a different name but the same hash */ FuncDestructor *pDestructor; /* Reference counted destructor function */ - } u; + } u; /* pHash if SQLITE_FUNC_BUILTIN, pDestructor otherwise */ }; /* @@ -17402,12 +16788,13 @@ struct FuncDestructor { ** are assert() statements in the code to verify this. 
** ** Value constraints (enforced via assert()): -** SQLITE_FUNC_MINMAX == NC_MinMaxAgg == SF_MinMaxAgg -** SQLITE_FUNC_LENGTH == OPFLAG_LENGTHARG -** SQLITE_FUNC_TYPEOF == OPFLAG_TYPEOFARG -** SQLITE_FUNC_CONSTANT == SQLITE_DETERMINISTIC from the API -** SQLITE_FUNC_DIRECT == SQLITE_DIRECTONLY from the API -** SQLITE_FUNC_UNSAFE == SQLITE_INNOCUOUS +** SQLITE_FUNC_MINMAX == NC_MinMaxAgg == SF_MinMaxAgg +** SQLITE_FUNC_ANYORDER == NC_OrderAgg == SF_OrderByReqd +** SQLITE_FUNC_LENGTH == OPFLAG_LENGTHARG +** SQLITE_FUNC_TYPEOF == OPFLAG_TYPEOFARG +** SQLITE_FUNC_CONSTANT == SQLITE_DETERMINISTIC from the API +** SQLITE_FUNC_DIRECT == SQLITE_DIRECTONLY from the API +** SQLITE_FUNC_UNSAFE == SQLITE_INNOCUOUS ** SQLITE_FUNC_ENCMASK depends on SQLITE_UTF* macros in the API */ #define SQLITE_FUNC_ENCMASK 0x0003 /* SQLITE_UTF8, SQLITE_UTF16BE or UTF16LE */ @@ -17432,6 +16819,8 @@ struct FuncDestructor { #define SQLITE_FUNC_SUBTYPE 0x00100000 /* Result likely to have sub-type */ #define SQLITE_FUNC_UNSAFE 0x00200000 /* Function has side effects */ #define SQLITE_FUNC_INLINE 0x00400000 /* Functions implemented in-line */ +#define SQLITE_FUNC_BUILTIN 0x00800000 /* This is a built-in function */ +#define SQLITE_FUNC_ANYORDER 0x08000000 /* count/min/max aggregate */ /* Identifier numbers for each in-line function */ #define INLINEFUNC_coalesce 0 @@ -17509,44 +16898,51 @@ struct FuncDestructor { ** parameter. */ #define FUNCTION(zName, nArg, iArg, bNC, xFunc) \ - {nArg, SQLITE_FUNC_CONSTANT|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL), \ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_FUNC_CONSTANT|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL), \ SQLITE_INT_TO_PTR(iArg), 0, xFunc, 0, 0, 0, #zName, {0} } #define VFUNCTION(zName, nArg, iArg, bNC, xFunc) \ - {nArg, SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL), \ + {nArg, SQLITE_FUNC_BUILTIN|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL), \ SQLITE_INT_TO_PTR(iArg), 0, xFunc, 0, 0, 0, #zName, {0} } #define SFUNCTION(zName, nArg, iArg, bNC, xFunc) \ - {nArg, SQLITE_UTF8|SQLITE_DIRECTONLY|SQLITE_FUNC_UNSAFE, \ + {nArg, SQLITE_FUNC_BUILTIN|SQLITE_UTF8|SQLITE_DIRECTONLY|SQLITE_FUNC_UNSAFE, \ SQLITE_INT_TO_PTR(iArg), 0, xFunc, 0, 0, 0, #zName, {0} } #define MFUNCTION(zName, nArg, xPtr, xFunc) \ - {nArg, SQLITE_FUNC_CONSTANT|SQLITE_UTF8, \ + {nArg, SQLITE_FUNC_BUILTIN|SQLITE_FUNC_CONSTANT|SQLITE_UTF8, \ xPtr, 0, xFunc, 0, 0, 0, #zName, {0} } #define INLINE_FUNC(zName, nArg, iArg, mFlags) \ - {nArg, SQLITE_UTF8|SQLITE_FUNC_INLINE|SQLITE_FUNC_CONSTANT|(mFlags), \ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_UTF8|SQLITE_FUNC_INLINE|SQLITE_FUNC_CONSTANT|(mFlags), \ SQLITE_INT_TO_PTR(iArg), 0, noopFunc, 0, 0, 0, #zName, {0} } #define TEST_FUNC(zName, nArg, iArg, mFlags) \ - {nArg, SQLITE_UTF8|SQLITE_FUNC_INTERNAL|SQLITE_FUNC_TEST| \ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_UTF8|SQLITE_FUNC_INTERNAL|SQLITE_FUNC_TEST| \ SQLITE_FUNC_INLINE|SQLITE_FUNC_CONSTANT|(mFlags), \ SQLITE_INT_TO_PTR(iArg), 0, noopFunc, 0, 0, 0, #zName, {0} } #define DFUNCTION(zName, nArg, iArg, bNC, xFunc) \ - {nArg, SQLITE_FUNC_SLOCHNG|SQLITE_UTF8, \ + {nArg, SQLITE_FUNC_BUILTIN|SQLITE_FUNC_SLOCHNG|SQLITE_UTF8, \ 0, 0, xFunc, 0, 0, 0, #zName, {0} } #define PURE_DATE(zName, nArg, iArg, bNC, xFunc) \ - {nArg, SQLITE_FUNC_SLOCHNG|SQLITE_UTF8|SQLITE_FUNC_CONSTANT, \ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_FUNC_SLOCHNG|SQLITE_UTF8|SQLITE_FUNC_CONSTANT, \ (void*)&sqlite3Config, 0, xFunc, 0, 0, 0, #zName, {0} } #define FUNCTION2(zName, nArg, iArg, bNC, xFunc, extraFlags) \ - 
{nArg,SQLITE_FUNC_CONSTANT|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL)|extraFlags,\ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_FUNC_CONSTANT|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL)|extraFlags,\ SQLITE_INT_TO_PTR(iArg), 0, xFunc, 0, 0, 0, #zName, {0} } #define STR_FUNCTION(zName, nArg, pArg, bNC, xFunc) \ - {nArg, SQLITE_FUNC_SLOCHNG|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL), \ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_FUNC_SLOCHNG|SQLITE_UTF8|(bNC*SQLITE_FUNC_NEEDCOLL), \ pArg, 0, xFunc, 0, 0, 0, #zName, } #define LIKEFUNC(zName, nArg, arg, flags) \ - {nArg, SQLITE_FUNC_CONSTANT|SQLITE_UTF8|flags, \ + {nArg, SQLITE_FUNC_BUILTIN|SQLITE_FUNC_CONSTANT|SQLITE_UTF8|flags, \ (void *)arg, 0, likeFunc, 0, 0, 0, #zName, {0} } #define WAGGREGATE(zName, nArg, arg, nc, xStep, xFinal, xValue, xInverse, f) \ - {nArg, SQLITE_UTF8|(nc*SQLITE_FUNC_NEEDCOLL)|f, \ + {nArg, SQLITE_FUNC_BUILTIN|SQLITE_UTF8|(nc*SQLITE_FUNC_NEEDCOLL)|f, \ SQLITE_INT_TO_PTR(arg), 0, xStep,xFinal,xValue,xInverse,#zName, {0}} #define INTERNAL_FUNCTION(zName, nArg, xFunc) \ - {nArg, SQLITE_FUNC_INTERNAL|SQLITE_UTF8|SQLITE_FUNC_CONSTANT, \ + {nArg, SQLITE_FUNC_BUILTIN|\ + SQLITE_FUNC_INTERNAL|SQLITE_UTF8|SQLITE_FUNC_CONSTANT, \ 0, 0, xFunc, 0, 0, 0, #zName, {0} } @@ -17602,18 +16998,42 @@ struct Module { ** or equal to the table column index. It is ** equal if and only if there are no VIRTUAL ** columns to the left. +** +** Notes on zCnName: +** The zCnName field stores the name of the column, the datatype of the +** column, and the collating sequence for the column, in that order, all in +** a single allocation. Each string is 0x00 terminated. The datatype +** is only included if the COLFLAG_HASTYPE bit of colFlags is set and the +** collating sequence name is only included if the COLFLAG_HASCOLL bit is +** set. */ struct Column { - char *zName; /* Name of this column, \000, then the type */ - Expr *pDflt; /* Default value or GENERATED ALWAYS AS value */ - char *zColl; /* Collating sequence. If NULL, use the default */ - u8 notNull; /* An OE_ code for handling a NOT NULL constraint */ - char affinity; /* One of the SQLITE_AFF_... values */ - u8 szEst; /* Estimated size of value in this column. sizeof(INT)==1 */ - u8 hName; /* Column name hash for faster lookup */ - u16 colFlags; /* Boolean properties. See COLFLAG_ defines below */ + char *zCnName; /* Name of this column */ + unsigned notNull :4; /* An OE_ code for handling a NOT NULL constraint */ + unsigned eCType :4; /* One of the standard types */ + char affinity; /* One of the SQLITE_AFF_... values */ + u8 szEst; /* Est size of value in this column. sizeof(INT)==1 */ + u8 hName; /* Column name hash for faster lookup */ + u16 iDflt; /* 1-based index of DEFAULT. 0 means "none" */ + u16 colFlags; /* Boolean properties. See COLFLAG_ defines below */ }; +/* Allowed values for Column.eCType. +** +** Values must match entries in the global constant arrays +** sqlite3StdTypeLen[] and sqlite3StdType[]. Each value is one more +** than the offset into these arrays for the corresponding name. +** Adjust the SQLITE_N_STDTYPE value if adding or removing entries. +*/ +#define COLTYPE_CUSTOM 0 /* Type appended to zName */ +#define COLTYPE_ANY 1 +#define COLTYPE_BLOB 2 +#define COLTYPE_INT 3 +#define COLTYPE_INTEGER 4 +#define COLTYPE_REAL 5 +#define COLTYPE_TEXT 6 +#define SQLITE_N_STDTYPE 6 /* Number of standard types */ + /* Allowed values for Column.colFlags. ** ** Constraints: @@ -17630,6 +17050,7 @@ struct Column { #define COLFLAG_STORED 0x0040 /* GENERATED ALWAYS AS ... 
STORED */ #define COLFLAG_NOTAVAIL 0x0080 /* STORED column not yet calculated */ #define COLFLAG_BUSY 0x0100 /* Blocks recursion on GENERATED columns */ +#define COLFLAG_HASCOLL 0x0200 /* Has collating sequence name in zCnName */ #define COLFLAG_GENERATED 0x0060 /* Combo: _STORED, _VIRTUAL */ #define COLFLAG_NOINSERT 0x0062 /* Combo: _HIDDEN, _STORED, _VIRTUAL */ @@ -17759,15 +17180,13 @@ struct VTable { #define SQLITE_VTABRISK_High 2 /* -** The schema for each SQL table and view is represented in memory -** by an instance of the following structure. +** The schema for each SQL table, virtual table, and view is represented +** in memory by an instance of the following structure. */ struct Table { char *zName; /* Name of the table or view */ Column *aCol; /* Information about each column */ Index *pIndex; /* List of SQL indexes on this table. */ - Select *pSelect; /* NULL for tables. Points to definition if a view. */ - FKey *pFKey; /* Linked list of all foreign keys in this table */ char *zColAff; /* String defining the affinity of each column */ ExprList *pCheck; /* All CHECK constraints */ /* ... also used as column name list in a VIEW */ @@ -17783,15 +17202,24 @@ struct Table { LogEst costMult; /* Cost multiplier for using this table */ #endif u8 keyConf; /* What to do in case of uniqueness conflict on iPKey */ -#ifndef SQLITE_OMIT_ALTERTABLE - int addColOffset; /* Offset in CREATE TABLE stmt to add a new column */ -#endif -#ifndef SQLITE_OMIT_VIRTUALTABLE - int nModuleArg; /* Number of arguments to the module */ - char **azModuleArg; /* 0: module 1: schema 2: vtab name 3...: args */ - VTable *pVTable; /* List of VTable objects. */ -#endif - Trigger *pTrigger; /* List of triggers stored in pSchema */ + u8 eTabType; /* 0: normal, 1: virtual, 2: view */ + union { + struct { /* Used by ordinary tables: */ + int addColOffset; /* Offset in CREATE TABLE stmt to add a new column */ + FKey *pFKey; /* Linked list of all foreign keys in this table */ + ExprList *pDfltList; /* DEFAULT clauses on various columns. + ** Or the AS clause for generated columns. */ + } tab; + struct { /* Used by views: */ + Select *pSelect; /* View definition */ + } view; + struct { /* Used by virtual tables only: */ + int nArg; /* Number of arguments to the module */ + char **azArg; /* 0: module 1: schema 2: vtab name 3...: args */ + VTable *p; /* List of VTable objects. */ + } vtab; + } u; + Trigger *pTrigger; /* List of triggers on this object */ Schema *pSchema; /* Schema that contains this table */ }; @@ -17810,24 +17238,35 @@ struct Table { ** TF_HasStored == COLFLAG_STORED ** TF_HasHidden == COLFLAG_HIDDEN */ -#define TF_Readonly 0x0001 /* Read-only system table */ -#define TF_HasHidden 0x0002 /* Has one or more hidden columns */ -#define TF_HasPrimaryKey 0x0004 /* Table has a primary key */ -#define TF_Autoincrement 0x0008 /* Integer primary key is autoincrement */ -#define TF_HasStat1 0x0010 /* nRowLogEst set from sqlite_stat1 */ -#define TF_HasVirtual 0x0020 /* Has one or more VIRTUAL columns */ -#define TF_HasStored 0x0040 /* Has one or more STORED columns */ -#define TF_HasGenerated 0x0060 /* Combo: HasVirtual + HasStored */ -#define TF_WithoutRowid 0x0080 /* No rowid. 
PRIMARY KEY is the key */ -#define TF_StatsUsed 0x0100 /* Query planner decisions affected by +#define TF_Readonly 0x00000001 /* Read-only system table */ +#define TF_HasHidden 0x00000002 /* Has one or more hidden columns */ +#define TF_HasPrimaryKey 0x00000004 /* Table has a primary key */ +#define TF_Autoincrement 0x00000008 /* Integer primary key is autoincrement */ +#define TF_HasStat1 0x00000010 /* nRowLogEst set from sqlite_stat1 */ +#define TF_HasVirtual 0x00000020 /* Has one or more VIRTUAL columns */ +#define TF_HasStored 0x00000040 /* Has one or more STORED columns */ +#define TF_HasGenerated 0x00000060 /* Combo: HasVirtual + HasStored */ +#define TF_WithoutRowid 0x00000080 /* No rowid. PRIMARY KEY is the key */ +#define TF_StatsUsed 0x00000100 /* Query planner decisions affected by ** Index.aiRowLogEst[] values */ -#define TF_NoVisibleRowid 0x0200 /* No user-visible "rowid" column */ -#define TF_OOOHidden 0x0400 /* Out-of-Order hidden columns */ -#define TF_HasNotNull 0x0800 /* Contains NOT NULL constraints */ -#define TF_Shadow 0x1000 /* True for a shadow table */ -#define TF_HasStat4 0x2000 /* STAT4 info available for this table */ -#define TF_Ephemeral 0x4000 /* An ephemeral table */ -#define TF_Eponymous 0x8000 /* An eponymous virtual table */ +#define TF_NoVisibleRowid 0x00000200 /* No user-visible "rowid" column */ +#define TF_OOOHidden 0x00000400 /* Out-of-Order hidden columns */ +#define TF_HasNotNull 0x00000800 /* Contains NOT NULL constraints */ +#define TF_Shadow 0x00001000 /* True for a shadow table */ +#define TF_HasStat4 0x00002000 /* STAT4 info available for this table */ +#define TF_Ephemeral 0x00004000 /* An ephemeral table */ +#define TF_Eponymous 0x00008000 /* An eponymous virtual table */ +#define TF_Strict 0x00010000 /* STRICT mode */ + +/* +** Allowed values for Table.eTabType +*/ +#define TABTYP_NORM 0 /* Ordinary table */ +#define TABTYP_VTAB 1 /* Virtual table */ +#define TABTYP_VIEW 2 /* A view */ + +#define IsView(X) ((X)->eTabType==TABTYP_VIEW) +#define IsOrdinaryTable(X) ((X)->eTabType==TABTYP_NORM) /* ** Test to see whether or not a table is a virtual table. This is @@ -17835,9 +17274,9 @@ struct Table { ** table support is omitted from the build. */ #ifndef SQLITE_OMIT_VIRTUALTABLE -# define IsVirtual(X) ((X)->nModuleArg) +# define IsVirtual(X) ((X)->eTabType==TABTYP_VTAB) # define ExprIsVtab(X) \ - ((X)->op==TK_COLUMN && (X)->y.pTab!=0 && (X)->y.pTab->nModuleArg) + ((X)->op==TK_COLUMN && (X)->y.pTab!=0 && (X)->y.pTab->eTabType==TABTYP_VTAB) #else # define IsVirtual(X) 0 # define ExprIsVtab(X) 0 @@ -18226,10 +17665,10 @@ typedef int ynVar; ** tree. ** ** If the expression is an SQL literal (TK_INTEGER, TK_FLOAT, TK_BLOB, -** or TK_STRING), then Expr.token contains the text of the SQL literal. If -** the expression is a variable (TK_VARIABLE), then Expr.token contains the +** or TK_STRING), then Expr.u.zToken contains the text of the SQL literal. If +** the expression is a variable (TK_VARIABLE), then Expr.u.zToken contains the ** variable name. Finally, if the expression is an SQL function (TK_FUNCTION), -** then Expr.token contains the name of the function. +** then Expr.u.zToken contains the name of the function. ** ** Expr.pRight and Expr.pLeft are the left and right subexpressions of a ** binary operator. Either or both may be NULL. @@ -18269,7 +17708,7 @@ typedef int ynVar; ** help reduce memory requirements, sometimes an Expr object will be ** truncated. 
And to reduce the number of memory allocations, sometimes ** two or more Expr objects will be stored in a single memory allocation, -** together with Expr.zToken strings. +** together with Expr.u.zToken strings. ** ** If the EP_Reduced and EP_TokenOnly flags are set when ** an Expr object is truncated. When EP_Reduced is set, then all @@ -18338,8 +17777,7 @@ struct Expr { } y; }; -/* -** The following are the meanings of bits in the Expr.flags field. +/* The following are the meanings of bits in the Expr.flags field. ** Value restrictions: ** ** EP_Agg == NC_HasAgg == SF_HasAgg @@ -18378,14 +17816,12 @@ struct Expr { #define EP_FromDDL 0x40000000 /* Originates from sqlite_schema */ /* 0x80000000 // Available */ -/* -** The EP_Propagate mask is a set of properties that automatically propagate +/* The EP_Propagate mask is a set of properties that automatically propagate ** upwards into parent nodes. */ #define EP_Propagate (EP_Collate|EP_Subquery|EP_HasFunc) -/* -** These macros can be used to test, set, or clear bits in the +/* Macros can be used to test, set, or clear bits in the ** Expr.flags field. */ #define ExprHasProperty(E,P) (((E)->flags&(P))!=0) @@ -18395,6 +17831,16 @@ struct Expr { #define ExprAlwaysTrue(E) (((E)->flags&(EP_FromJoin|EP_IsTrue))==EP_IsTrue) #define ExprAlwaysFalse(E) (((E)->flags&(EP_FromJoin|EP_IsFalse))==EP_IsFalse) +/* Macros used to ensure that the correct members of unions are accessed +** in Expr. +*/ +#define ExprUseUToken(E) (((E)->flags&EP_IntValue)==0) +#define ExprUseUValue(E) (((E)->flags&EP_IntValue)!=0) +#define ExprUseXList(E) (((E)->flags&EP_xIsSelect)==0) +#define ExprUseXSelect(E) (((E)->flags&EP_xIsSelect)!=0) +#define ExprUseYTab(E) (((E)->flags&(EP_WinFunc|EP_Subrtn))==0) +#define ExprUseYWin(E) (((E)->flags&EP_WinFunc)!=0) +#define ExprUseYSub(E) (((E)->flags&EP_Subrtn)!=0) /* Flags for use with Expr.vvaFlags */ @@ -18477,11 +17923,12 @@ struct ExprList { unsigned bSorterRef :1; /* Defer evaluation until after sorting */ unsigned bNulls: 1; /* True if explicit "NULLS FIRST/LAST" */ union { - struct { + struct { /* Used by any ExprList other than Parse.pConsExpr */ u16 iOrderByCol; /* For ORDER BY, column number in result set */ u16 iAlias; /* Index into Parse.aAlias[] for zName */ } x; - int iConstExprReg; /* Register in which Expr value is cached */ + int iConstExprReg; /* Register in which Expr value is cached. Used only + ** by Parse.pConstExpr */ } u; } a[1]; /* One slot for each expression in the list */ }; @@ -18519,6 +17966,13 @@ struct IdList { /* ** The SrcItem object represents a single term in the FROM clause of a query. ** The SrcList object is mostly an array of SrcItems. +** +** Union member validity: +** +** u1.zIndexedBy fg.isIndexedBy && !fg.isTabFunc +** u1.pFuncArg fg.isTabFunc && !fg.isIndexedBy +** u2.pIBIndex fg.isIndexedBy && !fg.isCte +** u2.pCteUse fg.isCte && !fg.isIndexedBy */ struct SrcItem { Schema *pSchema; /* Schema to which this item is fixed */ @@ -18667,31 +18121,33 @@ struct NameContext { ** Allowed values for the NameContext, ncFlags field. 
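**
** Illustrative use of the ExprUse* guards defined above (a sketch, not code
** from this diff; nItem is a hypothetical local): they are meant to be
** checked immediately before the corresponding union member is dereferenced,
** e.g.
**
**     assert( ExprUseXList(pExpr) );
**     nItem = pExpr->x.pList->nExpr;
**
** so that an Expr whose flags say the other union member is live is caught
** by the assert() in debug builds rather than silently misread.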
** ** Value constraints (all checked via assert()): -** NC_HasAgg == SF_HasAgg == EP_Agg -** NC_MinMaxAgg == SF_MinMaxAgg == SQLITE_FUNC_MINMAX +** NC_HasAgg == SF_HasAgg == EP_Agg +** NC_MinMaxAgg == SF_MinMaxAgg == SQLITE_FUNC_MINMAX +** NC_OrderAgg == SF_OrderByReqd == SQLITE_FUNC_ANYORDER ** NC_HasWin == EP_Win ** */ -#define NC_AllowAgg 0x00001 /* Aggregate functions are allowed here */ -#define NC_PartIdx 0x00002 /* True if resolving a partial index WHERE */ -#define NC_IsCheck 0x00004 /* True if resolving a CHECK constraint */ -#define NC_GenCol 0x00008 /* True for a GENERATED ALWAYS AS clause */ -#define NC_HasAgg 0x00010 /* One or more aggregate functions seen */ -#define NC_IdxExpr 0x00020 /* True if resolving columns of CREATE INDEX */ -#define NC_SelfRef 0x0002e /* Combo: PartIdx, isCheck, GenCol, and IdxExpr */ -#define NC_VarSelect 0x00040 /* A correlated subquery has been seen */ -#define NC_UEList 0x00080 /* True if uNC.pEList is used */ -#define NC_UAggInfo 0x00100 /* True if uNC.pAggInfo is used */ -#define NC_UUpsert 0x00200 /* True if uNC.pUpsert is used */ -#define NC_UBaseReg 0x00400 /* True if uNC.iBaseReg is used */ -#define NC_MinMaxAgg 0x01000 /* min/max aggregates seen. See note above */ -#define NC_Complex 0x02000 /* True if a function or subquery seen */ -#define NC_AllowWin 0x04000 /* Window functions are allowed here */ -#define NC_HasWin 0x08000 /* One or more window functions seen */ -#define NC_IsDDL 0x10000 /* Resolving names in a CREATE statement */ -#define NC_InAggFunc 0x20000 /* True if analyzing arguments to an agg func */ -#define NC_FromDDL 0x40000 /* SQL text comes from sqlite_schema */ -#define NC_NoSelect 0x80000 /* Do not descend into sub-selects */ +#define NC_AllowAgg 0x000001 /* Aggregate functions are allowed here */ +#define NC_PartIdx 0x000002 /* True if resolving a partial index WHERE */ +#define NC_IsCheck 0x000004 /* True if resolving a CHECK constraint */ +#define NC_GenCol 0x000008 /* True for a GENERATED ALWAYS AS clause */ +#define NC_HasAgg 0x000010 /* One or more aggregate functions seen */ +#define NC_IdxExpr 0x000020 /* True if resolving columns of CREATE INDEX */ +#define NC_SelfRef 0x00002e /* Combo: PartIdx, isCheck, GenCol, and IdxExpr */ +#define NC_VarSelect 0x000040 /* A correlated subquery has been seen */ +#define NC_UEList 0x000080 /* True if uNC.pEList is used */ +#define NC_UAggInfo 0x000100 /* True if uNC.pAggInfo is used */ +#define NC_UUpsert 0x000200 /* True if uNC.pUpsert is used */ +#define NC_UBaseReg 0x000400 /* True if uNC.iBaseReg is used */ +#define NC_MinMaxAgg 0x001000 /* min/max aggregates seen. See note above */ +#define NC_Complex 0x002000 /* True if a function or subquery seen */ +#define NC_AllowWin 0x004000 /* Window functions are allowed here */ +#define NC_HasWin 0x008000 /* One or more window functions seen */ +#define NC_IsDDL 0x010000 /* Resolving names in a CREATE statement */ +#define NC_InAggFunc 0x020000 /* True if analyzing arguments to an agg func */ +#define NC_FromDDL 0x040000 /* SQL text comes from sqlite_schema */ +#define NC_NoSelect 0x080000 /* Do not descend into sub-selects */ +#define NC_OrderAgg 0x8000000 /* Has an aggregate other than count/min/max */ /* ** An instance of the following object describes a single ON CONFLICT @@ -18774,9 +18230,10 @@ struct Select { ** "Select Flag". 
** ** Value constraints (all checked via assert()) -** SF_HasAgg == NC_HasAgg -** SF_MinMaxAgg == NC_MinMaxAgg == SQLITE_FUNC_MINMAX -** SF_FixedLimit == WHERE_USE_LIMIT +** SF_HasAgg == NC_HasAgg +** SF_MinMaxAgg == NC_MinMaxAgg == SQLITE_FUNC_MINMAX +** SF_OrderByReqd == NC_OrderAgg == SQLITE_FUNC_ANYORDER +** SF_FixedLimit == WHERE_USE_LIMIT */ #define SF_Distinct 0x0000001 /* Output should be DISTINCT */ #define SF_All 0x0000002 /* Includes the ALL keyword */ @@ -18801,10 +18258,11 @@ struct Select { #define SF_WinRewrite 0x0100000 /* Window function rewrite accomplished */ #define SF_View 0x0200000 /* SELECT statement is a view */ #define SF_NoopOrderBy 0x0400000 /* ORDER BY is ignored for this query */ -#define SF_UpdateFrom 0x0800000 /* Statement is an UPDATE...FROM */ +#define SF_UFSrcCheck 0x0800000 /* Check pSrc as required by UPDATE...FROM */ #define SF_PushDown 0x1000000 /* SELECT has be modified by push-down opt */ #define SF_MultiPart 0x2000000 /* Has multiple incompatible PARTITIONs */ #define SF_CopyCte 0x4000000 /* SELECT statement is a copy of a CTE */ +#define SF_OrderByReqd 0x8000000 /* The ORDER BY clause may not be omitted */ /* ** The results of a SELECT can be distributed in several ways, as defined @@ -19046,7 +18504,6 @@ struct Parse { AutoincInfo *pAinc; /* Information about AUTOINCREMENT counters */ Parse *pToplevel; /* Parse structure for main program (or NULL) */ Table *pTriggerTab; /* Table triggers are being coded for */ - Parse *pParentParse; /* Parent parser if this parser is nested */ union { int addrCrTab; /* Address of OP_CreateBtree on CREATE TABLE */ Returning *pReturning; /* The RETURNING clause */ @@ -19109,6 +18566,8 @@ struct Parse { #endif }; +/* Allowed values for Parse.eParseMode +*/ #define PARSE_MODE_NORMAL 0 #define PARSE_MODE_DECLARE_VTAB 1 #define PARSE_MODE_RENAME 2 @@ -19330,8 +18789,10 @@ typedef struct { /* ** Allowed values for mInitFlags */ +#define INITFLAG_AlterMask 0x0003 /* Types of ALTER */ #define INITFLAG_AlterRename 0x0001 /* Reparse after a RENAME */ #define INITFLAG_AlterDrop 0x0002 /* Reparse after a DROP COLUMN */ +#define INITFLAG_AlterAdd 0x0003 /* Reparse after an ADD COLUMN */ /* Tuning parameters are set using SQLITE_TESTCTRL_TUNE and are controlled ** on debug-builds of the CLI using ".testctrl tune ID VALUE". 
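**
** A hedged sketch of how the INITFLAG_AlterMask value added above can be
** used (not code from this diff): the three ALTER reparse reasons occupy the
** low two bits of the mInitFlags word, so the kind of ALTER that caused a
** schema reparse can be recovered by masking:
**
**     switch( mInitFlags & INITFLAG_AlterMask ){
**       case INITFLAG_AlterRename:  break;
**       case INITFLAG_AlterDrop:    break;
**       case INITFLAG_AlterAdd:     break;
**     }
**
** Each case would handle the corresponding reason. INITFLAG_AlterAdd (0x0003)
** deliberately sets both low bits, which is why a mask-and-compare is needed
** to distinguish it from the individual RENAME and DROP COLUMN bits.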
Tuning @@ -19452,8 +18913,8 @@ struct Walker { int n; /* A counter */ int iCur; /* A cursor number */ SrcList *pSrcList; /* FROM clause */ - struct SrcCount *pSrcCount; /* Counting column references */ struct CCurHint *pCCurHint; /* Used by codeCursorHint() */ + struct RefSrcList *pRefSrcList; /* sqlite3ReferencesSrcList() */ int *aiCol; /* array of column indexes */ struct IdxCover *pIdxCover; /* Check for index coverage */ struct IdxExprTrans *pIdxTrans; /* Convert idxed expr to column */ @@ -19638,7 +19099,7 @@ SQLITE_PRIVATE void sqlite3WindowListDelete(sqlite3 *db, Window *p); SQLITE_PRIVATE Window *sqlite3WindowAlloc(Parse*, int, int, Expr*, int , Expr*, u8); SQLITE_PRIVATE void sqlite3WindowAttach(Parse*, Expr*, Window*); SQLITE_PRIVATE void sqlite3WindowLink(Select *pSel, Window *pWin); -SQLITE_PRIVATE int sqlite3WindowCompare(Parse*, Window*, Window*, int); +SQLITE_PRIVATE int sqlite3WindowCompare(const Parse*, const Window*, const Window*, int); SQLITE_PRIVATE void sqlite3WindowCodeInit(Parse*, Select*); SQLITE_PRIVATE void sqlite3WindowCodeStep(Parse*, Select*, WhereInfo*, int, int); SQLITE_PRIVATE int sqlite3WindowRewrite(Parse*, Select*); @@ -19770,8 +19231,8 @@ SQLITE_PRIVATE void *sqlite3DbReallocOrFree(sqlite3 *, void *, u64); SQLITE_PRIVATE void *sqlite3DbRealloc(sqlite3 *, void *, u64); SQLITE_PRIVATE void sqlite3DbFree(sqlite3*, void*); SQLITE_PRIVATE void sqlite3DbFreeNN(sqlite3*, void*); -SQLITE_PRIVATE int sqlite3MallocSize(void*); -SQLITE_PRIVATE int sqlite3DbMallocSize(sqlite3*, void*); +SQLITE_PRIVATE int sqlite3MallocSize(const void*); +SQLITE_PRIVATE int sqlite3DbMallocSize(sqlite3*, const void*); SQLITE_PRIVATE void *sqlite3PageMalloc(int); SQLITE_PRIVATE void sqlite3PageFree(void*); SQLITE_PRIVATE void sqlite3MemSetDefault(void); @@ -19887,6 +19348,7 @@ SQLITE_PRIVATE void sqlite3ErrorMsg(Parse*, const char*, ...); SQLITE_PRIVATE int sqlite3ErrorToParser(sqlite3*,int); SQLITE_PRIVATE void sqlite3Dequote(char*); SQLITE_PRIVATE void sqlite3DequoteExpr(Expr*); +SQLITE_PRIVATE void sqlite3DequoteToken(Token*); SQLITE_PRIVATE void sqlite3TokenInit(Token*,char*); SQLITE_PRIVATE int sqlite3KeywordCode(const unsigned char*, int); SQLITE_PRIVATE int sqlite3RunParser(Parse*, const char*, char **); @@ -19906,16 +19368,17 @@ SQLITE_PRIVATE Expr *sqlite3PExpr(Parse*, int, Expr*, Expr*); SQLITE_PRIVATE void sqlite3PExprAddSelect(Parse*, Expr*, Select*); SQLITE_PRIVATE Expr *sqlite3ExprAnd(Parse*,Expr*, Expr*); SQLITE_PRIVATE Expr *sqlite3ExprSimplifiedAndOr(Expr*); -SQLITE_PRIVATE Expr *sqlite3ExprFunction(Parse*,ExprList*, Token*, int); -SQLITE_PRIVATE void sqlite3ExprFunctionUsable(Parse*,Expr*,FuncDef*); +SQLITE_PRIVATE Expr *sqlite3ExprFunction(Parse*,ExprList*, const Token*, int); +SQLITE_PRIVATE void sqlite3ExprFunctionUsable(Parse*,const Expr*,const FuncDef*); SQLITE_PRIVATE void sqlite3ExprAssignVarNumber(Parse*, Expr*, u32); SQLITE_PRIVATE void sqlite3ExprDelete(sqlite3*, Expr*); SQLITE_PRIVATE void sqlite3ExprDeferredDelete(Parse*, Expr*); SQLITE_PRIVATE void sqlite3ExprUnmapAndDelete(Parse*, Expr*); SQLITE_PRIVATE ExprList *sqlite3ExprListAppend(Parse*,ExprList*,Expr*); SQLITE_PRIVATE ExprList *sqlite3ExprListAppendVector(Parse*,ExprList*,IdList*,Expr*); +SQLITE_PRIVATE Select *sqlite3ExprListToValues(Parse*, int, ExprList*); SQLITE_PRIVATE void sqlite3ExprListSetSortOrder(ExprList*,int,int); -SQLITE_PRIVATE void sqlite3ExprListSetName(Parse*,ExprList*,Token*,int); +SQLITE_PRIVATE void sqlite3ExprListSetName(Parse*,ExprList*,const Token*,int); SQLITE_PRIVATE void 
sqlite3ExprListSetSpan(Parse*,ExprList*,const char*,const char*); SQLITE_PRIVATE void sqlite3ExprListDelete(sqlite3*, ExprList*); SQLITE_PRIVATE u32 sqlite3ExprListFlags(const ExprList*); @@ -19931,6 +19394,10 @@ SQLITE_PRIVATE void sqlite3ResetAllSchemasOfConnection(sqlite3*); SQLITE_PRIVATE void sqlite3ResetOneSchema(sqlite3*,int); SQLITE_PRIVATE void sqlite3CollapseDatabaseArray(sqlite3*); SQLITE_PRIVATE void sqlite3CommitInternalChanges(sqlite3*); +SQLITE_PRIVATE void sqlite3ColumnSetExpr(Parse*,Table*,Column*,Expr*); +SQLITE_PRIVATE Expr *sqlite3ColumnExpr(Table*,Column*); +SQLITE_PRIVATE void sqlite3ColumnSetColl(sqlite3*,Column*,const char*zColl); +SQLITE_PRIVATE const char *sqlite3ColumnColl(Column*); SQLITE_PRIVATE void sqlite3DeleteColumnNames(sqlite3*,Table*); SQLITE_PRIVATE void sqlite3GenerateColumnNames(Parse *pParse, Select *pSelect); SQLITE_PRIVATE int sqlite3ColumnsFromExprList(Parse*,ExprList*,i16*,Column**); @@ -19952,14 +19419,14 @@ SQLITE_PRIVATE void sqlite3ColumnPropertiesFromName(Table*, Column*); #else # define sqlite3ColumnPropertiesFromName(T,C) /* no-op */ #endif -SQLITE_PRIVATE void sqlite3AddColumn(Parse*,Token*,Token*); +SQLITE_PRIVATE void sqlite3AddColumn(Parse*,Token,Token); SQLITE_PRIVATE void sqlite3AddNotNull(Parse*, int); SQLITE_PRIVATE void sqlite3AddPrimaryKey(Parse*, ExprList*, int, int, int); SQLITE_PRIVATE void sqlite3AddCheckConstraint(Parse*, Expr*, const char*, const char*); SQLITE_PRIVATE void sqlite3AddDefaultValue(Parse*,Expr*,const char*,const char*); SQLITE_PRIVATE void sqlite3AddCollateType(Parse*, Token*); SQLITE_PRIVATE void sqlite3AddGenerated(Parse*,Expr*,Token*); -SQLITE_PRIVATE void sqlite3EndTable(Parse*,Token*,Token*,u8,Select*); +SQLITE_PRIVATE void sqlite3EndTable(Parse*,Token*,Token*,u32,Select*); SQLITE_PRIVATE void sqlite3AddReturning(Parse*,ExprList*); SQLITE_PRIVATE int sqlite3ParseUri(const char*,const char*,unsigned int*, sqlite3_vfs**,char**,char **); @@ -20069,7 +19536,7 @@ SQLITE_PRIVATE void sqlite3ExprCodeGetColumnOfTable(Vdbe*, Table*, int, int, int SQLITE_PRIVATE void sqlite3ExprCodeMove(Parse*, int, int, int); SQLITE_PRIVATE void sqlite3ExprCode(Parse*, Expr*, int); #ifndef SQLITE_OMIT_GENERATED_COLUMNS -SQLITE_PRIVATE void sqlite3ExprCodeGeneratedColumn(Parse*, Column*, int); +SQLITE_PRIVATE void sqlite3ExprCodeGeneratedColumn(Parse*, Table*, Column*, int); #endif SQLITE_PRIVATE void sqlite3ExprCodeCopy(Parse*, Expr*, int); SQLITE_PRIVATE void sqlite3ExprCodeFactorable(Parse*, Expr*, int); @@ -20088,23 +19555,24 @@ SQLITE_PRIVATE Table *sqlite3FindTable(sqlite3*,const char*, const char*); #define LOCATE_VIEW 0x01 #define LOCATE_NOERR 0x02 SQLITE_PRIVATE Table *sqlite3LocateTable(Parse*,u32 flags,const char*, const char*); +SQLITE_PRIVATE const char *sqlite3PreferredTableName(const char*); SQLITE_PRIVATE Table *sqlite3LocateTableItem(Parse*,u32 flags,SrcItem *); SQLITE_PRIVATE Index *sqlite3FindIndex(sqlite3*,const char*, const char*); SQLITE_PRIVATE void sqlite3UnlinkAndDeleteTable(sqlite3*,int,const char*); SQLITE_PRIVATE void sqlite3UnlinkAndDeleteIndex(sqlite3*,int,const char*); SQLITE_PRIVATE void sqlite3Vacuum(Parse*,Token*,Expr*); SQLITE_PRIVATE int sqlite3RunVacuum(char**, sqlite3*, int, sqlite3_value*); -SQLITE_PRIVATE char *sqlite3NameFromToken(sqlite3*, Token*); -SQLITE_PRIVATE int sqlite3ExprCompare(Parse*,Expr*, Expr*, int); -SQLITE_PRIVATE int sqlite3ExprCompareSkip(Expr*, Expr*, int); -SQLITE_PRIVATE int sqlite3ExprListCompare(ExprList*, ExprList*, int); -SQLITE_PRIVATE int 
sqlite3ExprImpliesExpr(Parse*,Expr*, Expr*, int); +SQLITE_PRIVATE char *sqlite3NameFromToken(sqlite3*, const Token*); +SQLITE_PRIVATE int sqlite3ExprCompare(const Parse*,const Expr*,const Expr*, int); +SQLITE_PRIVATE int sqlite3ExprCompareSkip(Expr*,Expr*,int); +SQLITE_PRIVATE int sqlite3ExprListCompare(const ExprList*,const ExprList*, int); +SQLITE_PRIVATE int sqlite3ExprImpliesExpr(const Parse*,const Expr*,const Expr*, int); SQLITE_PRIVATE int sqlite3ExprImpliesNonNullRow(Expr*,int); SQLITE_PRIVATE void sqlite3AggInfoPersistWalkerInit(Walker*,Parse*); SQLITE_PRIVATE void sqlite3ExprAnalyzeAggregates(NameContext*, Expr*); SQLITE_PRIVATE void sqlite3ExprAnalyzeAggList(NameContext*,ExprList*); SQLITE_PRIVATE int sqlite3ExprCoveredByIndex(Expr*, int iCur, Index *pIdx); -SQLITE_PRIVATE int sqlite3FunctionUsesThisSrc(Expr*, SrcList*); +SQLITE_PRIVATE int sqlite3ReferencesSrcList(Parse*, Expr*, SrcList*); SQLITE_PRIVATE Vdbe *sqlite3GetVdbe(Parse*); #ifndef SQLITE_UNTESTABLE SQLITE_PRIVATE void sqlite3PrngSaveState(void); @@ -20129,7 +19597,7 @@ SQLITE_PRIVATE int sqlite3ExprIsTableConstant(Expr*,int); #ifdef SQLITE_ENABLE_CURSOR_HINTS SQLITE_PRIVATE int sqlite3ExprContainsSubquery(Expr*); #endif -SQLITE_PRIVATE int sqlite3ExprIsInteger(Expr*, int*); +SQLITE_PRIVATE int sqlite3ExprIsInteger(const Expr*, int*); SQLITE_PRIVATE int sqlite3ExprCanBeNull(const Expr*); SQLITE_PRIVATE int sqlite3ExprNeedsNoAffinityChange(const Expr*, char); SQLITE_PRIVATE int sqlite3IsRowid(const char*); @@ -20154,11 +19622,11 @@ SQLITE_PRIVATE void sqlite3MayAbort(Parse*); SQLITE_PRIVATE void sqlite3HaltConstraint(Parse*, int, int, char*, i8, u8); SQLITE_PRIVATE void sqlite3UniqueConstraint(Parse*, int, Index*); SQLITE_PRIVATE void sqlite3RowidConstraint(Parse*, int, Table*); -SQLITE_PRIVATE Expr *sqlite3ExprDup(sqlite3*,Expr*,int); -SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3*,ExprList*,int); -SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3*,SrcList*,int); -SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3*,IdList*); -SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3*,Select*,int); +SQLITE_PRIVATE Expr *sqlite3ExprDup(sqlite3*,const Expr*,int); +SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3*,const ExprList*,int); +SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3*,const SrcList*,int); +SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3*,const IdList*); +SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3*,const Select*,int); SQLITE_PRIVATE FuncDef *sqlite3FunctionSearch(int,const char*); SQLITE_PRIVATE void sqlite3InsertBuiltinFuncs(FuncDef*,int); SQLITE_PRIVATE FuncDef *sqlite3FindFunction(sqlite3*,const char*,int,u8,u8); @@ -20296,7 +19764,7 @@ SQLITE_PRIVATE const char *sqlite3IndexAffinityStr(sqlite3*, Index*); SQLITE_PRIVATE void sqlite3TableAffinity(Vdbe*, Table*, int); SQLITE_PRIVATE char sqlite3CompareAffinity(const Expr *pExpr, char aff2); SQLITE_PRIVATE int sqlite3IndexAffinityOk(const Expr *pExpr, char idx_affinity); -SQLITE_PRIVATE char sqlite3TableColumnAffinity(Table*,int); +SQLITE_PRIVATE char sqlite3TableColumnAffinity(const Table*,int); SQLITE_PRIVATE char sqlite3ExprAffinity(const Expr *pExpr); SQLITE_PRIVATE int sqlite3Atoi64(const char*, i64*, int, u8); SQLITE_PRIVATE int sqlite3DecOrHexToI64(const char*, i64*); @@ -20325,14 +19793,14 @@ SQLITE_PRIVATE void sqlite3SetTextEncoding(sqlite3 *db, u8); SQLITE_PRIVATE CollSeq *sqlite3ExprCollSeq(Parse *pParse, const Expr *pExpr); SQLITE_PRIVATE CollSeq *sqlite3ExprNNCollSeq(Parse *pParse, const Expr *pExpr); SQLITE_PRIVATE int 
sqlite3ExprCollSeqMatch(Parse*,const Expr*,const Expr*); -SQLITE_PRIVATE Expr *sqlite3ExprAddCollateToken(Parse *pParse, Expr*, const Token*, int); -SQLITE_PRIVATE Expr *sqlite3ExprAddCollateString(Parse*,Expr*,const char*); +SQLITE_PRIVATE Expr *sqlite3ExprAddCollateToken(const Parse *pParse, Expr*, const Token*, int); +SQLITE_PRIVATE Expr *sqlite3ExprAddCollateString(const Parse*,Expr*,const char*); SQLITE_PRIVATE Expr *sqlite3ExprSkipCollate(Expr*); SQLITE_PRIVATE Expr *sqlite3ExprSkipCollateAndLikely(Expr*); SQLITE_PRIVATE int sqlite3CheckCollSeq(Parse *, CollSeq *); SQLITE_PRIVATE int sqlite3WritableSchema(sqlite3*); SQLITE_PRIVATE int sqlite3CheckObjectName(Parse*, const char*,const char*,const char*); -SQLITE_PRIVATE void sqlite3VdbeSetChanges(sqlite3 *, int); +SQLITE_PRIVATE void sqlite3VdbeSetChanges(sqlite3 *, i64); SQLITE_PRIVATE int sqlite3AddInt64(i64*,i64); SQLITE_PRIVATE int sqlite3SubInt64(i64*,i64); SQLITE_PRIVATE int sqlite3MulInt64(i64*,i64); @@ -20357,11 +19825,15 @@ SQLITE_PRIVATE sqlite3_value *sqlite3ValueNew(sqlite3 *); #ifndef SQLITE_OMIT_UTF16 SQLITE_PRIVATE char *sqlite3Utf16to8(sqlite3 *, const void*, int, u8); #endif -SQLITE_PRIVATE int sqlite3ValueFromExpr(sqlite3 *, Expr *, u8, u8, sqlite3_value **); +SQLITE_PRIVATE int sqlite3ValueFromExpr(sqlite3 *, const Expr *, u8, u8, sqlite3_value **); SQLITE_PRIVATE void sqlite3ValueApplyAffinity(sqlite3_value *, u8, u8); #ifndef SQLITE_AMALGAMATION SQLITE_PRIVATE const unsigned char sqlite3OpcodeProperty[]; SQLITE_PRIVATE const char sqlite3StrBINARY[]; +SQLITE_PRIVATE const unsigned char sqlite3StdTypeLen[]; +SQLITE_PRIVATE const char sqlite3StdTypeAffinity[]; +SQLITE_PRIVATE const char sqlite3StdTypeMap[]; +SQLITE_PRIVATE const char *sqlite3StdType[]; SQLITE_PRIVATE const unsigned char sqlite3UpperToLower[]; SQLITE_PRIVATE const unsigned char *sqlite3aLTb; SQLITE_PRIVATE const unsigned char *sqlite3aEQb; @@ -20405,9 +19877,9 @@ SQLITE_PRIVATE int sqlite3ResolveOrderGroupBy(Parse*, Select*, ExprList*, const SQLITE_PRIVATE void sqlite3ColumnDefault(Vdbe *, Table *, int, int); SQLITE_PRIVATE void sqlite3AlterFinishAddColumn(Parse *, Token *); SQLITE_PRIVATE void sqlite3AlterBeginAddColumn(Parse *, SrcList *); -SQLITE_PRIVATE void sqlite3AlterDropColumn(Parse*, SrcList*, Token*); -SQLITE_PRIVATE void *sqlite3RenameTokenMap(Parse*, void*, Token*); -SQLITE_PRIVATE void sqlite3RenameTokenRemap(Parse*, void *pTo, void *pFrom); +SQLITE_PRIVATE void sqlite3AlterDropColumn(Parse*, SrcList*, const Token*); +SQLITE_PRIVATE const void *sqlite3RenameTokenMap(Parse*, const void*, const Token*); +SQLITE_PRIVATE void sqlite3RenameTokenRemap(Parse*, const void *pTo, const void *pFrom); SQLITE_PRIVATE void sqlite3RenameExprUnmap(Parse*, Expr*); SQLITE_PRIVATE void sqlite3RenameExprlistUnmap(Parse*, ExprList*); SQLITE_PRIVATE CollSeq *sqlite3GetCollSeq(Parse*, u8, CollSeq *, const char*); @@ -20451,6 +19923,8 @@ SQLITE_PRIVATE int sqlite3OpenTempDatabase(Parse *); SQLITE_PRIVATE void sqlite3StrAccumInit(StrAccum*, sqlite3*, char*, int, int); SQLITE_PRIVATE char *sqlite3StrAccumFinish(StrAccum*); +SQLITE_PRIVATE void sqlite3StrAccumSetError(StrAccum*, u8); +SQLITE_PRIVATE void sqlite3ResultStrAccum(sqlite3_context*,StrAccum*); SQLITE_PRIVATE void sqlite3SelectDestInit(SelectDest*,int,int); SQLITE_PRIVATE Expr *sqlite3CreateColumnExpr(sqlite3 *, SrcList *, int, int); @@ -20503,7 +19977,7 @@ SQLITE_PRIVATE int sqlite3Utf8To8(unsigned char*); #endif #ifdef SQLITE_OMIT_VIRTUALTABLE -# define sqlite3VtabClear(Y) +# define sqlite3VtabClear(D,T) 
# define sqlite3VtabSync(X,Y) SQLITE_OK # define sqlite3VtabRollback(X) # define sqlite3VtabCommit(X) @@ -20540,9 +20014,11 @@ SQLITE_PRIVATE int sqlite3ReadOnlyShadowTables(sqlite3 *db); #ifndef SQLITE_OMIT_VIRTUALTABLE SQLITE_PRIVATE int sqlite3ShadowTableName(sqlite3 *db, const char *zName); SQLITE_PRIVATE int sqlite3IsShadowTableOf(sqlite3*,Table*,const char*); +SQLITE_PRIVATE void sqlite3MarkAllShadowTablesOf(sqlite3*, Table*); #else # define sqlite3ShadowTableName(A,B) 0 # define sqlite3IsShadowTableOf(A,B,C) 0 +# define sqlite3MarkAllShadowTablesOf(A,B) #endif SQLITE_PRIVATE int sqlite3VtabEponymousTableInit(Parse*,Module*); SQLITE_PRIVATE void sqlite3VtabEponymousTableClear(sqlite3*,Module*); @@ -20585,7 +20061,7 @@ SQLITE_PRIVATE With *sqlite3WithPush(Parse*, With*, u8); # define sqlite3CteDelete(D,C) # define sqlite3CteWithAdd(P,W,C) ((void*)0) # define sqlite3WithDelete(x,y) -# define sqlite3WithPush(x,y,z) +# define sqlite3WithPush(x,y,z) ((void*)0) #endif #ifndef SQLITE_OMIT_UPSERT SQLITE_PRIVATE Upsert *sqlite3UpsertNew(sqlite3*,ExprList*,Expr*,ExprList*,Expr*,Upsert*); @@ -20682,7 +20158,7 @@ SQLITE_PRIVATE void sqlite3MemJournalOpen(sqlite3_file *); SQLITE_PRIVATE void sqlite3ExprSetHeightAndFlags(Parse *pParse, Expr *p); #if SQLITE_MAX_EXPR_DEPTH>0 -SQLITE_PRIVATE int sqlite3SelectExprHeight(Select *); +SQLITE_PRIVATE int sqlite3SelectExprHeight(const Select *); SQLITE_PRIVATE int sqlite3ExprCheckHeight(Parse*, int); #else #define sqlite3SelectExprHeight(x) 0 @@ -20753,8 +20229,8 @@ SQLITE_API SQLITE_EXTERN void (SQLITE_CDECL *sqlite3IoTrace)(const char*,...); */ #ifdef SQLITE_MEMDEBUG SQLITE_PRIVATE void sqlite3MemdebugSetType(void*,u8); -SQLITE_PRIVATE int sqlite3MemdebugHasType(void*,u8); -SQLITE_PRIVATE int sqlite3MemdebugNoType(void*,u8); +SQLITE_PRIVATE int sqlite3MemdebugHasType(const void*,u8); +SQLITE_PRIVATE int sqlite3MemdebugNoType(const void*,u8); #else # define sqlite3MemdebugSetType(X,Y) /* no-op */ # define sqlite3MemdebugHasType(X,Y) 1 @@ -20779,10 +20255,10 @@ SQLITE_PRIVATE int sqlite3DbpageRegister(sqlite3*); SQLITE_PRIVATE int sqlite3DbstatRegister(sqlite3*); #endif -SQLITE_PRIVATE int sqlite3ExprVectorSize(Expr *pExpr); -SQLITE_PRIVATE int sqlite3ExprIsVector(Expr *pExpr); +SQLITE_PRIVATE int sqlite3ExprVectorSize(const Expr *pExpr); +SQLITE_PRIVATE int sqlite3ExprIsVector(const Expr *pExpr); SQLITE_PRIVATE Expr *sqlite3VectorFieldSubexpr(Expr*, int); -SQLITE_PRIVATE Expr *sqlite3ExprForVectorField(Parse*,Expr*,int); +SQLITE_PRIVATE Expr *sqlite3ExprForVectorField(Parse*,Expr*,int,int); SQLITE_PRIVATE void sqlite3VectorErrorMsg(Parse*, Expr*); #ifndef SQLITE_OMIT_COMPILEOPTION_DIAGS @@ -20792,6 +20268,990 @@ SQLITE_PRIVATE const char **sqlite3CompileOptions(int *pnOpt); #endif /* SQLITEINT_H */ /************** End of sqliteInt.h *******************************************/ +/************** Begin file os_common.h ***************************************/ +/* +** 2004 May 22 +** +** The author disclaims copyright to this source code. In place of +** a legal notice, here is a blessing: +** +** May you do good and not evil. +** May you find forgiveness for yourself and forgive others. +** May you share freely, never taking more than you give. +** +****************************************************************************** +** +** This file contains macros and a little bit of code that is common to +** all of the platform-specific files (os_*.c) and is #included into those +** files. +** +** This file should be #included by the os_*.c files only. 
It is not a +** general purpose header file. +*/ +#ifndef _OS_COMMON_H_ +#define _OS_COMMON_H_ + +/* +** At least two bugs have slipped in because we changed the MEMORY_DEBUG +** macro to SQLITE_DEBUG and some older makefiles have not yet made the +** switch. The following code should catch this problem at compile-time. +*/ +#ifdef MEMORY_DEBUG +# error "The MEMORY_DEBUG macro is obsolete. Use SQLITE_DEBUG instead." +#endif + +/* +** Macros for performance tracing. Normally turned off. Only works +** on i486 hardware. +*/ +#ifdef SQLITE_PERFORMANCE_TRACE + +/* +** hwtime.h contains inline assembler code for implementing +** high-performance timing routines. +*/ +/************** Include hwtime.h in the middle of os_common.h ****************/ +/************** Begin file hwtime.h ******************************************/ +/* +** 2008 May 27 +** +** The author disclaims copyright to this source code. In place of +** a legal notice, here is a blessing: +** +** May you do good and not evil. +** May you find forgiveness for yourself and forgive others. +** May you share freely, never taking more than you give. +** +****************************************************************************** +** +** This file contains inline asm code for retrieving "high-performance" +** counters for x86 and x86_64 class CPUs. +*/ +#ifndef SQLITE_HWTIME_H +#define SQLITE_HWTIME_H + +/* +** The following routine only works on pentium-class (or newer) processors. +** It uses the RDTSC opcode to read the cycle count value out of the +** processor and returns that value. This can be used for high-res +** profiling. +*/ +#if !defined(__STRICT_ANSI__) && \ + (defined(__GNUC__) || defined(_MSC_VER)) && \ + (defined(i386) || defined(__i386__) || defined(_M_IX86)) + + #if defined(__GNUC__) + + __inline__ sqlite_uint64 sqlite3Hwtime(void){ + unsigned int lo, hi; + __asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi)); + return (sqlite_uint64)hi << 32 | lo; + } + + #elif defined(_MSC_VER) + + __declspec(naked) __inline sqlite_uint64 __cdecl sqlite3Hwtime(void){ + __asm { + rdtsc + ret ; return value at EDX:EAX + } + } + + #endif + +#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__x86_64__)) + + __inline__ sqlite_uint64 sqlite3Hwtime(void){ + unsigned long val; + __asm__ __volatile__ ("rdtsc" : "=A" (val)); + return val; + } + +#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__ppc__)) + + __inline__ sqlite_uint64 sqlite3Hwtime(void){ + unsigned long long retval; + unsigned long junk; + __asm__ __volatile__ ("\n\ + 1: mftbu %1\n\ + mftb %L0\n\ + mftbu %0\n\ + cmpw %0,%1\n\ + bne 1b" + : "=r" (retval), "=r" (junk)); + return retval; + } + +#else + + /* + ** asm() is needed for hardware timing support. Without asm(), + ** disable the sqlite3Hwtime() routine. + ** + ** sqlite3Hwtime() is only used for some obscure debugging + ** and analysis configurations, not in any deliverable, so this + ** should not be a great loss. 
+ */ +SQLITE_PRIVATE sqlite_uint64 sqlite3Hwtime(void){ return ((sqlite_uint64)0); } + +#endif + +#endif /* !defined(SQLITE_HWTIME_H) */ + +/************** End of hwtime.h **********************************************/ +/************** Continuing where we left off in os_common.h ******************/ + +static sqlite_uint64 g_start; +static sqlite_uint64 g_elapsed; +#define TIMER_START g_start=sqlite3Hwtime() +#define TIMER_END g_elapsed=sqlite3Hwtime()-g_start +#define TIMER_ELAPSED g_elapsed +#else +#define TIMER_START +#define TIMER_END +#define TIMER_ELAPSED ((sqlite_uint64)0) +#endif + +/* +** If we compile with the SQLITE_TEST macro set, then the following block +** of code will give us the ability to simulate a disk I/O error. This +** is used for testing the I/O recovery logic. +*/ +#if defined(SQLITE_TEST) +SQLITE_API extern int sqlite3_io_error_hit; +SQLITE_API extern int sqlite3_io_error_hardhit; +SQLITE_API extern int sqlite3_io_error_pending; +SQLITE_API extern int sqlite3_io_error_persist; +SQLITE_API extern int sqlite3_io_error_benign; +SQLITE_API extern int sqlite3_diskfull_pending; +SQLITE_API extern int sqlite3_diskfull; +#define SimulateIOErrorBenign(X) sqlite3_io_error_benign=(X) +#define SimulateIOError(CODE) \ + if( (sqlite3_io_error_persist && sqlite3_io_error_hit) \ + || sqlite3_io_error_pending-- == 1 ) \ + { local_ioerr(); CODE; } +static void local_ioerr(){ + IOTRACE(("IOERR\n")); + sqlite3_io_error_hit++; + if( !sqlite3_io_error_benign ) sqlite3_io_error_hardhit++; +} +#define SimulateDiskfullError(CODE) \ + if( sqlite3_diskfull_pending ){ \ + if( sqlite3_diskfull_pending == 1 ){ \ + local_ioerr(); \ + sqlite3_diskfull = 1; \ + sqlite3_io_error_hit = 1; \ + CODE; \ + }else{ \ + sqlite3_diskfull_pending--; \ + } \ + } +#else +#define SimulateIOErrorBenign(X) +#define SimulateIOError(A) +#define SimulateDiskfullError(A) +#endif /* defined(SQLITE_TEST) */ + +/* +** When testing, keep a count of the number of open files. +*/ +#if defined(SQLITE_TEST) +SQLITE_API extern int sqlite3_open_file_count; +#define OpenCounter(X) sqlite3_open_file_count+=(X) +#else +#define OpenCounter(X) +#endif /* defined(SQLITE_TEST) */ + +#endif /* !defined(_OS_COMMON_H_) */ + +/************** End of os_common.h *******************************************/ +/************** Begin file ctime.c *******************************************/ +/* +** 2010 February 23 +** +** The author disclaims copyright to this source code. In place of +** a legal notice, here is a blessing: +** +** May you do good and not evil. +** May you find forgiveness for yourself and forgive others. +** May you share freely, never taking more than you give. +** +************************************************************************* +** +** This file implements routines used to report what compile-time options +** SQLite was built with. +*/ +#ifndef SQLITE_OMIT_COMPILEOPTION_DIAGS /* IMP: R-16824-07538 */ + +/* +** Include the configuration header output by 'configure' if we're using the +** autoconf-based build +*/ +#if defined(_HAVE_SQLITE_CONFIG_H) && !defined(SQLITECONFIG_H) +/* #include "config.h" */ +#define SQLITECONFIG_H 1 +#endif + +/* These macros are provided to "stringify" the value of the define +** for those options in which the value is meaningful. */ +#define CTIMEOPT_VAL_(opt) #opt +#define CTIMEOPT_VAL(opt) CTIMEOPT_VAL_(opt) + +/* Like CTIMEOPT_VAL, but especially for SQLITE_DEFAULT_LOOKASIDE. This +** option requires a separate macro because legal values contain a single +** comma. e.g. 
(-DSQLITE_DEFAULT_LOOKASIDE="100,100") */ +#define CTIMEOPT_VAL2_(opt1,opt2) #opt1 "," #opt2 +#define CTIMEOPT_VAL2(opt) CTIMEOPT_VAL2_(opt) +/* #include "sqliteInt.h" */ + +/* +** An array of names of all compile-time options. This array should +** be sorted A-Z. +** +** This array looks large, but in a typical installation actually uses +** only a handful of compile-time options, so most times this array is usually +** rather short and uses little memory space. +*/ +static const char * const sqlite3azCompileOpt[] = { + +/* +** BEGIN CODE GENERATED BY tool/mkctime.tcl +*/ +#ifdef SQLITE_32BIT_ROWID + "32BIT_ROWID", +#endif +#ifdef SQLITE_4_BYTE_ALIGNED_MALLOC + "4_BYTE_ALIGNED_MALLOC", +#endif +#ifdef SQLITE_64BIT_STATS + "64BIT_STATS", +#endif +#ifdef SQLITE_ALLOW_COVERING_INDEX_SCAN +# if SQLITE_ALLOW_COVERING_INDEX_SCAN != 1 + "ALLOW_COVERING_INDEX_SCAN=" CTIMEOPT_VAL(SQLITE_ALLOW_COVERING_INDEX_SCAN), +# endif +#endif +#ifdef SQLITE_ALLOW_URI_AUTHORITY + "ALLOW_URI_AUTHORITY", +#endif +#ifdef SQLITE_ATOMIC_INTRINSICS + "ATOMIC_INTRINSICS=" CTIMEOPT_VAL(SQLITE_ATOMIC_INTRINSICS), +#endif +#ifdef SQLITE_BITMASK_TYPE + "BITMASK_TYPE=" CTIMEOPT_VAL(SQLITE_BITMASK_TYPE), +#endif +#ifdef SQLITE_BUG_COMPATIBLE_20160819 + "BUG_COMPATIBLE_20160819", +#endif +#ifdef SQLITE_CASE_SENSITIVE_LIKE + "CASE_SENSITIVE_LIKE", +#endif +#ifdef SQLITE_CHECK_PAGES + "CHECK_PAGES", +#endif +#if defined(__clang__) && defined(__clang_major__) + "COMPILER=clang-" CTIMEOPT_VAL(__clang_major__) "." + CTIMEOPT_VAL(__clang_minor__) "." + CTIMEOPT_VAL(__clang_patchlevel__), +#elif defined(_MSC_VER) + "COMPILER=msvc-" CTIMEOPT_VAL(_MSC_VER), +#elif defined(__GNUC__) && defined(__VERSION__) + "COMPILER=gcc-" __VERSION__, +#endif +#ifdef SQLITE_COVERAGE_TEST + "COVERAGE_TEST", +#endif +#ifdef SQLITE_DEBUG + "DEBUG", +#endif +#ifdef SQLITE_DEFAULT_AUTOMATIC_INDEX + "DEFAULT_AUTOMATIC_INDEX", +#endif +#ifdef SQLITE_DEFAULT_AUTOVACUUM + "DEFAULT_AUTOVACUUM", +#endif +#ifdef SQLITE_DEFAULT_CACHE_SIZE + "DEFAULT_CACHE_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_CACHE_SIZE), +#endif +#ifdef SQLITE_DEFAULT_CKPTFULLFSYNC + "DEFAULT_CKPTFULLFSYNC", +#endif +#ifdef SQLITE_DEFAULT_FILE_FORMAT + "DEFAULT_FILE_FORMAT=" CTIMEOPT_VAL(SQLITE_DEFAULT_FILE_FORMAT), +#endif +#ifdef SQLITE_DEFAULT_FILE_PERMISSIONS + "DEFAULT_FILE_PERMISSIONS=" CTIMEOPT_VAL(SQLITE_DEFAULT_FILE_PERMISSIONS), +#endif +#ifdef SQLITE_DEFAULT_FOREIGN_KEYS + "DEFAULT_FOREIGN_KEYS", +#endif +#ifdef SQLITE_DEFAULT_JOURNAL_SIZE_LIMIT + "DEFAULT_JOURNAL_SIZE_LIMIT=" CTIMEOPT_VAL(SQLITE_DEFAULT_JOURNAL_SIZE_LIMIT), +#endif +#ifdef SQLITE_DEFAULT_LOCKING_MODE + "DEFAULT_LOCKING_MODE=" CTIMEOPT_VAL(SQLITE_DEFAULT_LOCKING_MODE), +#endif +#ifdef SQLITE_DEFAULT_LOOKASIDE + "DEFAULT_LOOKASIDE=" CTIMEOPT_VAL2(SQLITE_DEFAULT_LOOKASIDE), +#endif +#ifdef SQLITE_DEFAULT_MEMSTATUS +# if SQLITE_DEFAULT_MEMSTATUS != 1 + "DEFAULT_MEMSTATUS=" CTIMEOPT_VAL(SQLITE_DEFAULT_MEMSTATUS), +# endif +#endif +#ifdef SQLITE_DEFAULT_MMAP_SIZE + "DEFAULT_MMAP_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_MMAP_SIZE), +#endif +#ifdef SQLITE_DEFAULT_PAGE_SIZE + "DEFAULT_PAGE_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_PAGE_SIZE), +#endif +#ifdef SQLITE_DEFAULT_PCACHE_INITSZ + "DEFAULT_PCACHE_INITSZ=" CTIMEOPT_VAL(SQLITE_DEFAULT_PCACHE_INITSZ), +#endif +#ifdef SQLITE_DEFAULT_PROXYDIR_PERMISSIONS + "DEFAULT_PROXYDIR_PERMISSIONS=" CTIMEOPT_VAL(SQLITE_DEFAULT_PROXYDIR_PERMISSIONS), +#endif +#ifdef SQLITE_DEFAULT_RECURSIVE_TRIGGERS + "DEFAULT_RECURSIVE_TRIGGERS", +#endif +#ifdef SQLITE_DEFAULT_ROWEST + "DEFAULT_ROWEST=" 
CTIMEOPT_VAL(SQLITE_DEFAULT_ROWEST), +#endif +#ifdef SQLITE_DEFAULT_SECTOR_SIZE + "DEFAULT_SECTOR_SIZE=" CTIMEOPT_VAL(SQLITE_DEFAULT_SECTOR_SIZE), +#endif +#ifdef SQLITE_DEFAULT_SYNCHRONOUS + "DEFAULT_SYNCHRONOUS=" CTIMEOPT_VAL(SQLITE_DEFAULT_SYNCHRONOUS), +#endif +#ifdef SQLITE_DEFAULT_WAL_AUTOCHECKPOINT + "DEFAULT_WAL_AUTOCHECKPOINT=" CTIMEOPT_VAL(SQLITE_DEFAULT_WAL_AUTOCHECKPOINT), +#endif +#ifdef SQLITE_DEFAULT_WAL_SYNCHRONOUS + "DEFAULT_WAL_SYNCHRONOUS=" CTIMEOPT_VAL(SQLITE_DEFAULT_WAL_SYNCHRONOUS), +#endif +#ifdef SQLITE_DEFAULT_WORKER_THREADS + "DEFAULT_WORKER_THREADS=" CTIMEOPT_VAL(SQLITE_DEFAULT_WORKER_THREADS), +#endif +#ifdef SQLITE_DIRECT_OVERFLOW_READ + "DIRECT_OVERFLOW_READ", +#endif +#ifdef SQLITE_DISABLE_DIRSYNC + "DISABLE_DIRSYNC", +#endif +#ifdef SQLITE_DISABLE_FTS3_UNICODE + "DISABLE_FTS3_UNICODE", +#endif +#ifdef SQLITE_DISABLE_FTS4_DEFERRED + "DISABLE_FTS4_DEFERRED", +#endif +#ifdef SQLITE_DISABLE_INTRINSIC + "DISABLE_INTRINSIC", +#endif +#ifdef SQLITE_DISABLE_LFS + "DISABLE_LFS", +#endif +#ifdef SQLITE_DISABLE_PAGECACHE_OVERFLOW_STATS + "DISABLE_PAGECACHE_OVERFLOW_STATS", +#endif +#ifdef SQLITE_DISABLE_SKIPAHEAD_DISTINCT + "DISABLE_SKIPAHEAD_DISTINCT", +#endif +#ifdef SQLITE_ENABLE_8_3_NAMES + "ENABLE_8_3_NAMES=" CTIMEOPT_VAL(SQLITE_ENABLE_8_3_NAMES), +#endif +#ifdef SQLITE_ENABLE_API_ARMOR + "ENABLE_API_ARMOR", +#endif +#ifdef SQLITE_ENABLE_ATOMIC_WRITE + "ENABLE_ATOMIC_WRITE", +#endif +#ifdef SQLITE_ENABLE_BATCH_ATOMIC_WRITE + "ENABLE_BATCH_ATOMIC_WRITE", +#endif +#ifdef SQLITE_ENABLE_BYTECODE_VTAB + "ENABLE_BYTECODE_VTAB", +#endif +#ifdef SQLITE_ENABLE_CEROD + "ENABLE_CEROD=" CTIMEOPT_VAL(SQLITE_ENABLE_CEROD), +#endif +#ifdef SQLITE_ENABLE_COLUMN_METADATA + "ENABLE_COLUMN_METADATA", +#endif +#ifdef SQLITE_ENABLE_COLUMN_USED_MASK + "ENABLE_COLUMN_USED_MASK", +#endif +#ifdef SQLITE_ENABLE_COSTMULT + "ENABLE_COSTMULT", +#endif +#ifdef SQLITE_ENABLE_CURSOR_HINTS + "ENABLE_CURSOR_HINTS", +#endif +#ifdef SQLITE_ENABLE_DBPAGE_VTAB + "ENABLE_DBPAGE_VTAB", +#endif +#ifdef SQLITE_ENABLE_DBSTAT_VTAB + "ENABLE_DBSTAT_VTAB", +#endif +#ifdef SQLITE_ENABLE_EXPENSIVE_ASSERT + "ENABLE_EXPENSIVE_ASSERT", +#endif +#ifdef SQLITE_ENABLE_EXPLAIN_COMMENTS + "ENABLE_EXPLAIN_COMMENTS", +#endif +#ifdef SQLITE_ENABLE_FTS3 + "ENABLE_FTS3", +#endif +#ifdef SQLITE_ENABLE_FTS3_PARENTHESIS + "ENABLE_FTS3_PARENTHESIS", +#endif +#ifdef SQLITE_ENABLE_FTS3_TOKENIZER + "ENABLE_FTS3_TOKENIZER", +#endif +#ifdef SQLITE_ENABLE_FTS4 + "ENABLE_FTS4", +#endif +#ifdef SQLITE_ENABLE_FTS5 + "ENABLE_FTS5", +#endif +#ifdef SQLITE_ENABLE_GEOPOLY + "ENABLE_GEOPOLY", +#endif +#ifdef SQLITE_ENABLE_HIDDEN_COLUMNS + "ENABLE_HIDDEN_COLUMNS", +#endif +#ifdef SQLITE_ENABLE_ICU + "ENABLE_ICU", +#endif +#ifdef SQLITE_ENABLE_IOTRACE + "ENABLE_IOTRACE", +#endif +#ifdef SQLITE_ENABLE_JSON1 + "ENABLE_JSON1", +#endif +#ifdef SQLITE_ENABLE_LOAD_EXTENSION + "ENABLE_LOAD_EXTENSION", +#endif +#ifdef SQLITE_ENABLE_LOCKING_STYLE + "ENABLE_LOCKING_STYLE=" CTIMEOPT_VAL(SQLITE_ENABLE_LOCKING_STYLE), +#endif +#ifdef SQLITE_ENABLE_MATH_FUNCTIONS + "ENABLE_MATH_FUNCTIONS", +#endif +#ifdef SQLITE_ENABLE_MEMORY_MANAGEMENT + "ENABLE_MEMORY_MANAGEMENT", +#endif +#ifdef SQLITE_ENABLE_MEMSYS3 + "ENABLE_MEMSYS3", +#endif +#ifdef SQLITE_ENABLE_MEMSYS5 + "ENABLE_MEMSYS5", +#endif +#ifdef SQLITE_ENABLE_MULTIPLEX + "ENABLE_MULTIPLEX", +#endif +#ifdef SQLITE_ENABLE_NORMALIZE + "ENABLE_NORMALIZE", +#endif +#ifdef SQLITE_ENABLE_NULL_TRIM + "ENABLE_NULL_TRIM", +#endif +#ifdef SQLITE_ENABLE_OFFSET_SQL_FUNC + "ENABLE_OFFSET_SQL_FUNC", +#endif +#ifdef 
SQLITE_ENABLE_OVERSIZE_CELL_CHECK + "ENABLE_OVERSIZE_CELL_CHECK", +#endif +#ifdef SQLITE_ENABLE_PREUPDATE_HOOK + "ENABLE_PREUPDATE_HOOK", +#endif +#ifdef SQLITE_ENABLE_QPSG + "ENABLE_QPSG", +#endif +#ifdef SQLITE_ENABLE_RBU + "ENABLE_RBU", +#endif +#ifdef SQLITE_ENABLE_RTREE + "ENABLE_RTREE", +#endif +#ifdef SQLITE_ENABLE_SELECTTRACE + "ENABLE_SELECTTRACE", +#endif +#ifdef SQLITE_ENABLE_SESSION + "ENABLE_SESSION", +#endif +#ifdef SQLITE_ENABLE_SNAPSHOT + "ENABLE_SNAPSHOT", +#endif +#ifdef SQLITE_ENABLE_SORTER_REFERENCES + "ENABLE_SORTER_REFERENCES", +#endif +#ifdef SQLITE_ENABLE_SQLLOG + "ENABLE_SQLLOG", +#endif +#ifdef SQLITE_ENABLE_STAT4 + "ENABLE_STAT4", +#endif +#ifdef SQLITE_ENABLE_STMTVTAB + "ENABLE_STMTVTAB", +#endif +#ifdef SQLITE_ENABLE_STMT_SCANSTATUS + "ENABLE_STMT_SCANSTATUS", +#endif +#ifdef SQLITE_ENABLE_UNKNOWN_SQL_FUNCTION + "ENABLE_UNKNOWN_SQL_FUNCTION", +#endif +#ifdef SQLITE_ENABLE_UNLOCK_NOTIFY + "ENABLE_UNLOCK_NOTIFY", +#endif +#ifdef SQLITE_ENABLE_UPDATE_DELETE_LIMIT + "ENABLE_UPDATE_DELETE_LIMIT", +#endif +#ifdef SQLITE_ENABLE_URI_00_ERROR + "ENABLE_URI_00_ERROR", +#endif +#ifdef SQLITE_ENABLE_VFSTRACE + "ENABLE_VFSTRACE", +#endif +#ifdef SQLITE_ENABLE_WHERETRACE + "ENABLE_WHERETRACE", +#endif +#ifdef SQLITE_ENABLE_ZIPVFS + "ENABLE_ZIPVFS", +#endif +#ifdef SQLITE_EXPLAIN_ESTIMATED_ROWS + "EXPLAIN_ESTIMATED_ROWS", +#endif +#ifdef SQLITE_EXTRA_IFNULLROW + "EXTRA_IFNULLROW", +#endif +#ifdef SQLITE_EXTRA_INIT + "EXTRA_INIT=" CTIMEOPT_VAL(SQLITE_EXTRA_INIT), +#endif +#ifdef SQLITE_EXTRA_SHUTDOWN + "EXTRA_SHUTDOWN=" CTIMEOPT_VAL(SQLITE_EXTRA_SHUTDOWN), +#endif +#ifdef SQLITE_FTS3_MAX_EXPR_DEPTH + "FTS3_MAX_EXPR_DEPTH=" CTIMEOPT_VAL(SQLITE_FTS3_MAX_EXPR_DEPTH), +#endif +#ifdef SQLITE_FTS5_ENABLE_TEST_MI + "FTS5_ENABLE_TEST_MI", +#endif +#ifdef SQLITE_FTS5_NO_WITHOUT_ROWID + "FTS5_NO_WITHOUT_ROWID", +#endif +#if HAVE_ISNAN || SQLITE_HAVE_ISNAN + "HAVE_ISNAN", +#endif +#ifdef SQLITE_HOMEGROWN_RECURSIVE_MUTEX +# if SQLITE_HOMEGROWN_RECURSIVE_MUTEX != 1 + "HOMEGROWN_RECURSIVE_MUTEX=" CTIMEOPT_VAL(SQLITE_HOMEGROWN_RECURSIVE_MUTEX), +# endif +#endif +#ifdef SQLITE_IGNORE_AFP_LOCK_ERRORS + "IGNORE_AFP_LOCK_ERRORS", +#endif +#ifdef SQLITE_IGNORE_FLOCK_LOCK_ERRORS + "IGNORE_FLOCK_LOCK_ERRORS", +#endif +#ifdef SQLITE_INLINE_MEMCPY + "INLINE_MEMCPY", +#endif +#ifdef SQLITE_INT64_TYPE + "INT64_TYPE", +#endif +#ifdef SQLITE_INTEGRITY_CHECK_ERROR_MAX + "INTEGRITY_CHECK_ERROR_MAX=" CTIMEOPT_VAL(SQLITE_INTEGRITY_CHECK_ERROR_MAX), +#endif +#ifdef SQLITE_LIKE_DOESNT_MATCH_BLOBS + "LIKE_DOESNT_MATCH_BLOBS", +#endif +#ifdef SQLITE_LOCK_TRACE + "LOCK_TRACE", +#endif +#ifdef SQLITE_LOG_CACHE_SPILL + "LOG_CACHE_SPILL", +#endif +#ifdef SQLITE_MALLOC_SOFT_LIMIT + "MALLOC_SOFT_LIMIT=" CTIMEOPT_VAL(SQLITE_MALLOC_SOFT_LIMIT), +#endif +#ifdef SQLITE_MAX_ATTACHED + "MAX_ATTACHED=" CTIMEOPT_VAL(SQLITE_MAX_ATTACHED), +#endif +#ifdef SQLITE_MAX_COLUMN + "MAX_COLUMN=" CTIMEOPT_VAL(SQLITE_MAX_COLUMN), +#endif +#ifdef SQLITE_MAX_COMPOUND_SELECT + "MAX_COMPOUND_SELECT=" CTIMEOPT_VAL(SQLITE_MAX_COMPOUND_SELECT), +#endif +#ifdef SQLITE_MAX_DEFAULT_PAGE_SIZE + "MAX_DEFAULT_PAGE_SIZE=" CTIMEOPT_VAL(SQLITE_MAX_DEFAULT_PAGE_SIZE), +#endif +#ifdef SQLITE_MAX_EXPR_DEPTH + "MAX_EXPR_DEPTH=" CTIMEOPT_VAL(SQLITE_MAX_EXPR_DEPTH), +#endif +#ifdef SQLITE_MAX_FUNCTION_ARG + "MAX_FUNCTION_ARG=" CTIMEOPT_VAL(SQLITE_MAX_FUNCTION_ARG), +#endif +#ifdef SQLITE_MAX_LENGTH + "MAX_LENGTH=" CTIMEOPT_VAL(SQLITE_MAX_LENGTH), +#endif +#ifdef SQLITE_MAX_LIKE_PATTERN_LENGTH + "MAX_LIKE_PATTERN_LENGTH=" 
CTIMEOPT_VAL(SQLITE_MAX_LIKE_PATTERN_LENGTH), +#endif +#ifdef SQLITE_MAX_MEMORY + "MAX_MEMORY=" CTIMEOPT_VAL(SQLITE_MAX_MEMORY), +#endif +#ifdef SQLITE_MAX_MMAP_SIZE + "MAX_MMAP_SIZE=" CTIMEOPT_VAL(SQLITE_MAX_MMAP_SIZE), +#endif +#ifdef SQLITE_MAX_MMAP_SIZE_ + "MAX_MMAP_SIZE_=" CTIMEOPT_VAL(SQLITE_MAX_MMAP_SIZE_), +#endif +#ifdef SQLITE_MAX_PAGE_COUNT + "MAX_PAGE_COUNT=" CTIMEOPT_VAL(SQLITE_MAX_PAGE_COUNT), +#endif +#ifdef SQLITE_MAX_PAGE_SIZE + "MAX_PAGE_SIZE=" CTIMEOPT_VAL(SQLITE_MAX_PAGE_SIZE), +#endif +#ifdef SQLITE_MAX_SCHEMA_RETRY + "MAX_SCHEMA_RETRY=" CTIMEOPT_VAL(SQLITE_MAX_SCHEMA_RETRY), +#endif +#ifdef SQLITE_MAX_SQL_LENGTH + "MAX_SQL_LENGTH=" CTIMEOPT_VAL(SQLITE_MAX_SQL_LENGTH), +#endif +#ifdef SQLITE_MAX_TRIGGER_DEPTH + "MAX_TRIGGER_DEPTH=" CTIMEOPT_VAL(SQLITE_MAX_TRIGGER_DEPTH), +#endif +#ifdef SQLITE_MAX_VARIABLE_NUMBER + "MAX_VARIABLE_NUMBER=" CTIMEOPT_VAL(SQLITE_MAX_VARIABLE_NUMBER), +#endif +#ifdef SQLITE_MAX_VDBE_OP + "MAX_VDBE_OP=" CTIMEOPT_VAL(SQLITE_MAX_VDBE_OP), +#endif +#ifdef SQLITE_MAX_WORKER_THREADS + "MAX_WORKER_THREADS=" CTIMEOPT_VAL(SQLITE_MAX_WORKER_THREADS), +#endif +#ifdef SQLITE_MEMDEBUG + "MEMDEBUG", +#endif +#ifdef SQLITE_MIXED_ENDIAN_64BIT_FLOAT + "MIXED_ENDIAN_64BIT_FLOAT", +#endif +#ifdef SQLITE_MMAP_READWRITE + "MMAP_READWRITE", +#endif +#ifdef SQLITE_MUTEX_NOOP + "MUTEX_NOOP", +#endif +#ifdef SQLITE_MUTEX_OMIT + "MUTEX_OMIT", +#endif +#ifdef SQLITE_MUTEX_PTHREADS + "MUTEX_PTHREADS", +#endif +#ifdef SQLITE_MUTEX_W32 + "MUTEX_W32", +#endif +#ifdef SQLITE_NEED_ERR_NAME + "NEED_ERR_NAME", +#endif +#ifdef SQLITE_NO_SYNC + "NO_SYNC", +#endif +#ifdef SQLITE_OMIT_ALTERTABLE + "OMIT_ALTERTABLE", +#endif +#ifdef SQLITE_OMIT_ANALYZE + "OMIT_ANALYZE", +#endif +#ifdef SQLITE_OMIT_ATTACH + "OMIT_ATTACH", +#endif +#ifdef SQLITE_OMIT_AUTHORIZATION + "OMIT_AUTHORIZATION", +#endif +#ifdef SQLITE_OMIT_AUTOINCREMENT + "OMIT_AUTOINCREMENT", +#endif +#ifdef SQLITE_OMIT_AUTOINIT + "OMIT_AUTOINIT", +#endif +#ifdef SQLITE_OMIT_AUTOMATIC_INDEX + "OMIT_AUTOMATIC_INDEX", +#endif +#ifdef SQLITE_OMIT_AUTORESET + "OMIT_AUTORESET", +#endif +#ifdef SQLITE_OMIT_AUTOVACUUM + "OMIT_AUTOVACUUM", +#endif +#ifdef SQLITE_OMIT_BETWEEN_OPTIMIZATION + "OMIT_BETWEEN_OPTIMIZATION", +#endif +#ifdef SQLITE_OMIT_BLOB_LITERAL + "OMIT_BLOB_LITERAL", +#endif +#ifdef SQLITE_OMIT_CAST + "OMIT_CAST", +#endif +#ifdef SQLITE_OMIT_CHECK + "OMIT_CHECK", +#endif +#ifdef SQLITE_OMIT_COMPLETE + "OMIT_COMPLETE", +#endif +#ifdef SQLITE_OMIT_COMPOUND_SELECT + "OMIT_COMPOUND_SELECT", +#endif +#ifdef SQLITE_OMIT_CONFLICT_CLAUSE + "OMIT_CONFLICT_CLAUSE", +#endif +#ifdef SQLITE_OMIT_CTE + "OMIT_CTE", +#endif +#if defined(SQLITE_OMIT_DATETIME_FUNCS) || defined(SQLITE_OMIT_FLOATING_POINT) + "OMIT_DATETIME_FUNCS", +#endif +#ifdef SQLITE_OMIT_DECLTYPE + "OMIT_DECLTYPE", +#endif +#ifdef SQLITE_OMIT_DEPRECATED + "OMIT_DEPRECATED", +#endif +#ifdef SQLITE_OMIT_DESERIALIZE + "OMIT_DESERIALIZE", +#endif +#ifdef SQLITE_OMIT_DISKIO + "OMIT_DISKIO", +#endif +#ifdef SQLITE_OMIT_EXPLAIN + "OMIT_EXPLAIN", +#endif +#ifdef SQLITE_OMIT_FLAG_PRAGMAS + "OMIT_FLAG_PRAGMAS", +#endif +#ifdef SQLITE_OMIT_FLOATING_POINT + "OMIT_FLOATING_POINT", +#endif +#ifdef SQLITE_OMIT_FOREIGN_KEY + "OMIT_FOREIGN_KEY", +#endif +#ifdef SQLITE_OMIT_GET_TABLE + "OMIT_GET_TABLE", +#endif +#ifdef SQLITE_OMIT_HEX_INTEGER + "OMIT_HEX_INTEGER", +#endif +#ifdef SQLITE_OMIT_INCRBLOB + "OMIT_INCRBLOB", +#endif +#ifdef SQLITE_OMIT_INTEGRITY_CHECK + "OMIT_INTEGRITY_CHECK", +#endif +#ifdef SQLITE_OMIT_INTROSPECTION_PRAGMAS + "OMIT_INTROSPECTION_PRAGMAS", +#endif +#ifdef 
SQLITE_OMIT_LIKE_OPTIMIZATION + "OMIT_LIKE_OPTIMIZATION", +#endif +#ifdef SQLITE_OMIT_LOAD_EXTENSION + "OMIT_LOAD_EXTENSION", +#endif +#ifdef SQLITE_OMIT_LOCALTIME + "OMIT_LOCALTIME", +#endif +#ifdef SQLITE_OMIT_LOOKASIDE + "OMIT_LOOKASIDE", +#endif +#ifdef SQLITE_OMIT_MEMORYDB + "OMIT_MEMORYDB", +#endif +#ifdef SQLITE_OMIT_OR_OPTIMIZATION + "OMIT_OR_OPTIMIZATION", +#endif +#ifdef SQLITE_OMIT_PAGER_PRAGMAS + "OMIT_PAGER_PRAGMAS", +#endif +#ifdef SQLITE_OMIT_PARSER_TRACE + "OMIT_PARSER_TRACE", +#endif +#ifdef SQLITE_OMIT_POPEN + "OMIT_POPEN", +#endif +#ifdef SQLITE_OMIT_PRAGMA + "OMIT_PRAGMA", +#endif +#ifdef SQLITE_OMIT_PROGRESS_CALLBACK + "OMIT_PROGRESS_CALLBACK", +#endif +#ifdef SQLITE_OMIT_QUICKBALANCE + "OMIT_QUICKBALANCE", +#endif +#ifdef SQLITE_OMIT_REINDEX + "OMIT_REINDEX", +#endif +#ifdef SQLITE_OMIT_SCHEMA_PRAGMAS + "OMIT_SCHEMA_PRAGMAS", +#endif +#ifdef SQLITE_OMIT_SCHEMA_VERSION_PRAGMAS + "OMIT_SCHEMA_VERSION_PRAGMAS", +#endif +#ifdef SQLITE_OMIT_SHARED_CACHE + "OMIT_SHARED_CACHE", +#endif +#ifdef SQLITE_OMIT_SHUTDOWN_DIRECTORIES + "OMIT_SHUTDOWN_DIRECTORIES", +#endif +#ifdef SQLITE_OMIT_SUBQUERY + "OMIT_SUBQUERY", +#endif +#ifdef SQLITE_OMIT_TCL_VARIABLE + "OMIT_TCL_VARIABLE", +#endif +#ifdef SQLITE_OMIT_TEMPDB + "OMIT_TEMPDB", +#endif +#ifdef SQLITE_OMIT_TEST_CONTROL + "OMIT_TEST_CONTROL", +#endif +#ifdef SQLITE_OMIT_TRACE +# if SQLITE_OMIT_TRACE != 1 + "OMIT_TRACE=" CTIMEOPT_VAL(SQLITE_OMIT_TRACE), +# endif +#endif +#ifdef SQLITE_OMIT_TRIGGER + "OMIT_TRIGGER", +#endif +#ifdef SQLITE_OMIT_TRUNCATE_OPTIMIZATION + "OMIT_TRUNCATE_OPTIMIZATION", +#endif +#ifdef SQLITE_OMIT_UTF16 + "OMIT_UTF16", +#endif +#ifdef SQLITE_OMIT_VACUUM + "OMIT_VACUUM", +#endif +#ifdef SQLITE_OMIT_VIEW + "OMIT_VIEW", +#endif +#ifdef SQLITE_OMIT_VIRTUALTABLE + "OMIT_VIRTUALTABLE", +#endif +#ifdef SQLITE_OMIT_WAL + "OMIT_WAL", +#endif +#ifdef SQLITE_OMIT_WSD + "OMIT_WSD", +#endif +#ifdef SQLITE_OMIT_XFER_OPT + "OMIT_XFER_OPT", +#endif +#ifdef SQLITE_PCACHE_SEPARATE_HEADER + "PCACHE_SEPARATE_HEADER", +#endif +#ifdef SQLITE_PERFORMANCE_TRACE + "PERFORMANCE_TRACE", +#endif +#ifdef SQLITE_POWERSAFE_OVERWRITE +# if SQLITE_POWERSAFE_OVERWRITE != 1 + "POWERSAFE_OVERWRITE=" CTIMEOPT_VAL(SQLITE_POWERSAFE_OVERWRITE), +# endif +#endif +#ifdef SQLITE_PREFER_PROXY_LOCKING + "PREFER_PROXY_LOCKING", +#endif +#ifdef SQLITE_PROXY_DEBUG + "PROXY_DEBUG", +#endif +#ifdef SQLITE_REVERSE_UNORDERED_SELECTS + "REVERSE_UNORDERED_SELECTS", +#endif +#ifdef SQLITE_RTREE_INT_ONLY + "RTREE_INT_ONLY", +#endif +#ifdef SQLITE_SECURE_DELETE + "SECURE_DELETE", +#endif +#ifdef SQLITE_SMALL_STACK + "SMALL_STACK", +#endif +#ifdef SQLITE_SORTER_PMASZ + "SORTER_PMASZ=" CTIMEOPT_VAL(SQLITE_SORTER_PMASZ), +#endif +#ifdef SQLITE_SOUNDEX + "SOUNDEX", +#endif +#ifdef SQLITE_STAT4_SAMPLES + "STAT4_SAMPLES=" CTIMEOPT_VAL(SQLITE_STAT4_SAMPLES), +#endif +#ifdef SQLITE_STMTJRNL_SPILL + "STMTJRNL_SPILL=" CTIMEOPT_VAL(SQLITE_STMTJRNL_SPILL), +#endif +#ifdef SQLITE_SUBSTR_COMPATIBILITY + "SUBSTR_COMPATIBILITY", +#endif +#if (!defined(SQLITE_WIN32_MALLOC) \ + && !defined(SQLITE_ZERO_MALLOC) \ + && !defined(SQLITE_MEMDEBUG) \ + ) || defined(SQLITE_SYSTEM_MALLOC) + "SYSTEM_MALLOC", +#endif +#ifdef SQLITE_TCL + "TCL", +#endif +#ifdef SQLITE_TEMP_STORE + "TEMP_STORE=" CTIMEOPT_VAL(SQLITE_TEMP_STORE), +#endif +#ifdef SQLITE_TEST + "TEST", +#endif +#if defined(SQLITE_THREADSAFE) + "THREADSAFE=" CTIMEOPT_VAL(SQLITE_THREADSAFE), +#elif defined(THREADSAFE) + "THREADSAFE=" CTIMEOPT_VAL(THREADSAFE), +#else + "THREADSAFE=1", +#endif +#ifdef SQLITE_UNLINK_AFTER_CLOSE + 
"UNLINK_AFTER_CLOSE", +#endif +#ifdef SQLITE_UNTESTABLE + "UNTESTABLE", +#endif +#ifdef SQLITE_USER_AUTHENTICATION + "USER_AUTHENTICATION", +#endif +#ifdef SQLITE_USE_ALLOCA + "USE_ALLOCA", +#endif +#ifdef SQLITE_USE_FCNTL_TRACE + "USE_FCNTL_TRACE", +#endif +#ifdef SQLITE_USE_URI + "USE_URI", +#endif +#ifdef SQLITE_VDBE_COVERAGE + "VDBE_COVERAGE", +#endif +#ifdef SQLITE_WIN32_MALLOC + "WIN32_MALLOC", +#endif +#ifdef SQLITE_ZERO_MALLOC + "ZERO_MALLOC", +#endif +/* +** END CODE GENERATED BY tool/mkctime.tcl +*/ +}; + +SQLITE_PRIVATE const char **sqlite3CompileOptions(int *pnOpt){ + *pnOpt = sizeof(sqlite3azCompileOpt) / sizeof(sqlite3azCompileOpt[0]); + return (const char**)sqlite3azCompileOpt; +} + +#endif /* SQLITE_OMIT_COMPILEOPTION_DIAGS */ + +/************** End of ctime.c ***********************************************/ /************** Begin file global.c ******************************************/ /* ** 2008 June 13 @@ -21093,6 +21553,18 @@ SQLITE_PRIVATE SQLITE_WSD struct Sqlite3Config sqlite3Config = { */ SQLITE_PRIVATE FuncDefHash sqlite3BuiltinFunctions; +#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_DEBUG) +/* +** Counter used for coverage testing. Does not come into play for +** release builds. +** +** Access to this global variable is not mutex protected. This might +** result in TSAN warnings. But as the variable does not exist in +** release builds, that should not be a concern. +*/ +SQLITE_PRIVATE unsigned int sqlite3CoverageCounter; +#endif /* SQLITE_COVERAGE_TEST || SQLITE_DEBUG */ + #ifdef VDBE_PROFILE /* ** The following performance counter can be used in place of @@ -21143,6 +21615,48 @@ SQLITE_PRIVATE const unsigned char sqlite3OpcodeProperty[] = OPFLG_INITIALIZER; */ SQLITE_PRIVATE const char sqlite3StrBINARY[] = "BINARY"; +/* +** Standard typenames. These names must match the COLTYPE_* definitions. +** Adjust the SQLITE_N_STDTYPE value if adding or removing entries. +** +** sqlite3StdType[] The actual names of the datatypes. +** +** sqlite3StdTypeLen[] The length (in bytes) of each entry +** in sqlite3StdType[]. +** +** sqlite3StdTypeAffinity[] The affinity associated with each entry +** in sqlite3StdType[]. +** +** sqlite3StdTypeMap[] The type value (as returned from +** sqlite3_column_type() or sqlite3_value_type()) +** for each entry in sqlite3StdType[]. 
+*/ +SQLITE_PRIVATE const unsigned char sqlite3StdTypeLen[] = { 3, 4, 3, 7, 4, 4 }; +SQLITE_PRIVATE const char sqlite3StdTypeAffinity[] = { + SQLITE_AFF_NUMERIC, + SQLITE_AFF_BLOB, + SQLITE_AFF_INTEGER, + SQLITE_AFF_INTEGER, + SQLITE_AFF_REAL, + SQLITE_AFF_TEXT +}; +SQLITE_PRIVATE const char sqlite3StdTypeMap[] = { + 0, + SQLITE_BLOB, + SQLITE_INTEGER, + SQLITE_INTEGER, + SQLITE_FLOAT, + SQLITE_TEXT +}; +SQLITE_PRIVATE const char *sqlite3StdType[] = { + "ANY", + "BLOB", + "INT", + "INTEGER", + "REAL", + "TEXT" +}; + /************** End of global.c **********************************************/ /************** Begin file status.c ******************************************/ /* @@ -21345,8 +21859,8 @@ struct VdbeFrame { int nMem; /* Number of entries in aMem */ int nChildMem; /* Number of memory cells for child frame */ int nChildCsr; /* Number of cursors for child frame */ - int nChange; /* Statement changes (Vdbe.nChange) */ - int nDbChange; /* Value of db->nChange */ + i64 nChange; /* Statement changes (Vdbe.nChange) */ + i64 nDbChange; /* Value of db->nChange */ }; /* Magic number for sanity checking on VdbeFrame objects */ @@ -21553,7 +22067,7 @@ struct Vdbe { u32 cacheCtr; /* VdbeCursor row cache generation counter */ int pc; /* The program counter */ int rc; /* Value to return */ - int nChange; /* Number of db changes made since last reset */ + i64 nChange; /* Number of db changes made since last reset */ int iStatement; /* Statement number (or 0 if has no opened stmt) */ i64 iCurrentTime; /* Value of julianday('now') for this statement */ i64 nFkConstraint; /* Number of imm. FK constraints this VM */ @@ -21691,13 +22205,18 @@ SQLITE_PRIVATE void sqlite3VdbeMemSetDouble(Mem*, double); SQLITE_PRIVATE void sqlite3VdbeMemSetPointer(Mem*, void*, const char*, void(*)(void*)); SQLITE_PRIVATE void sqlite3VdbeMemInit(Mem*,sqlite3*,u16); SQLITE_PRIVATE void sqlite3VdbeMemSetNull(Mem*); +#ifndef SQLITE_OMIT_INCRBLOB SQLITE_PRIVATE void sqlite3VdbeMemSetZeroBlob(Mem*,int); +#else +SQLITE_PRIVATE int sqlite3VdbeMemSetZeroBlob(Mem*,int); +#endif #ifdef SQLITE_DEBUG SQLITE_PRIVATE int sqlite3VdbeMemIsRowSet(const Mem*); #endif SQLITE_PRIVATE int sqlite3VdbeMemSetRowSet(Mem*); SQLITE_PRIVATE int sqlite3VdbeMemMakeWriteable(Mem*); SQLITE_PRIVATE int sqlite3VdbeMemStringify(Mem*, u8, u8); +SQLITE_PRIVATE int sqlite3IntFloatCompare(i64,double); SQLITE_PRIVATE i64 sqlite3VdbeIntValue(Mem*); SQLITE_PRIVATE int sqlite3VdbeMemIntegerify(Mem*); SQLITE_PRIVATE double sqlite3VdbeRealValue(Mem*); @@ -23182,131 +23701,100 @@ static void strftimeFunc( sqlite3_value **argv ){ DateTime x; - u64 n; size_t i,j; - char *z; sqlite3 *db; const char *zFmt; - char zBuf[100]; + sqlite3_str sRes; + + if( argc==0 ) return; zFmt = (const char*)sqlite3_value_text(argv[0]); if( zFmt==0 || isDate(context, argc-1, argv+1, &x) ) return; db = sqlite3_context_db_handle(context); - for(i=0, n=1; zFmt[i]; i++, n++){ - if( zFmt[i]=='%' ){ - switch( zFmt[i+1] ){ - case 'd': - case 'H': - case 'm': - case 'M': - case 'S': - case 'W': - n++; - /* fall thru */ - case 'w': - case '%': - break; - case 'f': - n += 8; - break; - case 'j': - n += 3; - break; - case 'Y': - n += 8; - break; - case 's': - case 'J': - n += 50; - break; - default: - return; /* ERROR. 
return a NULL */ - } - i++; - } - } - testcase( n==sizeof(zBuf)-1 ); - testcase( n==sizeof(zBuf) ); - testcase( n==(u64)db->aLimit[SQLITE_LIMIT_LENGTH]+1 ); - testcase( n==(u64)db->aLimit[SQLITE_LIMIT_LENGTH] ); - if( n(u64)db->aLimit[SQLITE_LIMIT_LENGTH] ){ - sqlite3_result_error_toobig(context); - return; - }else{ - z = sqlite3DbMallocRawNN(db, (int)n); - if( z==0 ){ - sqlite3_result_error_nomem(context); - return; - } - } + sqlite3StrAccumInit(&sRes, 0, 0, 0, db->aLimit[SQLITE_LIMIT_LENGTH]); + computeJD(&x); computeYMD_HMS(&x); for(i=j=0; zFmt[i]; i++){ - if( zFmt[i]!='%' ){ - z[j++] = zFmt[i]; - }else{ - i++; - switch( zFmt[i] ){ - case 'd': sqlite3_snprintf(3, &z[j],"%02d",x.D); j+=2; break; - case 'f': { - double s = x.s; - if( s>59.999 ) s = 59.999; - sqlite3_snprintf(7, &z[j],"%06.3f", s); - j += sqlite3Strlen30(&z[j]); - break; + if( zFmt[i]!='%' ) continue; + if( j59.999 ) s = 59.999; + sqlite3_str_appendf(&sRes, "%06.3f", s); + break; + } + case 'H': { + sqlite3_str_appendf(&sRes, "%02d", x.h); + break; + } + case 'W': /* Fall thru */ + case 'j': { + int nDay; /* Number of days since 1st day of year */ + DateTime y = x; + y.validJD = 0; + y.M = 1; + y.D = 1; + computeJD(&y); + nDay = (int)((x.iJD-y.iJD+43200000)/86400000); + if( zFmt[i]=='W' ){ + int wd; /* 0=Monday, 1=Tuesday, ... 6=Sunday */ + wd = (int)(((x.iJD+43200000)/86400000)%7); + sqlite3_str_appendf(&sRes,"%02d",(nDay+7-wd)/7); + }else{ + sqlite3_str_appendf(&sRes,"%03d",nDay+1); } - case 'H': sqlite3_snprintf(3, &z[j],"%02d",x.h); j+=2; break; - case 'W': /* Fall thru */ - case 'j': { - int nDay; /* Number of days since 1st day of year */ - DateTime y = x; - y.validJD = 0; - y.M = 1; - y.D = 1; - computeJD(&y); - nDay = (int)((x.iJD-y.iJD+43200000)/86400000); - if( zFmt[i]=='W' ){ - int wd; /* 0=Monday, 1=Tuesday, ... 6=Sunday */ - wd = (int)(((x.iJD+43200000)/86400000)%7); - sqlite3_snprintf(3, &z[j],"%02d",(nDay+7-wd)/7); - j += 2; - }else{ - sqlite3_snprintf(4, &z[j],"%03d",nDay+1); - j += 3; - } - break; - } - case 'J': { - sqlite3_snprintf(20, &z[j],"%.16g",x.iJD/86400000.0); - j+=sqlite3Strlen30(&z[j]); - break; - } - case 'm': sqlite3_snprintf(3, &z[j],"%02d",x.M); j+=2; break; - case 'M': sqlite3_snprintf(3, &z[j],"%02d",x.m); j+=2; break; - case 's': { - i64 iS = (i64)(x.iJD/1000 - 21086676*(i64)10000); - sqlite3Int64ToText(iS, &z[j]); - j += sqlite3Strlen30(&z[j]); - break; - } - case 'S': sqlite3_snprintf(3,&z[j],"%02d",(int)x.s); j+=2; break; - case 'w': { - z[j++] = (char)(((x.iJD+129600000)/86400000) % 7) + '0'; - break; - } - case 'Y': { - sqlite3_snprintf(5,&z[j],"%04d",x.Y); j+=sqlite3Strlen30(&z[j]); - break; - } - default: z[j++] = '%'; break; + break; + } + case 'J': { + sqlite3_str_appendf(&sRes,"%.16g",x.iJD/86400000.0); + break; + } + case 'm': { + sqlite3_str_appendf(&sRes,"%02d",x.M); + break; + } + case 'M': { + sqlite3_str_appendf(&sRes,"%02d",x.m); + break; + } + case 's': { + i64 iS = (i64)(x.iJD/1000 - 21086676*(i64)10000); + sqlite3_str_appendf(&sRes,"%lld",iS); + break; + } + case 'S': { + sqlite3_str_appendf(&sRes,"%02d",(int)x.s); + break; + } + case 'w': { + sqlite3_str_appendchar(&sRes, 1, + (char)(((x.iJD+129600000)/86400000) % 7) + '0'); + break; + } + case 'Y': { + sqlite3_str_appendf(&sRes,"%04d",x.Y); + break; + } + case '%': { + sqlite3_str_appendchar(&sRes, 1, '%'); + break; + } + default: { + sqlite3_str_reset(&sRes); + return; } } } - z[j] = 0; - sqlite3_result_text(context, z, -1, - z==zBuf ? 
SQLITE_TRANSIENT : SQLITE_DYNAMIC); + if( jpMethods==0) ) return 0; return id->pMethods->xDeviceCharacteristics(id); } #ifndef SQLITE_OMIT_WAL @@ -23741,12 +24230,15 @@ SQLITE_PRIVATE int sqlite3OsOpenMalloc( rc = sqlite3OsOpen(pVfs, zFile, pFile, flags, pOutFlags); if( rc!=SQLITE_OK ){ sqlite3_free(pFile); + *ppFile = 0; }else{ *ppFile = pFile; } }else{ + *ppFile = 0; rc = SQLITE_NOMEM_BKPT; } + assert( *ppFile!=0 || rc!=SQLITE_OK ); return rc; } SQLITE_PRIVATE void sqlite3OsCloseFree(sqlite3_file *pFile){ @@ -24464,7 +24956,7 @@ static void adjustStats(int iSize, int increment){ ** This routine checks the guards at either end of the allocation and ** if they are incorrect it asserts. */ -static struct MemBlockHdr *sqlite3MemsysGetHeader(void *pAllocation){ +static struct MemBlockHdr *sqlite3MemsysGetHeader(const void *pAllocation){ struct MemBlockHdr *p; int *pInt; u8 *pU8; @@ -24711,7 +25203,7 @@ SQLITE_PRIVATE void sqlite3MemdebugSetType(void *p, u8 eType){ ** ** assert( sqlite3MemdebugHasType(p, MEMTYPE_HEAP) ); */ -SQLITE_PRIVATE int sqlite3MemdebugHasType(void *p, u8 eType){ +SQLITE_PRIVATE int sqlite3MemdebugHasType(const void *p, u8 eType){ int rc = 1; if( p && sqlite3GlobalConfig.m.xFree==sqlite3MemFree ){ struct MemBlockHdr *pHdr; @@ -24733,7 +25225,7 @@ SQLITE_PRIVATE int sqlite3MemdebugHasType(void *p, u8 eType){ ** ** assert( sqlite3MemdebugNoType(p, MEMTYPE_LOOKASIDE) ); */ -SQLITE_PRIVATE int sqlite3MemdebugNoType(void *p, u8 eType){ +SQLITE_PRIVATE int sqlite3MemdebugNoType(const void *p, u8 eType){ int rc = 1; if( p && sqlite3GlobalConfig.m.xFree==sqlite3MemFree ){ struct MemBlockHdr *pHdr; @@ -27111,205 +27603,7 @@ SQLITE_PRIVATE sqlite3_mutex_methods const *sqlite3DefaultMutex(void){ /* ** Include code that is common to all os_*.c files */ -/************** Include os_common.h in the middle of mutex_w32.c *************/ -/************** Begin file os_common.h ***************************************/ -/* -** 2004 May 22 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains macros and a little bit of code that is common to -** all of the platform-specific files (os_*.c) and is #included into those -** files. -** -** This file should be #included by the os_*.c files only. It is not a -** general purpose header file. -*/ -#ifndef _OS_COMMON_H_ -#define _OS_COMMON_H_ - -/* -** At least two bugs have slipped in because we changed the MEMORY_DEBUG -** macro to SQLITE_DEBUG and some older makefiles have not yet made the -** switch. The following code should catch this problem at compile-time. -*/ -#ifdef MEMORY_DEBUG -# error "The MEMORY_DEBUG macro is obsolete. Use SQLITE_DEBUG instead." -#endif - -/* -** Macros for performance tracing. Normally turned off. Only works -** on i486 hardware. -*/ -#ifdef SQLITE_PERFORMANCE_TRACE - -/* -** hwtime.h contains inline assembler code for implementing -** high-performance timing routines. -*/ -/************** Include hwtime.h in the middle of os_common.h ****************/ -/************** Begin file hwtime.h ******************************************/ -/* -** 2008 May 27 -** -** The author disclaims copyright to this source code. 
In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains inline asm code for retrieving "high-performance" -** counters for x86 and x86_64 class CPUs. -*/ -#ifndef SQLITE_HWTIME_H -#define SQLITE_HWTIME_H - -/* -** The following routine only works on pentium-class (or newer) processors. -** It uses the RDTSC opcode to read the cycle count value out of the -** processor and returns that value. This can be used for high-res -** profiling. -*/ -#if !defined(__STRICT_ANSI__) && \ - (defined(__GNUC__) || defined(_MSC_VER)) && \ - (defined(i386) || defined(__i386__) || defined(_M_IX86)) - - #if defined(__GNUC__) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned int lo, hi; - __asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi)); - return (sqlite_uint64)hi << 32 | lo; - } - - #elif defined(_MSC_VER) - - __declspec(naked) __inline sqlite_uint64 __cdecl sqlite3Hwtime(void){ - __asm { - rdtsc - ret ; return value at EDX:EAX - } - } - - #endif - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__x86_64__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long val; - __asm__ __volatile__ ("rdtsc" : "=A" (val)); - return val; - } - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__ppc__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long long retval; - unsigned long junk; - __asm__ __volatile__ ("\n\ - 1: mftbu %1\n\ - mftb %L0\n\ - mftbu %0\n\ - cmpw %0,%1\n\ - bne 1b" - : "=r" (retval), "=r" (junk)); - return retval; - } - -#else - - /* - ** asm() is needed for hardware timing support. Without asm(), - ** disable the sqlite3Hwtime() routine. - ** - ** sqlite3Hwtime() is only used for some obscure debugging - ** and analysis configurations, not in any deliverable, so this - ** should not be a great loss. - */ -SQLITE_PRIVATE sqlite_uint64 sqlite3Hwtime(void){ return ((sqlite_uint64)0); } - -#endif - -#endif /* !defined(SQLITE_HWTIME_H) */ - -/************** End of hwtime.h **********************************************/ -/************** Continuing where we left off in os_common.h ******************/ - -static sqlite_uint64 g_start; -static sqlite_uint64 g_elapsed; -#define TIMER_START g_start=sqlite3Hwtime() -#define TIMER_END g_elapsed=sqlite3Hwtime()-g_start -#define TIMER_ELAPSED g_elapsed -#else -#define TIMER_START -#define TIMER_END -#define TIMER_ELAPSED ((sqlite_uint64)0) -#endif - -/* -** If we compile with the SQLITE_TEST macro set, then the following block -** of code will give us the ability to simulate a disk I/O error. This -** is used for testing the I/O recovery logic. 
-*/ -#if defined(SQLITE_TEST) -SQLITE_API extern int sqlite3_io_error_hit; -SQLITE_API extern int sqlite3_io_error_hardhit; -SQLITE_API extern int sqlite3_io_error_pending; -SQLITE_API extern int sqlite3_io_error_persist; -SQLITE_API extern int sqlite3_io_error_benign; -SQLITE_API extern int sqlite3_diskfull_pending; -SQLITE_API extern int sqlite3_diskfull; -#define SimulateIOErrorBenign(X) sqlite3_io_error_benign=(X) -#define SimulateIOError(CODE) \ - if( (sqlite3_io_error_persist && sqlite3_io_error_hit) \ - || sqlite3_io_error_pending-- == 1 ) \ - { local_ioerr(); CODE; } -static void local_ioerr(){ - IOTRACE(("IOERR\n")); - sqlite3_io_error_hit++; - if( !sqlite3_io_error_benign ) sqlite3_io_error_hardhit++; -} -#define SimulateDiskfullError(CODE) \ - if( sqlite3_diskfull_pending ){ \ - if( sqlite3_diskfull_pending == 1 ){ \ - local_ioerr(); \ - sqlite3_diskfull = 1; \ - sqlite3_io_error_hit = 1; \ - CODE; \ - }else{ \ - sqlite3_diskfull_pending--; \ - } \ - } -#else -#define SimulateIOErrorBenign(X) -#define SimulateIOError(A) -#define SimulateDiskfullError(A) -#endif /* defined(SQLITE_TEST) */ - -/* -** When testing, keep a count of the number of open files. -*/ -#if defined(SQLITE_TEST) -SQLITE_API extern int sqlite3_open_file_count; -#define OpenCounter(X) sqlite3_open_file_count+=(X) -#else -#define OpenCounter(X) -#endif /* defined(SQLITE_TEST) */ - -#endif /* !defined(_OS_COMMON_H_) */ - -/************** End of os_common.h *******************************************/ -/************** Continuing where we left off in mutex_w32.c ******************/ +/* #include "os_common.h" */ /* ** Include the header file for the Windows VFS. @@ -28102,7 +28396,7 @@ SQLITE_API void *sqlite3_malloc64(sqlite3_uint64 n){ ** TRUE if p is a lookaside memory allocation from db */ #ifndef SQLITE_OMIT_LOOKASIDE -static int isLookaside(sqlite3 *db, void *p){ +static int isLookaside(sqlite3 *db, const void *p){ return SQLITE_WITHIN(p, db->lookaside.pStart, db->lookaside.pEnd); } #else @@ -28113,18 +28407,18 @@ static int isLookaside(sqlite3 *db, void *p){ ** Return the size of a memory allocation previously obtained from ** sqlite3Malloc() or sqlite3_malloc(). */ -SQLITE_PRIVATE int sqlite3MallocSize(void *p){ +SQLITE_PRIVATE int sqlite3MallocSize(const void *p){ assert( sqlite3MemdebugHasType(p, MEMTYPE_HEAP) ); - return sqlite3GlobalConfig.m.xSize(p); + return sqlite3GlobalConfig.m.xSize((void*)p); } -static int lookasideMallocSize(sqlite3 *db, void *p){ +static int lookasideMallocSize(sqlite3 *db, const void *p){ #ifndef SQLITE_OMIT_TWOSIZE_LOOKASIDE return plookaside.pMiddle ? db->lookaside.szTrue : LOOKASIDE_SMALL; #else return db->lookaside.szTrue; #endif } -SQLITE_PRIVATE int sqlite3DbMallocSize(sqlite3 *db, void *p){ +SQLITE_PRIVATE int sqlite3DbMallocSize(sqlite3 *db, const void *p){ assert( p!=0 ); #ifdef SQLITE_DEBUG if( db==0 || !isLookaside(db,p) ){ @@ -28151,7 +28445,7 @@ SQLITE_PRIVATE int sqlite3DbMallocSize(sqlite3 *db, void *p){ } } } - return sqlite3GlobalConfig.m.xSize(p); + return sqlite3GlobalConfig.m.xSize((void*)p); } SQLITE_API sqlite3_uint64 sqlite3_msize(void *p){ assert( sqlite3MemdebugNoType(p, (u8)~MEMTYPE_HEAP) ); @@ -28761,7 +29055,7 @@ static char et_getdigit(LONGDOUBLE_TYPE *val, int *cnt){ /* ** Set the StrAccum object to an error mode. 
*/ -static void setStrAccumError(StrAccum *p, u8 eError){ +SQLITE_PRIVATE void sqlite3StrAccumSetError(StrAccum *p, u8 eError){ assert( eError==SQLITE_NOMEM || eError==SQLITE_TOOBIG ); p->accError = eError; if( p->mxAlloc ) sqlite3_str_reset(p); @@ -28797,12 +29091,12 @@ static char *printfTempBuf(sqlite3_str *pAccum, sqlite3_int64 n){ char *z; if( pAccum->accError ) return 0; if( n>pAccum->nAlloc && n>pAccum->mxAlloc ){ - setStrAccumError(pAccum, SQLITE_TOOBIG); + sqlite3StrAccumSetError(pAccum, SQLITE_TOOBIG); return 0; } z = sqlite3DbMallocRaw(pAccum->db, n); if( z==0 ){ - setStrAccumError(pAccum, SQLITE_NOMEM); + sqlite3StrAccumSetError(pAccum, SQLITE_NOMEM); } return z; } @@ -29541,7 +29835,7 @@ static int sqlite3StrAccumEnlarge(StrAccum *p, int N){ return 0; } if( p->mxAlloc==0 ){ - setStrAccumError(p, SQLITE_TOOBIG); + sqlite3StrAccumSetError(p, SQLITE_TOOBIG); return p->nAlloc - p->nChar - 1; }else{ char *zOld = isMalloced(p) ? p->zText : 0; @@ -29554,7 +29848,7 @@ static int sqlite3StrAccumEnlarge(StrAccum *p, int N){ } if( szNew > p->mxAlloc ){ sqlite3_str_reset(p); - setStrAccumError(p, SQLITE_TOOBIG); + sqlite3StrAccumSetError(p, SQLITE_TOOBIG); return 0; }else{ p->nAlloc = (int)szNew; @@ -29572,7 +29866,7 @@ static int sqlite3StrAccumEnlarge(StrAccum *p, int N){ p->printfFlags |= SQLITE_PRINTF_MALLOCED; }else{ sqlite3_str_reset(p); - setStrAccumError(p, SQLITE_NOMEM); + sqlite3StrAccumSetError(p, SQLITE_NOMEM); return 0; } } @@ -29645,7 +29939,7 @@ static SQLITE_NOINLINE char *strAccumFinishRealloc(StrAccum *p){ memcpy(zText, p->zText, p->nChar+1); p->printfFlags |= SQLITE_PRINTF_MALLOCED; }else{ - setStrAccumError(p, SQLITE_NOMEM); + sqlite3StrAccumSetError(p, SQLITE_NOMEM); } p->zText = zText; return zText; @@ -29660,6 +29954,22 @@ SQLITE_PRIVATE char *sqlite3StrAccumFinish(StrAccum *p){ return p->zText; } +/* +** Use the content of the StrAccum passed as the second argument +** as the result of an SQL function. +*/ +SQLITE_PRIVATE void sqlite3ResultStrAccum(sqlite3_context *pCtx, StrAccum *p){ + if( p->accError ){ + sqlite3_result_error_code(pCtx, p->accError); + sqlite3_str_reset(p); + }else if( isMalloced(p) ){ + sqlite3_result_text(pCtx, p->zText, p->nChar, SQLITE_DYNAMIC); + }else{ + sqlite3_result_text(pCtx, "", 0, SQLITE_STATIC); + sqlite3_str_reset(p); + } +} + /* ** This singleton is an sqlite3_str object that is returned if ** sqlite3_malloc() fails to provide space for a real one. 
This @@ -30080,6 +30390,8 @@ SQLITE_PRIVATE void sqlite3TreeViewSrcList(TreeView *pView, const SrcList *pSrc) } if( pItem->fg.jointype & JT_LEFT ){ sqlite3_str_appendf(&x, " LEFT-JOIN"); + }else if( pItem->fg.jointype & JT_CROSS ){ + sqlite3_str_appendf(&x, " CROSS-JOIN"); } if( pItem->fg.fromDDL ){ sqlite3_str_appendf(&x, " DDL"); @@ -30378,6 +30690,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m sqlite3TreeViewLine(pView, "COLUMN(%d)%s%s", pExpr->iColumn, zFlgs, zOp2); }else{ + assert( ExprUseYTab(pExpr) ); sqlite3TreeViewLine(pView, "{%d:%d} pTab=%p%s", pExpr->iTable, pExpr->iColumn, pExpr->y.pTab, zFlgs); @@ -30397,11 +30710,13 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m } #ifndef SQLITE_OMIT_FLOATING_POINT case TK_FLOAT: { + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView,"%s", pExpr->u.zToken); break; } #endif case TK_STRING: { + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView,"%Q", pExpr->u.zToken); break; } @@ -30410,17 +30725,19 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m break; } case TK_TRUEFALSE: { - sqlite3TreeViewLine(pView, - sqlite3ExprTruthValue(pExpr) ? "TRUE" : "FALSE"); + sqlite3TreeViewLine(pView,"%s%s", + sqlite3ExprTruthValue(pExpr) ? "TRUE" : "FALSE", zFlgs); break; } #ifndef SQLITE_OMIT_BLOB_LITERAL case TK_BLOB: { + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView,"%s", pExpr->u.zToken); break; } #endif case TK_VARIABLE: { + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView,"VARIABLE(%s,%d)", pExpr->u.zToken, pExpr->iColumn); break; @@ -30430,12 +30747,14 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m break; } case TK_ID: { + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView,"ID \"%w\"", pExpr->u.zToken); break; } #ifndef SQLITE_OMIT_CAST case TK_CAST: { /* Expressions of the form: CAST(pLeft AS token) */ + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView,"CAST %Q", pExpr->u.zToken); sqlite3TreeViewExpr(pView, pExpr->pLeft, 0); break; @@ -30485,6 +30804,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m } case TK_SPAN: { + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView, "SPAN %Q", pExpr->u.zToken); sqlite3TreeViewExpr(pView, pExpr->pLeft, 0); break; @@ -30496,6 +30816,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m ** up in the treeview output as "SOFT-COLLATE". Explicit COLLATE ** operators that appear in the original SQL always have the ** EP_Collate bit set and appear in treeview output as just "COLLATE" */ + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView, "%sCOLLATE %Q%s", !ExprHasProperty(pExpr, EP_Collate) ? "SOFT-" : "", pExpr->u.zToken, zFlgs); @@ -30511,6 +30832,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m pFarg = 0; pWin = 0; }else{ + assert( ExprUseXList(pExpr) ); pFarg = pExpr->x.pList; #ifndef SQLITE_OMIT_WINDOWFUNC pWin = ExprHasProperty(pExpr, EP_WinFunc) ? 
pExpr->y.pWin : 0; @@ -30518,6 +30840,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m pWin = 0; #endif } + assert( !ExprHasProperty(pExpr, EP_IntValue) ); if( pExpr->op==TK_AGG_FUNCTION ){ sqlite3TreeViewLine(pView, "AGG_FUNCTION%d %Q%s agg=%d[%d]/%p", pExpr->op2, pExpr->u.zToken, zFlgs, @@ -30549,11 +30872,13 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m } #ifndef SQLITE_OMIT_SUBQUERY case TK_EXISTS: { + assert( ExprUseXSelect(pExpr) ); sqlite3TreeViewLine(pView, "EXISTS-expr flags=0x%x", pExpr->flags); sqlite3TreeViewSelect(pView, pExpr->x.pSelect, 0); break; } case TK_SELECT: { + assert( ExprUseXSelect(pExpr) ); sqlite3TreeViewLine(pView, "subquery-expr flags=0x%x", pExpr->flags); sqlite3TreeViewSelect(pView, pExpr->x.pSelect, 0); break; @@ -30561,7 +30886,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m case TK_IN: { sqlite3TreeViewLine(pView, "IN flags=0x%x", pExpr->flags); sqlite3TreeViewExpr(pView, pExpr->pLeft, 1); - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ sqlite3TreeViewSelect(pView, pExpr->x.pSelect, 0); }else{ sqlite3TreeViewExprList(pView, pExpr->x.pList, 0, 0); @@ -30582,9 +30907,12 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m ** Z is stored in pExpr->pList->a[1].pExpr. */ case TK_BETWEEN: { - Expr *pX = pExpr->pLeft; - Expr *pY = pExpr->x.pList->a[0].pExpr; - Expr *pZ = pExpr->x.pList->a[1].pExpr; + const Expr *pX, *pY, *pZ; + pX = pExpr->pLeft; + assert( ExprUseXList(pExpr) ); + assert( pExpr->x.pList->nExpr==2 ); + pY = pExpr->x.pList->a[0].pExpr; + pZ = pExpr->x.pList->a[1].pExpr; sqlite3TreeViewLine(pView, "BETWEEN"); sqlite3TreeViewExpr(pView, pX, 1); sqlite3TreeViewExpr(pView, pY, 1); @@ -30606,6 +30934,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m case TK_CASE: { sqlite3TreeViewLine(pView, "CASE"); sqlite3TreeViewExpr(pView, pExpr->pLeft, 1); + assert( ExprUseXList(pExpr) ); sqlite3TreeViewExprList(pView, pExpr->x.pList, 0, 0); break; } @@ -30618,6 +30947,7 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m case OE_Fail: zType = "fail"; break; case OE_Ignore: zType = "ignore"; break; } + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3TreeViewLine(pView, "RAISE %s(%Q)", zType, pExpr->u.zToken); break; } @@ -30630,12 +30960,16 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m } case TK_VECTOR: { char *z = sqlite3_mprintf("VECTOR%s",zFlgs); + assert( ExprUseXList(pExpr) ); sqlite3TreeViewBareExprList(pView, pExpr->x.pList, z); sqlite3_free(z); break; } case TK_SELECT_COLUMN: { - sqlite3TreeViewLine(pView, "SELECT-COLUMN %d", pExpr->iColumn); + sqlite3TreeViewLine(pView, "SELECT-COLUMN %d of [0..%d]%s", + pExpr->iColumn, pExpr->iTable-1, + pExpr->pRight==pExpr->pLeft ? 
" (SELECT-owner)" : ""); + assert( ExprUseXSelect(pExpr->pLeft) ); sqlite3TreeViewSelect(pView, pExpr->pLeft->x.pSelect, 0); break; } @@ -30652,6 +30986,15 @@ SQLITE_PRIVATE void sqlite3TreeViewExpr(TreeView *pView, const Expr *pExpr, u8 m sqlite3TreeViewExpr(pView, &tmp, 0); break; } + case TK_ROW: { + if( pExpr->iColumn<=0 ){ + sqlite3TreeViewLine(pView, "First FROM table rowid"); + }else{ + sqlite3TreeViewLine(pView, "First FROM table column %d", + pExpr->iColumn-1); + } + break; + } default: { sqlite3TreeViewLine(pView, "op=%d", pExpr->op); break; @@ -31703,16 +32046,6 @@ SQLITE_PRIVATE void sqlite3UtfSelfTest(void){ #include #endif -/* -** Routine needed to support the testcase() macro. -*/ -#ifdef SQLITE_COVERAGE_TEST -SQLITE_PRIVATE void sqlite3Coverage(int x){ - static unsigned dummy = 0; - dummy += (unsigned)x; -} -#endif - /* ** Calls to sqlite3FaultSim() are used to simulate a failure during testing, ** or to bypass normal error detection during testing in order to let @@ -31742,11 +32075,21 @@ SQLITE_PRIVATE int sqlite3FaultSim(int iTest){ #ifndef SQLITE_OMIT_FLOATING_POINT /* ** Return true if the floating point value is Not a Number (NaN). +** +** Use the math library isnan() function if compiled with SQLITE_HAVE_ISNAN. +** Otherwise, we have our own implementation that works on most systems. */ SQLITE_PRIVATE int sqlite3IsNaN(double x){ + int rc; /* The value return */ +#if !SQLITE_HAVE_ISNAN && !HAVE_ISNAN u64 y; memcpy(&y,&x,sizeof(y)); - return IsNaN(y); + rc = IsNaN(y); +#else + rc = isnan(x); +#endif /* HAVE_ISNAN */ + testcase( rc ); + return rc; } #endif /* SQLITE_OMIT_FLOATING_POINT */ @@ -31771,8 +32114,14 @@ SQLITE_PRIVATE int sqlite3Strlen30(const char *z){ ** the column name if and only if the COLFLAG_HASTYPE flag is set. */ SQLITE_PRIVATE char *sqlite3ColumnType(Column *pCol, char *zDflt){ - if( (pCol->colFlags & COLFLAG_HASTYPE)==0 ) return zDflt; - return pCol->zName + strlen(pCol->zName) + 1; + if( pCol->colFlags & COLFLAG_HASTYPE ){ + return pCol->zCnName + strlen(pCol->zCnName) + 1; + }else if( pCol->eCType ){ + assert( pCol->eCType<=SQLITE_N_STDTYPE ); + return (char*)sqlite3StdType[pCol->eCType-1]; + }else{ + return zDflt; + } } /* @@ -31943,11 +32292,34 @@ SQLITE_PRIVATE void sqlite3Dequote(char *z){ z[j] = 0; } SQLITE_PRIVATE void sqlite3DequoteExpr(Expr *p){ + assert( !ExprHasProperty(p, EP_IntValue) ); assert( sqlite3Isquote(p->u.zToken[0]) ); p->flags |= p->u.zToken[0]=='"' ? EP_Quoted|EP_DblQuoted : EP_Quoted; sqlite3Dequote(p->u.zToken); } +/* +** If the input token p is quoted, try to adjust the token to remove +** the quotes. This is not always possible: +** +** "abc" -> abc +** "ab""cd" -> (not possible because of the interior "") +** +** Remove the quotes if possible. This is a optimization. The overall +** system should still return the correct answer even if this routine +** is always a no-op. +*/ +SQLITE_PRIVATE void sqlite3DequoteToken(Token *p){ + unsigned int i; + if( p->n<2 ) return; + if( !sqlite3Isquote(p->z[0]) ) return; + for(i=1; in-1; i++){ + if( sqlite3Isquote(p->z[i]) ) return; + } + p->n -= 2; + p->z++; +} + /* ** Generate a Token object from a string */ @@ -33053,13 +33425,13 @@ static void logBadConnection(const char *zType){ ** used as an argument to sqlite3_errmsg() or sqlite3_close(). 
*/ SQLITE_PRIVATE int sqlite3SafetyCheckOk(sqlite3 *db){ - u32 magic; + u8 eOpenState; if( db==0 ){ logBadConnection("NULL"); return 0; } - magic = db->magic; - if( magic!=SQLITE_MAGIC_OPEN ){ + eOpenState = db->eOpenState; + if( eOpenState!=SQLITE_STATE_OPEN ){ if( sqlite3SafetyCheckSickOrOk(db) ){ testcase( sqlite3GlobalConfig.xLog!=0 ); logBadConnection("unopened"); @@ -33070,11 +33442,11 @@ SQLITE_PRIVATE int sqlite3SafetyCheckOk(sqlite3 *db){ } } SQLITE_PRIVATE int sqlite3SafetyCheckSickOrOk(sqlite3 *db){ - u32 magic; - magic = db->magic; - if( magic!=SQLITE_MAGIC_SICK && - magic!=SQLITE_MAGIC_OPEN && - magic!=SQLITE_MAGIC_BUSY ){ + u8 eOpenState; + eOpenState = db->eOpenState; + if( eOpenState!=SQLITE_STATE_SICK && + eOpenState!=SQLITE_STATE_OPEN && + eOpenState!=SQLITE_STATE_BUSY ){ testcase( sqlite3GlobalConfig.xLog!=0 ); logBadConnection("invalid"); return 0; @@ -33693,35 +34065,35 @@ SQLITE_PRIVATE const char *sqlite3OpcodeName(int i){ /* 18 */ "If" OpHelp(""), /* 19 */ "Not" OpHelp("r[P2]= !r[P1]"), /* 20 */ "IfNot" OpHelp(""), - /* 21 */ "IfNullRow" OpHelp("if P1.nullRow then r[P3]=NULL, goto P2"), - /* 22 */ "SeekLT" OpHelp("key=r[P3@P4]"), - /* 23 */ "SeekLE" OpHelp("key=r[P3@P4]"), - /* 24 */ "SeekGE" OpHelp("key=r[P3@P4]"), - /* 25 */ "SeekGT" OpHelp("key=r[P3@P4]"), - /* 26 */ "IfNotOpen" OpHelp("if( !csr[P1] ) goto P2"), - /* 27 */ "IfNoHope" OpHelp("key=r[P3@P4]"), - /* 28 */ "NoConflict" OpHelp("key=r[P3@P4]"), - /* 29 */ "NotFound" OpHelp("key=r[P3@P4]"), - /* 30 */ "Found" OpHelp("key=r[P3@P4]"), - /* 31 */ "SeekRowid" OpHelp("intkey=r[P3]"), - /* 32 */ "NotExists" OpHelp("intkey=r[P3]"), - /* 33 */ "Last" OpHelp(""), - /* 34 */ "IfSmaller" OpHelp(""), - /* 35 */ "SorterSort" OpHelp(""), - /* 36 */ "Sort" OpHelp(""), - /* 37 */ "Rewind" OpHelp(""), - /* 38 */ "IdxLE" OpHelp("key=r[P3@P4]"), - /* 39 */ "IdxGT" OpHelp("key=r[P3@P4]"), - /* 40 */ "IdxLT" OpHelp("key=r[P3@P4]"), - /* 41 */ "IdxGE" OpHelp("key=r[P3@P4]"), - /* 42 */ "RowSetRead" OpHelp("r[P3]=rowset(P1)"), + /* 21 */ "IsNullOrType" OpHelp("if typeof(r[P1]) IN (P3,5) goto P2"), + /* 22 */ "IfNullRow" OpHelp("if P1.nullRow then r[P3]=NULL, goto P2"), + /* 23 */ "SeekLT" OpHelp("key=r[P3@P4]"), + /* 24 */ "SeekLE" OpHelp("key=r[P3@P4]"), + /* 25 */ "SeekGE" OpHelp("key=r[P3@P4]"), + /* 26 */ "SeekGT" OpHelp("key=r[P3@P4]"), + /* 27 */ "IfNotOpen" OpHelp("if( !csr[P1] ) goto P2"), + /* 28 */ "IfNoHope" OpHelp("key=r[P3@P4]"), + /* 29 */ "NoConflict" OpHelp("key=r[P3@P4]"), + /* 30 */ "NotFound" OpHelp("key=r[P3@P4]"), + /* 31 */ "Found" OpHelp("key=r[P3@P4]"), + /* 32 */ "SeekRowid" OpHelp("intkey=r[P3]"), + /* 33 */ "NotExists" OpHelp("intkey=r[P3]"), + /* 34 */ "Last" OpHelp(""), + /* 35 */ "IfSmaller" OpHelp(""), + /* 36 */ "SorterSort" OpHelp(""), + /* 37 */ "Sort" OpHelp(""), + /* 38 */ "Rewind" OpHelp(""), + /* 39 */ "IdxLE" OpHelp("key=r[P3@P4]"), + /* 40 */ "IdxGT" OpHelp("key=r[P3@P4]"), + /* 41 */ "IdxLT" OpHelp("key=r[P3@P4]"), + /* 42 */ "IdxGE" OpHelp("key=r[P3@P4]"), /* 43 */ "Or" OpHelp("r[P3]=(r[P1] || r[P2])"), /* 44 */ "And" OpHelp("r[P3]=(r[P1] && r[P2])"), - /* 45 */ "RowSetTest" OpHelp("if r[P3] in rowset(P1) goto P2"), - /* 46 */ "Program" OpHelp(""), - /* 47 */ "FkIfZero" OpHelp("if fkctr[P1]==0 goto P2"), - /* 48 */ "IfPos" OpHelp("if r[P1]>0 then r[P1]-=P3, goto P2"), - /* 49 */ "IfNotZero" OpHelp("if r[P1]!=0 then r[P1]--, goto P2"), + /* 45 */ "RowSetRead" OpHelp("r[P3]=rowset(P1)"), + /* 46 */ "RowSetTest" OpHelp("if r[P3] in rowset(P1) goto P2"), + /* 47 */ "Program" OpHelp(""), + 
/* 48 */ "FkIfZero" OpHelp("if fkctr[P1]==0 goto P2"), + /* 49 */ "IfPos" OpHelp("if r[P1]>0 then r[P1]-=P3, goto P2"), /* 50 */ "IsNull" OpHelp("if r[P1]==NULL goto P2"), /* 51 */ "NotNull" OpHelp("if r[P1]!=NULL goto P2"), /* 52 */ "Ne" OpHelp("IF r[P3]!=r[P1]"), @@ -33731,49 +34103,49 @@ SQLITE_PRIVATE const char *sqlite3OpcodeName(int i){ /* 56 */ "Lt" OpHelp("IF r[P3]=r[P1]"), /* 58 */ "ElseEq" OpHelp(""), - /* 59 */ "DecrJumpZero" OpHelp("if (--r[P1])==0 goto P2"), - /* 60 */ "IncrVacuum" OpHelp(""), - /* 61 */ "VNext" OpHelp(""), - /* 62 */ "Init" OpHelp("Start at P2"), - /* 63 */ "PureFunc" OpHelp("r[P3]=func(r[P2@NP])"), - /* 64 */ "Function" OpHelp("r[P3]=func(r[P2@NP])"), - /* 65 */ "Return" OpHelp(""), - /* 66 */ "EndCoroutine" OpHelp(""), - /* 67 */ "HaltIfNull" OpHelp("if r[P3]=null halt"), - /* 68 */ "Halt" OpHelp(""), - /* 69 */ "Integer" OpHelp("r[P2]=P1"), - /* 70 */ "Int64" OpHelp("r[P2]=P4"), - /* 71 */ "String" OpHelp("r[P2]='P4' (len=P1)"), - /* 72 */ "Null" OpHelp("r[P2..P3]=NULL"), - /* 73 */ "SoftNull" OpHelp("r[P1]=NULL"), - /* 74 */ "Blob" OpHelp("r[P2]=P4 (len=P1)"), - /* 75 */ "Variable" OpHelp("r[P2]=parameter(P1,P4)"), - /* 76 */ "Move" OpHelp("r[P2@P3]=r[P1@P3]"), - /* 77 */ "Copy" OpHelp("r[P2@P3+1]=r[P1@P3+1]"), - /* 78 */ "SCopy" OpHelp("r[P2]=r[P1]"), - /* 79 */ "IntCopy" OpHelp("r[P2]=r[P1]"), - /* 80 */ "ChngCntRow" OpHelp("output=r[P1]"), - /* 81 */ "ResultRow" OpHelp("output=r[P1@P2]"), - /* 82 */ "CollSeq" OpHelp(""), - /* 83 */ "AddImm" OpHelp("r[P1]=r[P1]+P2"), - /* 84 */ "RealAffinity" OpHelp(""), - /* 85 */ "Cast" OpHelp("affinity(r[P1])"), - /* 86 */ "Permutation" OpHelp(""), - /* 87 */ "Compare" OpHelp("r[P1@P3] <-> r[P2@P3]"), - /* 88 */ "IsTrue" OpHelp("r[P2] = coalesce(r[P1]==TRUE,P3) ^ P4"), - /* 89 */ "ZeroOrNull" OpHelp("r[P2] = 0 OR NULL"), - /* 90 */ "Offset" OpHelp("r[P3] = sqlite_offset(P1)"), - /* 91 */ "Column" OpHelp("r[P3]=PX"), - /* 92 */ "Affinity" OpHelp("affinity(r[P1@P2])"), - /* 93 */ "MakeRecord" OpHelp("r[P3]=mkrec(r[P1@P2])"), - /* 94 */ "Count" OpHelp("r[P2]=count()"), - /* 95 */ "ReadCookie" OpHelp(""), - /* 96 */ "SetCookie" OpHelp(""), - /* 97 */ "ReopenIdx" OpHelp("root=P2 iDb=P3"), - /* 98 */ "OpenRead" OpHelp("root=P2 iDb=P3"), - /* 99 */ "OpenWrite" OpHelp("root=P2 iDb=P3"), - /* 100 */ "OpenDup" OpHelp(""), - /* 101 */ "OpenAutoindex" OpHelp("nColumn=P2"), + /* 59 */ "IfNotZero" OpHelp("if r[P1]!=0 then r[P1]--, goto P2"), + /* 60 */ "DecrJumpZero" OpHelp("if (--r[P1])==0 goto P2"), + /* 61 */ "IncrVacuum" OpHelp(""), + /* 62 */ "VNext" OpHelp(""), + /* 63 */ "Init" OpHelp("Start at P2"), + /* 64 */ "PureFunc" OpHelp("r[P3]=func(r[P2@NP])"), + /* 65 */ "Function" OpHelp("r[P3]=func(r[P2@NP])"), + /* 66 */ "Return" OpHelp(""), + /* 67 */ "EndCoroutine" OpHelp(""), + /* 68 */ "HaltIfNull" OpHelp("if r[P3]=null halt"), + /* 69 */ "Halt" OpHelp(""), + /* 70 */ "Integer" OpHelp("r[P2]=P1"), + /* 71 */ "Int64" OpHelp("r[P2]=P4"), + /* 72 */ "String" OpHelp("r[P2]='P4' (len=P1)"), + /* 73 */ "Null" OpHelp("r[P2..P3]=NULL"), + /* 74 */ "SoftNull" OpHelp("r[P1]=NULL"), + /* 75 */ "Blob" OpHelp("r[P2]=P4 (len=P1)"), + /* 76 */ "Variable" OpHelp("r[P2]=parameter(P1,P4)"), + /* 77 */ "Move" OpHelp("r[P2@P3]=r[P1@P3]"), + /* 78 */ "Copy" OpHelp("r[P2@P3+1]=r[P1@P3+1]"), + /* 79 */ "SCopy" OpHelp("r[P2]=r[P1]"), + /* 80 */ "IntCopy" OpHelp("r[P2]=r[P1]"), + /* 81 */ "ChngCntRow" OpHelp("output=r[P1]"), + /* 82 */ "ResultRow" OpHelp("output=r[P1@P2]"), + /* 83 */ "CollSeq" OpHelp(""), + /* 84 */ "AddImm" 
OpHelp("r[P1]=r[P1]+P2"), + /* 85 */ "RealAffinity" OpHelp(""), + /* 86 */ "Cast" OpHelp("affinity(r[P1])"), + /* 87 */ "Permutation" OpHelp(""), + /* 88 */ "Compare" OpHelp("r[P1@P3] <-> r[P2@P3]"), + /* 89 */ "IsTrue" OpHelp("r[P2] = coalesce(r[P1]==TRUE,P3) ^ P4"), + /* 90 */ "ZeroOrNull" OpHelp("r[P2] = 0 OR NULL"), + /* 91 */ "Offset" OpHelp("r[P3] = sqlite_offset(P1)"), + /* 92 */ "Column" OpHelp("r[P3]=PX"), + /* 93 */ "TypeCheck" OpHelp("typecheck(r[P1@P2])"), + /* 94 */ "Affinity" OpHelp("affinity(r[P1@P2])"), + /* 95 */ "MakeRecord" OpHelp("r[P3]=mkrec(r[P1@P2])"), + /* 96 */ "Count" OpHelp("r[P2]=count()"), + /* 97 */ "ReadCookie" OpHelp(""), + /* 98 */ "SetCookie" OpHelp(""), + /* 99 */ "ReopenIdx" OpHelp("root=P2 iDb=P3"), + /* 100 */ "OpenRead" OpHelp("root=P2 iDb=P3"), + /* 101 */ "OpenWrite" OpHelp("root=P2 iDb=P3"), /* 102 */ "BitAnd" OpHelp("r[P3]=r[P1]&r[P2]"), /* 103 */ "BitOr" OpHelp("r[P3]=r[P1]|r[P2]"), /* 104 */ "ShiftLeft" OpHelp("r[P3]=r[P2]<0 then r[P2]=r[P1]+max(0,r[P3]) else r[P2]=(-1)"), - /* 157 */ "AggInverse" OpHelp("accum=r[P3] inverse(r[P2@P5])"), - /* 158 */ "AggStep" OpHelp("accum=r[P3] step(r[P2@P5])"), - /* 159 */ "AggStep1" OpHelp("accum=r[P3] step(r[P2@P5])"), - /* 160 */ "AggValue" OpHelp("r[P3]=value N=P2"), - /* 161 */ "AggFinal" OpHelp("accum=r[P1] N=P2"), - /* 162 */ "Expire" OpHelp(""), - /* 163 */ "CursorLock" OpHelp(""), - /* 164 */ "CursorUnlock" OpHelp(""), - /* 165 */ "TableLock" OpHelp("iDb=P1 root=P2 write=P3"), - /* 166 */ "VBegin" OpHelp(""), - /* 167 */ "VCreate" OpHelp(""), - /* 168 */ "VDestroy" OpHelp(""), - /* 169 */ "VOpen" OpHelp(""), - /* 170 */ "VColumn" OpHelp("r[P3]=vcolumn(P2)"), - /* 171 */ "VRename" OpHelp(""), - /* 172 */ "Pagecount" OpHelp(""), - /* 173 */ "MaxPgcnt" OpHelp(""), - /* 174 */ "Trace" OpHelp(""), - /* 175 */ "CursorHint" OpHelp(""), - /* 176 */ "ReleaseReg" OpHelp("release r[P1@P2] mask P3"), - /* 177 */ "Noop" OpHelp(""), - /* 178 */ "Explain" OpHelp(""), - /* 179 */ "Abortable" OpHelp(""), + /* 153 */ "IntegrityCk" OpHelp(""), + /* 154 */ "RowSetAdd" OpHelp("rowset(P1)=r[P2]"), + /* 155 */ "Param" OpHelp(""), + /* 156 */ "FkCounter" OpHelp("fkctr[P1]+=P2"), + /* 157 */ "MemMax" OpHelp("r[P1]=max(r[P1],r[P2])"), + /* 158 */ "OffsetLimit" OpHelp("if r[P1]>0 then r[P2]=r[P1]+max(0,r[P3]) else r[P2]=(-1)"), + /* 159 */ "AggInverse" OpHelp("accum=r[P3] inverse(r[P2@P5])"), + /* 160 */ "AggStep" OpHelp("accum=r[P3] step(r[P2@P5])"), + /* 161 */ "AggStep1" OpHelp("accum=r[P3] step(r[P2@P5])"), + /* 162 */ "AggValue" OpHelp("r[P3]=value N=P2"), + /* 163 */ "AggFinal" OpHelp("accum=r[P1] N=P2"), + /* 164 */ "Expire" OpHelp(""), + /* 165 */ "CursorLock" OpHelp(""), + /* 166 */ "CursorUnlock" OpHelp(""), + /* 167 */ "TableLock" OpHelp("iDb=P1 root=P2 write=P3"), + /* 168 */ "VBegin" OpHelp(""), + /* 169 */ "VCreate" OpHelp(""), + /* 170 */ "VDestroy" OpHelp(""), + /* 171 */ "VOpen" OpHelp(""), + /* 172 */ "VColumn" OpHelp("r[P3]=vcolumn(P2)"), + /* 173 */ "VRename" OpHelp(""), + /* 174 */ "Pagecount" OpHelp(""), + /* 175 */ "MaxPgcnt" OpHelp(""), + /* 176 */ "Trace" OpHelp(""), + /* 177 */ "CursorHint" OpHelp(""), + /* 178 */ "ReleaseReg" OpHelp("release r[P1@P2] mask P3"), + /* 179 */ "Noop" OpHelp(""), + /* 180 */ "Explain" OpHelp(""), + /* 181 */ "Abortable" OpHelp(""), }; return azName[i]; } @@ -34158,205 +34532,7 @@ static pid_t randomnessPid = 0; /* ** Include code that is common to all os_*.c files */ -/************** Include os_common.h in the middle of os_unix.c ***************/ -/************** Begin 
file os_common.h ***************************************/ -/* -** 2004 May 22 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains macros and a little bit of code that is common to -** all of the platform-specific files (os_*.c) and is #included into those -** files. -** -** This file should be #included by the os_*.c files only. It is not a -** general purpose header file. -*/ -#ifndef _OS_COMMON_H_ -#define _OS_COMMON_H_ - -/* -** At least two bugs have slipped in because we changed the MEMORY_DEBUG -** macro to SQLITE_DEBUG and some older makefiles have not yet made the -** switch. The following code should catch this problem at compile-time. -*/ -#ifdef MEMORY_DEBUG -# error "The MEMORY_DEBUG macro is obsolete. Use SQLITE_DEBUG instead." -#endif - -/* -** Macros for performance tracing. Normally turned off. Only works -** on i486 hardware. -*/ -#ifdef SQLITE_PERFORMANCE_TRACE - -/* -** hwtime.h contains inline assembler code for implementing -** high-performance timing routines. -*/ -/************** Include hwtime.h in the middle of os_common.h ****************/ -/************** Begin file hwtime.h ******************************************/ -/* -** 2008 May 27 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains inline asm code for retrieving "high-performance" -** counters for x86 and x86_64 class CPUs. -*/ -#ifndef SQLITE_HWTIME_H -#define SQLITE_HWTIME_H - -/* -** The following routine only works on pentium-class (or newer) processors. -** It uses the RDTSC opcode to read the cycle count value out of the -** processor and returns that value. This can be used for high-res -** profiling. -*/ -#if !defined(__STRICT_ANSI__) && \ - (defined(__GNUC__) || defined(_MSC_VER)) && \ - (defined(i386) || defined(__i386__) || defined(_M_IX86)) - - #if defined(__GNUC__) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned int lo, hi; - __asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi)); - return (sqlite_uint64)hi << 32 | lo; - } - - #elif defined(_MSC_VER) - - __declspec(naked) __inline sqlite_uint64 __cdecl sqlite3Hwtime(void){ - __asm { - rdtsc - ret ; return value at EDX:EAX - } - } - - #endif - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__x86_64__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long val; - __asm__ __volatile__ ("rdtsc" : "=A" (val)); - return val; - } - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__ppc__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long long retval; - unsigned long junk; - __asm__ __volatile__ ("\n\ - 1: mftbu %1\n\ - mftb %L0\n\ - mftbu %0\n\ - cmpw %0,%1\n\ - bne 1b" - : "=r" (retval), "=r" (junk)); - return retval; - } - -#else - - /* - ** asm() is needed for hardware timing support. Without asm(), - ** disable the sqlite3Hwtime() routine. 
- ** - ** sqlite3Hwtime() is only used for some obscure debugging - ** and analysis configurations, not in any deliverable, so this - ** should not be a great loss. - */ -SQLITE_PRIVATE sqlite_uint64 sqlite3Hwtime(void){ return ((sqlite_uint64)0); } - -#endif - -#endif /* !defined(SQLITE_HWTIME_H) */ - -/************** End of hwtime.h **********************************************/ -/************** Continuing where we left off in os_common.h ******************/ - -static sqlite_uint64 g_start; -static sqlite_uint64 g_elapsed; -#define TIMER_START g_start=sqlite3Hwtime() -#define TIMER_END g_elapsed=sqlite3Hwtime()-g_start -#define TIMER_ELAPSED g_elapsed -#else -#define TIMER_START -#define TIMER_END -#define TIMER_ELAPSED ((sqlite_uint64)0) -#endif - -/* -** If we compile with the SQLITE_TEST macro set, then the following block -** of code will give us the ability to simulate a disk I/O error. This -** is used for testing the I/O recovery logic. -*/ -#if defined(SQLITE_TEST) -SQLITE_API extern int sqlite3_io_error_hit; -SQLITE_API extern int sqlite3_io_error_hardhit; -SQLITE_API extern int sqlite3_io_error_pending; -SQLITE_API extern int sqlite3_io_error_persist; -SQLITE_API extern int sqlite3_io_error_benign; -SQLITE_API extern int sqlite3_diskfull_pending; -SQLITE_API extern int sqlite3_diskfull; -#define SimulateIOErrorBenign(X) sqlite3_io_error_benign=(X) -#define SimulateIOError(CODE) \ - if( (sqlite3_io_error_persist && sqlite3_io_error_hit) \ - || sqlite3_io_error_pending-- == 1 ) \ - { local_ioerr(); CODE; } -static void local_ioerr(){ - IOTRACE(("IOERR\n")); - sqlite3_io_error_hit++; - if( !sqlite3_io_error_benign ) sqlite3_io_error_hardhit++; -} -#define SimulateDiskfullError(CODE) \ - if( sqlite3_diskfull_pending ){ \ - if( sqlite3_diskfull_pending == 1 ){ \ - local_ioerr(); \ - sqlite3_diskfull = 1; \ - sqlite3_io_error_hit = 1; \ - CODE; \ - }else{ \ - sqlite3_diskfull_pending--; \ - } \ - } -#else -#define SimulateIOErrorBenign(X) -#define SimulateIOError(A) -#define SimulateDiskfullError(A) -#endif /* defined(SQLITE_TEST) */ - -/* -** When testing, keep a count of the number of open files. -*/ -#if defined(SQLITE_TEST) -SQLITE_API extern int sqlite3_open_file_count; -#define OpenCounter(X) sqlite3_open_file_count+=(X) -#else -#define OpenCounter(X) -#endif /* defined(SQLITE_TEST) */ - -#endif /* !defined(_OS_COMMON_H_) */ - -/************** End of os_common.h *******************************************/ -/************** Continuing where we left off in os_unix.c ********************/ +/* #include "os_common.h" */ /* ** Define various macros that are missing from some systems. @@ -38010,7 +38186,9 @@ static void unixModeBit(unixFile *pFile, unsigned char mask, int *pArg){ /* Forward declaration */ static int unixGetTempname(int nBuf, char *zBuf); -static int unixFcntlExternalReader(unixFile*, int*); +#ifndef SQLITE_OMIT_WAL + static int unixFcntlExternalReader(unixFile*, int*); +#endif /* ** Information and control of an open file handle. @@ -38129,7 +38307,12 @@ static int unixFileControl(sqlite3_file *id, int op, void *pArg){ #endif /* SQLITE_ENABLE_LOCKING_STYLE && defined(__APPLE__) */ case SQLITE_FCNTL_EXTERNAL_READER: { +#ifndef SQLITE_OMIT_WAL return unixFcntlExternalReader((unixFile*)id, (int*)pArg); +#else + *(int*)pArg = 0; + return SQLITE_OK; +#endif } } return SQLITE_NOTFOUND; @@ -39850,25 +40033,35 @@ static int fillInUnixFile( return rc; } +/* +** Directories to consider for temp files. 
+*/ +static const char *azTempDirs[] = { + 0, + 0, + "/var/tmp", + "/usr/tmp", + "/tmp", + "." +}; + +/* +** Initialize first two members of azTempDirs[] array. +*/ +static void unixTempFileInit(void){ + azTempDirs[0] = getenv("SQLITE_TMPDIR"); + azTempDirs[1] = getenv("TMPDIR"); +} + /* ** Return the name of a directory in which to put temporary files. ** If no suitable temporary file directory can be found, return NULL. */ static const char *unixTempFileDir(void){ - static const char *azDirs[] = { - 0, - 0, - "/var/tmp", - "/usr/tmp", - "/tmp", - "." - }; unsigned int i = 0; struct stat buf; const char *zDir = sqlite3_temp_directory; - if( !azDirs[0] ) azDirs[0] = getenv("SQLITE_TMPDIR"); - if( !azDirs[1] ) azDirs[1] = getenv("TMPDIR"); while(1){ if( zDir!=0 && osStat(zDir, &buf)==0 @@ -39877,8 +40070,8 @@ static const char *unixTempFileDir(void){ ){ return zDir; } - if( i>=sizeof(azDirs)/sizeof(azDirs[0]) ) break; - zDir = azDirs[i++]; + if( i>=sizeof(azTempDirs)/sizeof(azTempDirs[0]) ) break; + zDir = azTempDirs[i++]; } return 0; } @@ -40184,6 +40377,11 @@ static int unixOpen( } memset(p, 0, sizeof(unixFile)); +#ifdef SQLITE_ASSERT_NO_FILES + /* Applications that never read or write a persistent disk files */ + assert( zName==0 ); +#endif + if( eType==SQLITE_OPEN_MAIN_DB ){ UnixUnusedFd *pUnused; pUnused = findReusableFd(zName, flags); @@ -42145,6 +42343,9 @@ SQLITE_API int sqlite3_os_init(void){ assert( UNIX_SHM_DMS==128 ); /* Byte offset of the deadman-switch */ #endif + /* Initialize temp file dir array. */ + unixTempFileInit(); + return SQLITE_OK; } @@ -42184,205 +42385,7 @@ SQLITE_API int sqlite3_os_end(void){ /* ** Include code that is common to all os_*.c files */ -/************** Include os_common.h in the middle of os_win.c ****************/ -/************** Begin file os_common.h ***************************************/ -/* -** 2004 May 22 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains macros and a little bit of code that is common to -** all of the platform-specific files (os_*.c) and is #included into those -** files. -** -** This file should be #included by the os_*.c files only. It is not a -** general purpose header file. -*/ -#ifndef _OS_COMMON_H_ -#define _OS_COMMON_H_ - -/* -** At least two bugs have slipped in because we changed the MEMORY_DEBUG -** macro to SQLITE_DEBUG and some older makefiles have not yet made the -** switch. The following code should catch this problem at compile-time. -*/ -#ifdef MEMORY_DEBUG -# error "The MEMORY_DEBUG macro is obsolete. Use SQLITE_DEBUG instead." -#endif - -/* -** Macros for performance tracing. Normally turned off. Only works -** on i486 hardware. -*/ -#ifdef SQLITE_PERFORMANCE_TRACE - -/* -** hwtime.h contains inline assembler code for implementing -** high-performance timing routines. -*/ -/************** Include hwtime.h in the middle of os_common.h ****************/ -/************** Begin file hwtime.h ******************************************/ -/* -** 2008 May 27 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. 
-** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains inline asm code for retrieving "high-performance" -** counters for x86 and x86_64 class CPUs. -*/ -#ifndef SQLITE_HWTIME_H -#define SQLITE_HWTIME_H - -/* -** The following routine only works on pentium-class (or newer) processors. -** It uses the RDTSC opcode to read the cycle count value out of the -** processor and returns that value. This can be used for high-res -** profiling. -*/ -#if !defined(__STRICT_ANSI__) && \ - (defined(__GNUC__) || defined(_MSC_VER)) && \ - (defined(i386) || defined(__i386__) || defined(_M_IX86)) - - #if defined(__GNUC__) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned int lo, hi; - __asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi)); - return (sqlite_uint64)hi << 32 | lo; - } - - #elif defined(_MSC_VER) - - __declspec(naked) __inline sqlite_uint64 __cdecl sqlite3Hwtime(void){ - __asm { - rdtsc - ret ; return value at EDX:EAX - } - } - - #endif - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__x86_64__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long val; - __asm__ __volatile__ ("rdtsc" : "=A" (val)); - return val; - } - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__ppc__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long long retval; - unsigned long junk; - __asm__ __volatile__ ("\n\ - 1: mftbu %1\n\ - mftb %L0\n\ - mftbu %0\n\ - cmpw %0,%1\n\ - bne 1b" - : "=r" (retval), "=r" (junk)); - return retval; - } - -#else - - /* - ** asm() is needed for hardware timing support. Without asm(), - ** disable the sqlite3Hwtime() routine. - ** - ** sqlite3Hwtime() is only used for some obscure debugging - ** and analysis configurations, not in any deliverable, so this - ** should not be a great loss. - */ -SQLITE_PRIVATE sqlite_uint64 sqlite3Hwtime(void){ return ((sqlite_uint64)0); } - -#endif - -#endif /* !defined(SQLITE_HWTIME_H) */ - -/************** End of hwtime.h **********************************************/ -/************** Continuing where we left off in os_common.h ******************/ - -static sqlite_uint64 g_start; -static sqlite_uint64 g_elapsed; -#define TIMER_START g_start=sqlite3Hwtime() -#define TIMER_END g_elapsed=sqlite3Hwtime()-g_start -#define TIMER_ELAPSED g_elapsed -#else -#define TIMER_START -#define TIMER_END -#define TIMER_ELAPSED ((sqlite_uint64)0) -#endif - -/* -** If we compile with the SQLITE_TEST macro set, then the following block -** of code will give us the ability to simulate a disk I/O error. This -** is used for testing the I/O recovery logic. 
-*/ -#if defined(SQLITE_TEST) -SQLITE_API extern int sqlite3_io_error_hit; -SQLITE_API extern int sqlite3_io_error_hardhit; -SQLITE_API extern int sqlite3_io_error_pending; -SQLITE_API extern int sqlite3_io_error_persist; -SQLITE_API extern int sqlite3_io_error_benign; -SQLITE_API extern int sqlite3_diskfull_pending; -SQLITE_API extern int sqlite3_diskfull; -#define SimulateIOErrorBenign(X) sqlite3_io_error_benign=(X) -#define SimulateIOError(CODE) \ - if( (sqlite3_io_error_persist && sqlite3_io_error_hit) \ - || sqlite3_io_error_pending-- == 1 ) \ - { local_ioerr(); CODE; } -static void local_ioerr(){ - IOTRACE(("IOERR\n")); - sqlite3_io_error_hit++; - if( !sqlite3_io_error_benign ) sqlite3_io_error_hardhit++; -} -#define SimulateDiskfullError(CODE) \ - if( sqlite3_diskfull_pending ){ \ - if( sqlite3_diskfull_pending == 1 ){ \ - local_ioerr(); \ - sqlite3_diskfull = 1; \ - sqlite3_io_error_hit = 1; \ - CODE; \ - }else{ \ - sqlite3_diskfull_pending--; \ - } \ - } -#else -#define SimulateIOErrorBenign(X) -#define SimulateIOError(A) -#define SimulateDiskfullError(A) -#endif /* defined(SQLITE_TEST) */ - -/* -** When testing, keep a count of the number of open files. -*/ -#if defined(SQLITE_TEST) -SQLITE_API extern int sqlite3_open_file_count; -#define OpenCounter(X) sqlite3_open_file_count+=(X) -#else -#define OpenCounter(X) -#endif /* defined(SQLITE_TEST) */ - -#endif /* !defined(_OS_COMMON_H_) */ - -/************** End of os_common.h *******************************************/ -/************** Continuing where we left off in os_win.c *********************/ +/* #include "os_common.h" */ /* ** Include the header file for the Windows VFS. @@ -48793,7 +48796,7 @@ static int memdbRead( */ static int memdbEnlarge(MemStore *p, sqlite3_int64 newSz){ unsigned char *pNew; - if( (p->mFlags & SQLITE_DESERIALIZE_RESIZEABLE)==0 || p->nMmap>0 ){ + if( (p->mFlags & SQLITE_DESERIALIZE_RESIZEABLE)==0 || NEVER(p->nMmap>0) ){ return SQLITE_FULL; } if( newSz>p->szMax ){ @@ -48852,8 +48855,9 @@ static int memdbTruncate(sqlite3_file *pFile, sqlite_int64 size){ MemStore *p = ((MemFile*)pFile)->pStore; int rc = SQLITE_OK; memdbEnter(p); - if( NEVER(size>p->sz) ){ - rc = SQLITE_FULL; + if( size>p->sz ){ + /* This can only happen with a corrupt wal mode db */ + rc = SQLITE_CORRUPT; }else{ p->sz = size; } @@ -48992,7 +48996,7 @@ static int memdbFetch( ){ MemStore *p = ((MemFile*)pFile)->pStore; memdbEnter(p); - if( iOfst+iAmt>p->sz ){ + if( iOfst+iAmt>p->sz || (p->mFlags & SQLITE_DESERIALIZE_RESIZEABLE)!=0 ){ *pp = 0; }else{ p->nMmap++; @@ -49026,10 +49030,9 @@ static int memdbOpen( MemFile *pFile = (MemFile*)pFd; MemStore *p = 0; int szName; - if( (flags & SQLITE_OPEN_MAIN_DB)==0 ){ - return ORIGVFS(pVfs)->xOpen(ORIGVFS(pVfs), zName, pFd, flags, pOutFlags); - } - memset(pFile, 0, sizeof(*p)); + UNUSED_PARAMETER(pVfs); + + memset(pFile, 0, sizeof(*pFile)); szName = sqlite3Strlen30(zName); if( szName>1 && zName[0]=='/' ){ int i; @@ -49088,8 +49091,9 @@ static int memdbOpen( p->szMax = sqlite3GlobalConfig.mxMemdbSize; } pFile->pStore = p; - assert( pOutFlags!=0 ); /* True because flags==SQLITE_OPEN_MAIN_DB */ - *pOutFlags = flags | SQLITE_OPEN_MEMORY; + if( pOutFlags!=0 ){ + *pOutFlags = flags | SQLITE_OPEN_MEMORY; + } pFd->pMethods = &memdb_io_methods; memdbLeave(p); return SQLITE_OK; @@ -49330,7 +49334,8 @@ SQLITE_API int sqlite3_deserialize( sqlite3_mutex_enter(db->mutex); if( zSchema==0 ) zSchema = db->aDb[0].zDbSName; iDb = sqlite3FindDbName(db, zSchema); - if( iDb<0 ){ + testcase( iDb==1 ); + if( iDb<2 && iDb!=0 
){ rc = SQLITE_ERROR; goto end_deserialize; } @@ -49753,7 +49758,7 @@ SQLITE_PRIVATE int sqlite3BitvecBuiltinTest(int sz, int *aOp){ sqlite3BitvecClear(0, 1, pTmpSpace); /* Run the program */ - pc = 0; + pc = i = 0; while( (op = aOp[pc])!=0 ){ switch( op ){ case 1: @@ -50057,11 +50062,14 @@ static int numberOfCachePages(PCache *p){ ** suggested cache size is set to N. */ return p->szCache; }else{ + i64 n; /* IMPLEMANTATION-OF: R-59858-46238 If the argument N is negative, then the ** number of cache pages is adjusted to be a number of pages that would ** use approximately abs(N*1024) bytes of memory based on the current ** page size. */ - return (int)((-1024*(i64)p->szCache)/(p->szPage+p->szExtra)); + n = ((-1024*(i64)p->szCache)/(p->szPage+p->szExtra)); + if( n>1000000000 ) n = 1000000000; + return (int)n; } } @@ -51516,12 +51524,18 @@ static sqlite3_pcache *pcache1Create(int szPage, int szExtra, int bPurgeable){ */ static void pcache1Cachesize(sqlite3_pcache *p, int nMax){ PCache1 *pCache = (PCache1 *)p; + u32 n; + assert( nMax>=0 ); if( pCache->bPurgeable ){ PGroup *pGroup = pCache->pGroup; pcache1EnterMutex(pGroup); - pGroup->nMaxPage += (nMax - pCache->nMax); + n = (u32)nMax; + if( n > 0x7fff0000 - pGroup->nMaxPage + pCache->nMax ){ + n = 0x7fff0000 - pGroup->nMaxPage + pCache->nMax; + } + pGroup->nMaxPage += (n - pCache->nMax); pGroup->mxPinned = pGroup->nMaxPage + 10 - pGroup->nMinPage; - pCache->nMax = nMax; + pCache->nMax = n; pCache->n90pct = pCache->nMax*9/10; pcache1EnforceMaxPage(pCache); pcache1LeaveMutex(pGroup); @@ -51537,7 +51551,7 @@ static void pcache1Shrink(sqlite3_pcache *p){ PCache1 *pCache = (PCache1*)p; if( pCache->bPurgeable ){ PGroup *pGroup = pCache->pGroup; - int savedMaxPage; + unsigned int savedMaxPage; pcache1EnterMutex(pGroup); savedMaxPage = pGroup->nMaxPage; pGroup->nMaxPage = 0; @@ -53274,6 +53288,7 @@ struct Pager { u8 noLock; /* Do not lock (except in WAL mode) */ u8 readOnly; /* True for a read-only database */ u8 memDb; /* True to inhibit all file I/O */ + u8 memVfs; /* VFS-implemented memory database */ /************************************************************************** ** The following block contains those class members that change during @@ -53323,8 +53338,8 @@ struct Pager { i16 nReserve; /* Number of unused bytes at end of each page */ u32 vfsFlags; /* Flags for sqlite3_vfs.xOpen() */ u32 sectorSize; /* Assumed sector size during rollback */ - int pageSize; /* Number of bytes in a page */ Pgno mxPgno; /* Maximum allowed size of the database */ + i64 pageSize; /* Number of bytes in a page */ i64 journalSizeLimit; /* Size limit for persistent journal files */ char *zFilename; /* Name of the database file */ char *zJournal; /* Name of the journal file */ @@ -55668,6 +55683,7 @@ static int readDbPage(PgHdr *pPg){ */ static void pager_write_changecounter(PgHdr *pPg){ u32 change_counter; + if( NEVER(pPg==0) ) return; /* Increment the value just read and write it back to byte 24. 
*/ change_counter = sqlite3Get4byte((u8*)pPg->pPager->dbFileVers)+1; @@ -57502,6 +57518,7 @@ SQLITE_PRIVATE int sqlite3PagerOpen( pPager->zWal = 0; } #endif + (void)pPtr; /* Suppress warning about unused pPtr value */ if( nPathname ) sqlite3DbFree(0, zPathname); pPager->pVfs = pVfs; @@ -57514,7 +57531,7 @@ SQLITE_PRIVATE int sqlite3PagerOpen( rc = sqlite3OsOpen(pVfs, pPager->zFilename, pPager->fd, vfsFlags, &fout); assert( !memDb ); #ifndef SQLITE_OMIT_DESERIALIZE - memJM = (fout&SQLITE_OPEN_MEMORY)!=0; + pPager->memVfs = memJM = (fout&SQLITE_OPEN_MEMORY)!=0; #endif readOnly = (fout&SQLITE_OPEN_READONLY)!=0; @@ -59381,8 +59398,8 @@ SQLITE_PRIVATE int sqlite3PagerRefcount(Pager *pPager){ ** used by the pager and its associated cache. */ SQLITE_PRIVATE int sqlite3PagerMemUsed(Pager *pPager){ - int perPageSize = pPager->pageSize + pPager->nExtra + sizeof(PgHdr) - + 5*sizeof(void*); + int perPageSize = pPager->pageSize + pPager->nExtra + + (int)(sizeof(PgHdr) + 5*sizeof(void*)); return perPageSize*sqlite3PcachePagecount(pPager->pPCache) + sqlite3MallocSize(pPager) + pPager->pageSize; @@ -59451,7 +59468,7 @@ SQLITE_PRIVATE void sqlite3PagerCacheStat(Pager *pPager, int eStat, int reset, i ** Return true if this is an in-memory or temp-file backed pager. */ SQLITE_PRIVATE int sqlite3PagerIsMemdb(Pager *pPager){ - return pPager->tempFile; + return pPager->tempFile || pPager->memVfs; } /* @@ -59576,14 +59593,14 @@ SQLITE_PRIVATE int sqlite3PagerSavepoint(Pager *pPager, int op, int iSavepoint){ } pPager->nSavepoint = nNew; - /* If this is a release of the outermost savepoint, truncate - ** the sub-journal to zero bytes in size. */ + /* Truncate the sub-journal so that it only includes the parts + ** that are still in use. */ if( op==SAVEPOINT_RELEASE ){ PagerSavepoint *pRel = &pPager->aSavepoint[nNew]; if( pRel->bTruncateOnRelease && isOpen(pPager->sjfd) ){ /* Only truncate if it is an in-memory sub-journal. */ if( sqlite3JournalIsInMemory(pPager->sjfd) ){ - i64 sz = (pPager->pageSize+4)*pRel->iSubRec; + i64 sz = (pPager->pageSize+4)*(i64)pRel->iSubRec; rc = sqlite3OsTruncate(pPager->sjfd, sz); assert( rc==SQLITE_OK ); } @@ -59771,7 +59788,7 @@ SQLITE_PRIVATE int sqlite3PagerMovepage(Pager *pPager, DbPage *pPg, Pgno pgno, i pPgOld = sqlite3PagerLookup(pPager, pgno); assert( !pPgOld || pPgOld->nRef==1 || CORRUPT_DB ); if( pPgOld ){ - if( pPgOld->nRef>1 ){ + if( NEVER(pPgOld->nRef>1) ){ sqlite3PagerUnrefNotNull(pPgOld); return SQLITE_CORRUPT_BKPT; } @@ -60517,7 +60534,10 @@ SQLITE_PRIVATE int sqlite3PagerWalFramesize(Pager *pPager){ ** HASHTABLE_NPAGE_ONE frames. The values of HASHTABLE_NPAGE_ONE and ** HASHTABLE_NPAGE are selected so that together the wal-index header and ** first index block are the same size as all other index blocks in the -** wal-index. +** wal-index. 
The values are: +** +** HASHTABLE_NPAGE 4096 +** HASHTABLE_NPAGE_ONE 4062 ** ** Each index block contains two sections, a page-mapping that contains the ** database page number associated with each wal frame, and a hash-table @@ -60753,6 +60773,70 @@ struct WalCkptInfo { }; #define READMARK_NOT_USED 0xffffffff +/* +** This is a schematic view of the complete 136-byte header of the +** wal-index file (also known as the -shm file): +** +** +-----------------------------+ +** 0: | iVersion | \ +** +-----------------------------+ | +** 4: | (unused padding) | | +** +-----------------------------+ | +** 8: | iChange | | +** +-------+-------+-------------+ | +** 12: | bInit | bBig | szPage | | +** +-------+-------+-------------+ | +** 16: | mxFrame | | First copy of the +** +-----------------------------+ | WalIndexHdr object +** 20: | nPage | | +** +-----------------------------+ | +** 24: | aFrameCksum | | +** | | | +** +-----------------------------+ | +** 32: | aSalt | | +** | | | +** +-----------------------------+ | +** 40: | aCksum | | +** | | / +** +-----------------------------+ +** 48: | iVersion | \ +** +-----------------------------+ | +** 52: | (unused padding) | | +** +-----------------------------+ | +** 56: | iChange | | +** +-------+-------+-------------+ | +** 60: | bInit | bBig | szPage | | +** +-------+-------+-------------+ | Second copy of the +** 64: | mxFrame | | WalIndexHdr +** +-----------------------------+ | +** 68: | nPage | | +** +-----------------------------+ | +** 72: | aFrameCksum | | +** | | | +** +-----------------------------+ | +** 80: | aSalt | | +** | | | +** +-----------------------------+ | +** 88: | aCksum | | +** | | / +** +-----------------------------+ +** 96: | nBackfill | +** +-----------------------------+ +** 100: | 5 read marks | +** | | +** | | +** | | +** | | +** +-------+-------+------+------+ +** 120: | Write | Ckpt | Rcvr | Rd0 | \ +** +-------+-------+------+------+ ) 8 lock bytes +** | Read1 | Read2 | Rd3 | Rd4 | / +** +-------+-------+------+------+ +** 128: | nBackfillAttempted | +** +-----------------------------+ +** 132: | (unused padding) | +** +-----------------------------+ +*/ /* A block of WALINDEX_LOCK_RESERVED bytes beginning at ** WALINDEX_LOCK_OFFSET is reserved for locks. Since some systems @@ -60909,9 +60993,13 @@ struct WalIterator { ** so. It is safe to enlarge the wal-index if pWal->writeLock is true ** or pWal->exclusiveMode==WAL_HEAPMEMORY_MODE. ** -** If this call is successful, *ppPage is set to point to the wal-index -** page and SQLITE_OK is returned. If an error (an OOM or VFS error) occurs, -** then an SQLite error code is returned and *ppPage is set to 0. 
+** Three possible result scenarios: +** +** (1) rc==SQLITE_OK and *ppPage==Requested-Wal-Index-Page +** (2) rc>=SQLITE_ERROR and *ppPage==NULL +** (3) rc==SQLITE_OK and *ppPage==NULL // only if iPage==0 +** +** Scenario (3) can only occur when pWal->writeLock is false and iPage==0 */ static SQLITE_NOINLINE int walIndexPageRealloc( Wal *pWal, /* The WAL context */ @@ -60944,7 +61032,9 @@ static SQLITE_NOINLINE int walIndexPageRealloc( rc = sqlite3OsShmMap(pWal->pDbFd, iPage, WALINDEX_PGSZ, pWal->writeLock, (void volatile **)&pWal->apWiData[iPage] ); - assert( pWal->apWiData[iPage]!=0 || rc!=SQLITE_OK || pWal->writeLock==0 ); + assert( pWal->apWiData[iPage]!=0 + || rc!=SQLITE_OK + || (pWal->writeLock==0 && iPage==0) ); testcase( pWal->apWiData[iPage]==0 && rc==SQLITE_OK ); if( rc==SQLITE_OK ){ if( iPage>0 && sqlite3FaultSim(600) ) rc = SQLITE_NOMEM; @@ -61283,8 +61373,8 @@ struct WalHashLoc { ** slot in the hash table is set to N, it refers to frame number ** (pLoc->iZero+N) in the log. ** -** Finally, set pLoc->aPgno so that pLoc->aPgno[1] is the page number of the -** first frame indexed by the hash table, frame (pLoc->iZero+1). +** Finally, set pLoc->aPgno so that pLoc->aPgno[0] is the page number of the +** first frame indexed by the hash table, frame (pLoc->iZero). */ static int walHashGet( Wal *pWal, /* WAL handle */ @@ -61296,7 +61386,7 @@ static int walHashGet( rc = walIndexPage(pWal, iHash, &pLoc->aPgno); assert( rc==SQLITE_OK || iHash>0 ); - if( rc==SQLITE_OK ){ + if( pLoc->aPgno ){ pLoc->aHash = (volatile ht_slot *)&pLoc->aPgno[HASHTABLE_NPAGE]; if( iHash==0 ){ pLoc->aPgno = &pLoc->aPgno[WALINDEX_HDR_SIZE/sizeof(u32)]; @@ -61304,7 +61394,8 @@ static int walHashGet( }else{ pLoc->iZero = HASHTABLE_NPAGE_ONE + (iHash-1)*HASHTABLE_NPAGE; } - pLoc->aPgno = &pLoc->aPgno[-1]; + }else if( NEVER(rc==SQLITE_OK) ){ + rc = SQLITE_ERROR; } return rc; } @@ -61386,8 +61477,9 @@ static void walCleanupHash(Wal *pWal){ /* Zero the entries in the aPgno array that correspond to frames with ** frame numbers greater than pWal->hdr.mxFrame. */ - nByte = (int)((char *)sLoc.aHash - (char *)&sLoc.aPgno[iLimit+1]); - memset((void *)&sLoc.aPgno[iLimit+1], 0, nByte); + nByte = (int)((char *)sLoc.aHash - (char *)&sLoc.aPgno[iLimit]); + assert( nByte>=0 ); + memset((void *)&sLoc.aPgno[iLimit], 0, nByte); #ifdef SQLITE_ENABLE_EXPENSIVE_ASSERT /* Verify that the every entry in the mapping region is still reachable @@ -61396,11 +61488,11 @@ static void walCleanupHash(Wal *pWal){ if( iLimit ){ int j; /* Loop counter */ int iKey; /* Hash key */ - for(j=1; j<=iLimit; j++){ + for(j=0; j=0 ); + memset((void*)sLoc.aPgno, 0, nByte); } /* If the entry in aPgno[] is already set, then the previous writer @@ -61443,9 +61535,9 @@ static int walIndexAppend(Wal *pWal, u32 iFrame, u32 iPage){ ** Remove the remnants of that writers uncommitted transaction from ** the hash-table before writing any new entries. */ - if( sLoc.aPgno[idx] ){ + if( sLoc.aPgno[idx-1] ){ walCleanupHash(pWal); - assert( !sLoc.aPgno[idx] ); + assert( !sLoc.aPgno[idx-1] ); } /* Write the aPgno[] array entry and the hash-table slot. 
*/ @@ -61453,7 +61545,7 @@ static int walIndexAppend(Wal *pWal, u32 iFrame, u32 iPage){ for(iKey=walHash(iPage); sLoc.aHash[iKey]; iKey=walNextHash(iKey)){ if( (nCollide--)==0 ) return SQLITE_CORRUPT_BKPT; } - sLoc.aPgno[idx] = iPage; + sLoc.aPgno[idx-1] = iPage; AtomicStore(&sLoc.aHash[iKey], (ht_slot)idx); #ifdef SQLITE_ENABLE_EXPENSIVE_ASSERT @@ -61474,19 +61566,18 @@ static int walIndexAppend(Wal *pWal, u32 iFrame, u32 iPage){ */ if( (idx&0x3ff)==0 ){ int i; /* Loop counter */ - for(i=1; i<=idx; i++){ + for(i=0; iapWiData[iPg] = aPrivate; for(iFrame=iFirst; iFrame<=iLast; iFrame++){ @@ -61766,14 +61858,43 @@ SQLITE_PRIVATE int sqlite3WalOpen( assert( zWalName && zWalName[0] ); assert( pDbFd ); + /* Verify the values of various constants. Any changes to the values + ** of these constants would result in an incompatible on-disk format + ** for the -shm file. Any change that causes one of these asserts to + ** fail is a backward compatibility problem, even if the change otherwise + ** works. + ** + ** This table also serves as a helpful cross-reference when trying to + ** interpret hex dumps of the -shm file. + */ + assert( 48 == sizeof(WalIndexHdr) ); + assert( 40 == sizeof(WalCkptInfo) ); + assert( 120 == WALINDEX_LOCK_OFFSET ); + assert( 136 == WALINDEX_HDR_SIZE ); + assert( 4096 == HASHTABLE_NPAGE ); + assert( 4062 == HASHTABLE_NPAGE_ONE ); + assert( 8192 == HASHTABLE_NSLOT ); + assert( 383 == HASHTABLE_HASH_1 ); + assert( 32768 == WALINDEX_PGSZ ); + assert( 8 == SQLITE_SHM_NLOCK ); + assert( 5 == WAL_NREADER ); + assert( 24 == WAL_FRAME_HDRSIZE ); + assert( 32 == WAL_HDRSIZE ); + assert( 120 == WALINDEX_LOCK_OFFSET + WAL_WRITE_LOCK ); + assert( 121 == WALINDEX_LOCK_OFFSET + WAL_CKPT_LOCK ); + assert( 122 == WALINDEX_LOCK_OFFSET + WAL_RECOVER_LOCK ); + assert( 123 == WALINDEX_LOCK_OFFSET + WAL_READ_LOCK(0) ); + assert( 124 == WALINDEX_LOCK_OFFSET + WAL_READ_LOCK(1) ); + assert( 125 == WALINDEX_LOCK_OFFSET + WAL_READ_LOCK(2) ); + assert( 126 == WALINDEX_LOCK_OFFSET + WAL_READ_LOCK(3) ); + assert( 127 == WALINDEX_LOCK_OFFSET + WAL_READ_LOCK(4) ); + /* In the amalgamation, the os_unix.c and os_win.c source files come before ** this source file. Verify that the #defines of the locking byte offsets ** in os_unix.c and os_win.c agree with the WALINDEX_LOCK_OFFSET value. ** For that matter, if the lock offset ever changes from its initial design ** value of 120, we need to know that so there is an assert() to check it. 
*/ - assert( 120==WALINDEX_LOCK_OFFSET ); - assert( 136==WALINDEX_HDR_SIZE ); #ifdef WIN_SHM_BASE assert( WIN_SHM_BASE==WALINDEX_LOCK_OFFSET ); #endif @@ -62075,7 +62196,6 @@ static int walIteratorInit(Wal *pWal, u32 nBackfill, WalIterator **pp){ int nEntry; /* Number of entries in this segment */ ht_slot *aIndex; /* Sorted index for this segment */ - sLoc.aPgno++; if( (i+1)==nSegment ){ nEntry = (int)(iLast - sLoc.iZero); }else{ @@ -63214,7 +63334,8 @@ SQLITE_PRIVATE int sqlite3WalSnapshotRecover(Wal *pWal){ rc = walHashGet(pWal, walFramePage(i), &sLoc); if( rc!=SQLITE_OK ) break; - pgno = sLoc.aPgno[i-sLoc.iZero]; + assert( i - sLoc.iZero - 1 >=0 ); + pgno = sLoc.aPgno[i-sLoc.iZero-1]; iDbOff = (i64)(pgno-1) * szPage; if( iDbOff+szPage<=szDb ){ @@ -63447,7 +63568,7 @@ SQLITE_PRIVATE int sqlite3WalFindFrame( iKey = walHash(pgno); while( (iH = AtomicLoad(&sLoc.aHash[iKey]))!=0 ){ u32 iFrame = iH + sLoc.iZero; - if( iFrame<=iLast && iFrame>=pWal->minFrame && sLoc.aPgno[iH]==pgno ){ + if( iFrame<=iLast && iFrame>=pWal->minFrame && sLoc.aPgno[iH-1]==pgno ){ assert( iFrame>iRead || CORRUPT_DB ); iRead = iFrame; } @@ -64699,7 +64820,6 @@ typedef struct CellInfo CellInfo; */ struct MemPage { u8 isInit; /* True if previously initialized. MUST BE FIRST! */ - u8 bBusy; /* Prevent endless loops on corrupt database files */ u8 intKey; /* True if table b-trees. False for index b-trees */ u8 intKeyLeaf; /* True if the leaf of an intKey table */ Pgno pgno; /* Page number for this page */ @@ -66280,15 +66400,13 @@ static int btreeMoveto( sqlite3VdbeRecordUnpack(pKeyInfo, (int)nKey, pKey, pIdxKey); if( pIdxKey->nField==0 || pIdxKey->nField>pKeyInfo->nAllField ){ rc = SQLITE_CORRUPT_BKPT; - goto moveto_done; + }else{ + rc = sqlite3BtreeIndexMoveto(pCur, pIdxKey, pRes); } + sqlite3DbFree(pCur->pKeyInfo->db, pIdxKey); }else{ pIdxKey = 0; - } - rc = sqlite3BtreeMovetoUnpacked(pCur, pIdxKey, nKey, bias, pRes); -moveto_done: - if( pIdxKey ){ - sqlite3DbFree(pCur->pKeyInfo->db, pIdxKey); + rc = sqlite3BtreeTableMoveto(pCur, nKey, bias, pRes); } return rc; } @@ -66943,7 +67061,7 @@ static int defragmentPage(MemPage *pPage, int nMaxFrag){ if( iFree2+sz2 > usableSize ) return SQLITE_CORRUPT_PAGE(pPage); memmove(&data[iFree+sz+sz2], &data[iFree+sz], iFree2-(iFree+sz)); sz += sz2; - }else if( iFree+sz>usableSize ){ + }else if( NEVER(iFree+sz>usableSize) ){ return SQLITE_CORRUPT_PAGE(pPage); } @@ -67137,7 +67255,7 @@ static int allocateSpace(MemPage *pPage, int nByte, int *pIdx){ int g2; assert( pSpace+nByte<=data+pPage->pBt->usableSize ); *pIdx = g2 = (int)(pSpace-data); - if( NEVER(g2<=gap) ){ + if( g2<=gap ){ return SQLITE_CORRUPT_PAGE(pPage); }else{ return SQLITE_OK; @@ -68550,7 +68668,6 @@ static int lockBtree(BtShared *pBt){ MemPage *pPage1; /* Page 1 of the database file */ u32 nPage; /* Number of pages in the database */ u32 nPageFile = 0; /* Number of pages in the database file */ - u32 nPageHeader; /* Number of pages in the database according to hdr */ assert( sqlite3_mutex_held(pBt->mutex) ); assert( pBt->pPage1==0 ); @@ -68562,7 +68679,7 @@ static int lockBtree(BtShared *pBt){ /* Do some checking to help insure the file we opened really is ** a valid database file. 
*/ - nPage = nPageHeader = get4byte(28+(u8*)pPage1->aData); + nPage = get4byte(28+(u8*)pPage1->aData); sqlite3PagerPagecount(pBt->pPager, (int*)&nPageFile); if( nPage==0 || memcmp(24+(u8*)pPage1->aData, 92+(u8*)pPage1->aData,4)!=0 ){ nPage = nPageFile; @@ -68597,7 +68714,7 @@ static int lockBtree(BtShared *pBt){ goto page1_init_failed; } - /* If the write version is set to 2, this database should be accessed + /* If the read version is set to 2, this database should be accessed ** in WAL mode. If the log is not already open, open it now. Then ** return SQLITE_OK and return without populating BtShared.pPage1. ** The caller detects this and calls this function again. This is @@ -69395,16 +69512,18 @@ SQLITE_PRIVATE int sqlite3BtreeIncrVacuum(Btree *p){ /* ** This routine is called prior to sqlite3PagerCommit when a transaction ** is committed for an auto-vacuum database. -** -** If SQLITE_OK is returned, then *pnTrunc is set to the number of pages -** the database file should be truncated to during the commit process. -** i.e. the database has been reorganized so that only the first *pnTrunc -** pages are in use. */ -static int autoVacuumCommit(BtShared *pBt){ +static int autoVacuumCommit(Btree *p){ int rc = SQLITE_OK; - Pager *pPager = pBt->pPager; - VVA_ONLY( int nRef = sqlite3PagerRefcount(pPager); ) + Pager *pPager; + BtShared *pBt; + sqlite3 *db; + VVA_ONLY( int nRef ); + + assert( p!=0 ); + pBt = p->pBt; + pPager = pBt->pPager; + VVA_ONLY( nRef = sqlite3PagerRefcount(pPager); ) assert( sqlite3_mutex_held(pBt->mutex) ); invalidateAllOverflowCache(pBt); @@ -69412,6 +69531,7 @@ static int autoVacuumCommit(BtShared *pBt){ if( !pBt->incrVacuum ){ Pgno nFin; /* Number of pages in database after autovacuuming */ Pgno nFree; /* Number of pages on the freelist initially */ + Pgno nVac; /* Number of pages to vacuum */ Pgno iFree; /* The next page to be freed */ Pgno nOrig; /* Database size before freeing */ @@ -69425,18 +69545,42 @@ static int autoVacuumCommit(BtShared *pBt){ } nFree = get4byte(&pBt->pPage1->aData[36]); - nFin = finalDbSize(pBt, nOrig, nFree); + db = p->db; + if( db->xAutovacPages ){ + int iDb; + for(iDb=0; ALWAYS(iDbnDb); iDb++){ + if( db->aDb[iDb].pBt==p ) break; + } + nVac = db->xAutovacPages( + db->pAutovacPagesArg, + db->aDb[iDb].zDbSName, + nOrig, + nFree, + pBt->pageSize + ); + if( nVac>nFree ){ + nVac = nFree; + } + if( nVac==0 ){ + return SQLITE_OK; + } + }else{ + nVac = nFree; + } + nFin = finalDbSize(pBt, nOrig, nVac); if( nFin>nOrig ) return SQLITE_CORRUPT_BKPT; if( nFinnFin && rc==SQLITE_OK; iFree--){ - rc = incrVacuumStep(pBt, nFin, iFree, 1); + rc = incrVacuumStep(pBt, nFin, iFree, nVac==nFree); } if( (rc==SQLITE_DONE || rc==SQLITE_OK) && nFree>0 ){ rc = sqlite3PagerWrite(pBt->pPage1->pDbPage); - put4byte(&pBt->pPage1->aData[32], 0); - put4byte(&pBt->pPage1->aData[36], 0); + if( nVac==nFree ){ + put4byte(&pBt->pPage1->aData[32], 0); + put4byte(&pBt->pPage1->aData[36], 0); + } put4byte(&pBt->pPage1->aData[28], nFin); pBt->bDoTruncate = 1; pBt->nPage = nFin; @@ -69487,7 +69631,7 @@ SQLITE_PRIVATE int sqlite3BtreeCommitPhaseOne(Btree *p, const char *zSuperJrnl){ sqlite3BtreeEnter(p); #ifndef SQLITE_OMIT_AUTOVACUUM if( pBt->autoVacuum ){ - rc = autoVacuumCommit(pBt); + rc = autoVacuumCommit(p); if( rc!=SQLITE_OK ){ sqlite3BtreeLeave(p); return rc; @@ -70292,7 +70436,9 @@ static int accessPayload( assert( pPage ); assert( eOp==0 || eOp==1 ); assert( pCur->eState==CURSOR_VALID ); - assert( pCur->ixnCell ); + if( pCur->ix>=pPage->nCell ){ + return 
SQLITE_CORRUPT_PAGE(pPage); + } assert( cursorHoldsMutex(pCur) ); getCellInfo(pCur); @@ -70479,7 +70625,6 @@ SQLITE_PRIVATE int sqlite3BtreePayload(BtCursor *pCur, u32 offset, u32 amt, void assert( cursorHoldsMutex(pCur) ); assert( pCur->eState==CURSOR_VALID ); assert( pCur->iPage>=0 && pCur->pPage ); - assert( pCur->ixpPage->nCell ); return accessPayload(pCur, offset, amt, (unsigned char*)pBuf, 0); } @@ -70541,7 +70686,7 @@ static const void *fetchPayload( assert( pCur->eState==CURSOR_VALID ); assert( sqlite3_mutex_held(pCur->pBtree->db->mutex) ); assert( cursorOwnsBtShared(pCur) ); - assert( pCur->ixpPage->nCell ); + assert( pCur->ixpPage->nCell || CORRUPT_DB ); assert( pCur->info.nSize>0 ); assert( pCur->info.pPayload>pCur->pPage->aData || CORRUPT_DB ); assert( pCur->info.pPayloadpPage->aDataEnd ||CORRUPT_DB); @@ -70874,12 +71019,8 @@ SQLITE_PRIVATE int sqlite3BtreeLast(BtCursor *pCur, int *pRes){ return rc; } -/* Move the cursor so that it points to an entry near the key -** specified by pIdxKey or intKey. Return a success code. -** -** For INTKEY tables, the intKey parameter is used. pIdxKey -** must be NULL. For index tables, pIdxKey is used and intKey -** is ignored. +/* Move the cursor so that it points to an entry in a table (a.k.a INTKEY) +** table near the key intKey. Return a success code. ** ** If an exact match is not found, then the cursor is always ** left pointing at a leaf page which would hold the entry if it @@ -70892,39 +71033,32 @@ SQLITE_PRIVATE int sqlite3BtreeLast(BtCursor *pCur, int *pRes){ ** *pRes is as follows: ** ** *pRes<0 The cursor is left pointing at an entry that -** is smaller than intKey/pIdxKey or if the table is empty +** is smaller than intKey or if the table is empty ** and the cursor is therefore left point to nothing. ** ** *pRes==0 The cursor is left pointing at an entry that -** exactly matches intKey/pIdxKey. +** exactly matches intKey. ** ** *pRes>0 The cursor is left pointing at an entry that -** is larger than intKey/pIdxKey. -** -** For index tables, the pIdxKey->eqSeen field is set to 1 if there -** exists an entry in the table that exactly matches pIdxKey. +** is larger than intKey. 
*/ -SQLITE_PRIVATE int sqlite3BtreeMovetoUnpacked( +SQLITE_PRIVATE int sqlite3BtreeTableMoveto( BtCursor *pCur, /* The cursor to be moved */ - UnpackedRecord *pIdxKey, /* Unpacked index key */ i64 intKey, /* The table key */ int biasRight, /* If true, bias the search to the high end */ int *pRes /* Write search results here */ ){ int rc; - RecordCompare xRecordCompare; assert( cursorOwnsBtShared(pCur) ); assert( sqlite3_mutex_held(pCur->pBtree->db->mutex) ); assert( pRes ); - assert( (pIdxKey==0)==(pCur->pKeyInfo==0) ); - assert( pCur->eState!=CURSOR_VALID || (pIdxKey==0)==(pCur->curIntKey!=0) ); + assert( pCur->pKeyInfo==0 ); + assert( pCur->eState!=CURSOR_VALID || pCur->curIntKey!=0 ); /* If the cursor is already positioned at the point we are trying ** to move to, then just return without doing any work */ - if( pIdxKey==0 - && pCur->eState==CURSOR_VALID && (pCur->curFlags & BTCF_ValidNKey)!=0 - ){ + if( pCur->eState==CURSOR_VALID && (pCur->curFlags & BTCF_ValidNKey)!=0 ){ if( pCur->info.nKey==intKey ){ *pRes = 0; return SQLITE_OK; @@ -70946,9 +71080,7 @@ SQLITE_PRIVATE int sqlite3BtreeMovetoUnpacked( if( pCur->info.nKey==intKey ){ return SQLITE_OK; } - }else if( rc==SQLITE_DONE ){ - rc = SQLITE_OK; - }else{ + }else if( rc!=SQLITE_DONE ){ return rc; } } @@ -70959,16 +71091,149 @@ SQLITE_PRIVATE int sqlite3BtreeMovetoUnpacked( pCur->pBtree->nSeek++; /* Performance measurement during testing */ #endif - if( pIdxKey ){ - xRecordCompare = sqlite3VdbeFindCompare(pIdxKey); - pIdxKey->errCode = 0; - assert( pIdxKey->default_rc==1 - || pIdxKey->default_rc==0 - || pIdxKey->default_rc==-1 - ); - }else{ - xRecordCompare = 0; /* All keys are integers */ + rc = moveToRoot(pCur); + if( rc ){ + if( rc==SQLITE_EMPTY ){ + assert( pCur->pgnoRoot==0 || pCur->pPage->nCell==0 ); + *pRes = -1; + return SQLITE_OK; + } + return rc; } + assert( pCur->pPage ); + assert( pCur->pPage->isInit ); + assert( pCur->eState==CURSOR_VALID ); + assert( pCur->pPage->nCell > 0 ); + assert( pCur->iPage==0 || pCur->apPage[0]->intKey==pCur->curIntKey ); + assert( pCur->curIntKey ); + + for(;;){ + int lwr, upr, idx, c; + Pgno chldPg; + MemPage *pPage = pCur->pPage; + u8 *pCell; /* Pointer to current cell in pPage */ + + /* pPage->nCell must be greater than zero. If this is the root-page + ** the cursor would have been INVALID above and this for(;;) loop + ** not run. If this is not the root-page, then the moveToChild() routine + ** would have already detected db corruption. Similarly, pPage must + ** be the right kind (index or table) of b-tree page. Otherwise + ** a moveToChild() or moveToRoot() call would have detected corruption. */ + assert( pPage->nCell>0 ); + assert( pPage->intKey ); + lwr = 0; + upr = pPage->nCell-1; + assert( biasRight==0 || biasRight==1 ); + idx = upr>>(1-biasRight); /* idx = biasRight ? 
upr : (lwr+upr)/2; */ + pCur->ix = (u16)idx; + for(;;){ + i64 nCellKey; + pCell = findCellPastPtr(pPage, idx); + if( pPage->intKeyLeaf ){ + while( 0x80 <= *(pCell++) ){ + if( pCell>=pPage->aDataEnd ){ + return SQLITE_CORRUPT_PAGE(pPage); + } + } + } + getVarint(pCell, (u64*)&nCellKey); + if( nCellKeyupr ){ c = -1; break; } + }else if( nCellKey>intKey ){ + upr = idx-1; + if( lwr>upr ){ c = +1; break; } + }else{ + assert( nCellKey==intKey ); + pCur->ix = (u16)idx; + if( !pPage->leaf ){ + lwr = idx; + goto moveto_table_next_layer; + }else{ + pCur->curFlags |= BTCF_ValidNKey; + pCur->info.nKey = nCellKey; + pCur->info.nSize = 0; + *pRes = 0; + return SQLITE_OK; + } + } + assert( lwr+upr>=0 ); + idx = (lwr+upr)>>1; /* idx = (lwr+upr)/2; */ + } + assert( lwr==upr+1 || !pPage->leaf ); + assert( pPage->isInit ); + if( pPage->leaf ){ + assert( pCur->ixpPage->nCell ); + pCur->ix = (u16)idx; + *pRes = c; + rc = SQLITE_OK; + goto moveto_table_finish; + } +moveto_table_next_layer: + if( lwr>=pPage->nCell ){ + chldPg = get4byte(&pPage->aData[pPage->hdrOffset+8]); + }else{ + chldPg = get4byte(findCell(pPage, lwr)); + } + pCur->ix = (u16)lwr; + rc = moveToChild(pCur, chldPg); + if( rc ) break; + } +moveto_table_finish: + pCur->info.nSize = 0; + assert( (pCur->curFlags & BTCF_ValidOvfl)==0 ); + return rc; +} + +/* Move the cursor so that it points to an entry in an index table +** near the key pIdxKey. Return a success code. +** +** If an exact match is not found, then the cursor is always +** left pointing at a leaf page which would hold the entry if it +** were present. The cursor might point to an entry that comes +** before or after the key. +** +** An integer is written into *pRes which is the result of +** comparing the key with the entry to which the cursor is +** pointing. The meaning of the integer written into +** *pRes is as follows: +** +** *pRes<0 The cursor is left pointing at an entry that +** is smaller than pIdxKey or if the table is empty +** and the cursor is therefore left point to nothing. +** +** *pRes==0 The cursor is left pointing at an entry that +** exactly matches pIdxKey. +** +** *pRes>0 The cursor is left pointing at an entry that +** is larger than pIdxKey. +** +** The pIdxKey->eqSeen field is set to 1 if there +** exists an entry in the table that exactly matches pIdxKey. +*/ +SQLITE_PRIVATE int sqlite3BtreeIndexMoveto( + BtCursor *pCur, /* The cursor to be moved */ + UnpackedRecord *pIdxKey, /* Unpacked index key */ + int *pRes /* Write search results here */ +){ + int rc; + RecordCompare xRecordCompare; + + assert( cursorOwnsBtShared(pCur) ); + assert( sqlite3_mutex_held(pCur->pBtree->db->mutex) ); + assert( pRes ); + assert( pCur->pKeyInfo!=0 ); + +#ifdef SQLITE_DEBUG + pCur->pBtree->nSeek++; /* Performance measurement during testing */ +#endif + + xRecordCompare = sqlite3VdbeFindCompare(pIdxKey); + pIdxKey->errCode = 0; + assert( pIdxKey->default_rc==1 + || pIdxKey->default_rc==0 + || pIdxKey->default_rc==-1 + ); rc = moveToRoot(pCur); if( rc ){ @@ -71001,130 +71266,92 @@ SQLITE_PRIVATE int sqlite3BtreeMovetoUnpacked( assert( pPage->intKey==(pIdxKey==0) ); lwr = 0; upr = pPage->nCell-1; - assert( biasRight==0 || biasRight==1 ); - idx = upr>>(1-biasRight); /* idx = biasRight ? 
upr : (lwr+upr)/2; */ + idx = upr>>1; /* idx = (lwr+upr)/2; */ pCur->ix = (u16)idx; - if( xRecordCompare==0 ){ - for(;;){ - i64 nCellKey; - pCell = findCellPastPtr(pPage, idx); - if( pPage->intKeyLeaf ){ - while( 0x80 <= *(pCell++) ){ - if( pCell>=pPage->aDataEnd ){ - return SQLITE_CORRUPT_PAGE(pPage); - } - } - } - getVarint(pCell, (u64*)&nCellKey); - if( nCellKeyupr ){ c = -1; break; } - }else if( nCellKey>intKey ){ - upr = idx-1; - if( lwr>upr ){ c = +1; break; } - }else{ - assert( nCellKey==intKey ); - pCur->ix = (u16)idx; - if( !pPage->leaf ){ - lwr = idx; - goto moveto_next_layer; - }else{ - pCur->curFlags |= BTCF_ValidNKey; - pCur->info.nKey = nCellKey; - pCur->info.nSize = 0; - *pRes = 0; - return SQLITE_OK; - } - } - assert( lwr+upr>=0 ); - idx = (lwr+upr)>>1; /* idx = (lwr+upr)/2; */ - } - }else{ - for(;;){ - int nCell; /* Size of the pCell cell in bytes */ - pCell = findCellPastPtr(pPage, idx); + for(;;){ + int nCell; /* Size of the pCell cell in bytes */ + pCell = findCellPastPtr(pPage, idx); - /* The maximum supported page-size is 65536 bytes. This means that - ** the maximum number of record bytes stored on an index B-Tree - ** page is less than 16384 bytes and may be stored as a 2-byte - ** varint. This information is used to attempt to avoid parsing - ** the entire cell by checking for the cases where the record is - ** stored entirely within the b-tree page by inspecting the first - ** 2 bytes of the cell. - */ - nCell = pCell[0]; - if( nCell<=pPage->max1bytePayload ){ - /* This branch runs if the record-size field of the cell is a - ** single byte varint and the record fits entirely on the main - ** b-tree page. */ - testcase( pCell+nCell+1==pPage->aDataEnd ); - c = xRecordCompare(nCell, (void*)&pCell[1], pIdxKey); - }else if( !(pCell[1] & 0x80) - && (nCell = ((nCell&0x7f)<<7) + pCell[1])<=pPage->maxLocal - ){ - /* The record-size field is a 2 byte varint and the record - ** fits entirely on the main b-tree page. */ - testcase( pCell+nCell+2==pPage->aDataEnd ); - c = xRecordCompare(nCell, (void*)&pCell[2], pIdxKey); - }else{ - /* The record flows over onto one or more overflow pages. In - ** this case the whole cell needs to be parsed, a buffer allocated - ** and accessPayload() used to retrieve the record into the - ** buffer before VdbeRecordCompare() can be called. - ** - ** If the record is corrupt, the xRecordCompare routine may read - ** up to two varints past the end of the buffer. An extra 18 - ** bytes of padding is allocated at the end of the buffer in - ** case this happens. 
*/ - void *pCellKey; - u8 * const pCellBody = pCell - pPage->childPtrSize; - const int nOverrun = 18; /* Size of the overrun padding */ - pPage->xParseCell(pPage, pCellBody, &pCur->info); - nCell = (int)pCur->info.nKey; - testcase( nCell<0 ); /* True if key size is 2^32 or more */ - testcase( nCell==0 ); /* Invalid key size: 0x80 0x80 0x00 */ - testcase( nCell==1 ); /* Invalid key size: 0x80 0x80 0x01 */ - testcase( nCell==2 ); /* Minimum legal index key size */ - if( nCell<2 || nCell/pCur->pBt->usableSize>pCur->pBt->nPage ){ - rc = SQLITE_CORRUPT_PAGE(pPage); - goto moveto_finish; - } - pCellKey = sqlite3Malloc( nCell+nOverrun ); - if( pCellKey==0 ){ - rc = SQLITE_NOMEM_BKPT; - goto moveto_finish; - } - pCur->ix = (u16)idx; - rc = accessPayload(pCur, 0, nCell, (unsigned char*)pCellKey, 0); - memset(((u8*)pCellKey)+nCell,0,nOverrun); /* Fix uninit warnings */ - pCur->curFlags &= ~BTCF_ValidOvfl; - if( rc ){ - sqlite3_free(pCellKey); - goto moveto_finish; - } - c = sqlite3VdbeRecordCompare(nCell, pCellKey, pIdxKey); + /* The maximum supported page-size is 65536 bytes. This means that + ** the maximum number of record bytes stored on an index B-Tree + ** page is less than 16384 bytes and may be stored as a 2-byte + ** varint. This information is used to attempt to avoid parsing + ** the entire cell by checking for the cases where the record is + ** stored entirely within the b-tree page by inspecting the first + ** 2 bytes of the cell. + */ + nCell = pCell[0]; + if( nCell<=pPage->max1bytePayload ){ + /* This branch runs if the record-size field of the cell is a + ** single byte varint and the record fits entirely on the main + ** b-tree page. */ + testcase( pCell+nCell+1==pPage->aDataEnd ); + c = xRecordCompare(nCell, (void*)&pCell[1], pIdxKey); + }else if( !(pCell[1] & 0x80) + && (nCell = ((nCell&0x7f)<<7) + pCell[1])<=pPage->maxLocal + ){ + /* The record-size field is a 2 byte varint and the record + ** fits entirely on the main b-tree page. */ + testcase( pCell+nCell+2==pPage->aDataEnd ); + c = xRecordCompare(nCell, (void*)&pCell[2], pIdxKey); + }else{ + /* The record flows over onto one or more overflow pages. In + ** this case the whole cell needs to be parsed, a buffer allocated + ** and accessPayload() used to retrieve the record into the + ** buffer before VdbeRecordCompare() can be called. + ** + ** If the record is corrupt, the xRecordCompare routine may read + ** up to two varints past the end of the buffer. An extra 18 + ** bytes of padding is allocated at the end of the buffer in + ** case this happens. 
*/ + void *pCellKey; + u8 * const pCellBody = pCell - pPage->childPtrSize; + const int nOverrun = 18; /* Size of the overrun padding */ + pPage->xParseCell(pPage, pCellBody, &pCur->info); + nCell = (int)pCur->info.nKey; + testcase( nCell<0 ); /* True if key size is 2^32 or more */ + testcase( nCell==0 ); /* Invalid key size: 0x80 0x80 0x00 */ + testcase( nCell==1 ); /* Invalid key size: 0x80 0x80 0x01 */ + testcase( nCell==2 ); /* Minimum legal index key size */ + if( nCell<2 || nCell/pCur->pBt->usableSize>pCur->pBt->nPage ){ + rc = SQLITE_CORRUPT_PAGE(pPage); + goto moveto_index_finish; + } + pCellKey = sqlite3Malloc( nCell+nOverrun ); + if( pCellKey==0 ){ + rc = SQLITE_NOMEM_BKPT; + goto moveto_index_finish; + } + pCur->ix = (u16)idx; + rc = accessPayload(pCur, 0, nCell, (unsigned char*)pCellKey, 0); + memset(((u8*)pCellKey)+nCell,0,nOverrun); /* Fix uninit warnings */ + pCur->curFlags &= ~BTCF_ValidOvfl; + if( rc ){ sqlite3_free(pCellKey); + goto moveto_index_finish; } - assert( - (pIdxKey->errCode!=SQLITE_CORRUPT || c==0) - && (pIdxKey->errCode!=SQLITE_NOMEM || pCur->pBtree->db->mallocFailed) - ); - if( c<0 ){ - lwr = idx+1; - }else if( c>0 ){ - upr = idx-1; - }else{ - assert( c==0 ); - *pRes = 0; - rc = SQLITE_OK; - pCur->ix = (u16)idx; - if( pIdxKey->errCode ) rc = SQLITE_CORRUPT_BKPT; - goto moveto_finish; - } - if( lwr>upr ) break; - assert( lwr+upr>=0 ); - idx = (lwr+upr)>>1; /* idx = (lwr+upr)/2 */ + c = sqlite3VdbeRecordCompare(nCell, pCellKey, pIdxKey); + sqlite3_free(pCellKey); } + assert( + (pIdxKey->errCode!=SQLITE_CORRUPT || c==0) + && (pIdxKey->errCode!=SQLITE_NOMEM || pCur->pBtree->db->mallocFailed) + ); + if( c<0 ){ + lwr = idx+1; + }else if( c>0 ){ + upr = idx-1; + }else{ + assert( c==0 ); + *pRes = 0; + rc = SQLITE_OK; + pCur->ix = (u16)idx; + if( pIdxKey->errCode ) rc = SQLITE_CORRUPT_BKPT; + goto moveto_index_finish; + } + if( lwr>upr ) break; + assert( lwr+upr>=0 ); + idx = (lwr+upr)>>1; /* idx = (lwr+upr)/2 */ } assert( lwr==upr+1 || (pPage->intKey && !pPage->leaf) ); assert( pPage->isInit ); @@ -71133,9 +71360,8 @@ SQLITE_PRIVATE int sqlite3BtreeMovetoUnpacked( pCur->ix = (u16)idx; *pRes = c; rc = SQLITE_OK; - goto moveto_finish; + goto moveto_index_finish; } -moveto_next_layer: if( lwr>=pPage->nCell ){ chldPg = get4byte(&pPage->aData[pPage->hdrOffset+8]); }else{ @@ -71145,7 +71371,7 @@ moveto_next_layer: rc = moveToChild(pCur, chldPg); if( rc ) break; } -moveto_finish: +moveto_index_finish: pCur->info.nSize = 0; assert( (pCur->curFlags & BTCF_ValidOvfl)==0 ); return rc; @@ -71246,16 +71472,6 @@ static SQLITE_NOINLINE int btreeNext(BtCursor *pCur){ return SQLITE_CORRUPT_BKPT; } - /* If the database file is corrupt, it is possible for the value of idx - ** to be invalid here. This can only occur if a second cursor modifies - ** the page while cursor pCur is holding a reference to it. Which can - ** only happen if the database is corrupt in such a way as to link the - ** page into more than one b-tree structure. - ** - ** Update 2019-12-23: appears to long longer be possible after the - ** addition of anotherValidCursor() condition on balance_deeper(). 
*/ - harmless( idx>pPage->nCell ); - if( idx>=pPage->nCell ){ if( !pPage->leaf ){ rc = moveToChild(pCur, get4byte(&pPage->aData[pPage->hdrOffset+8])); @@ -71743,7 +71959,7 @@ static int freePage2(BtShared *pBt, MemPage *pMemPage, Pgno iPage){ assert( CORRUPT_DB || iPage>1 ); assert( !pMemPage || pMemPage->pgno==iPage ); - if( iPage<2 || iPage>pBt->nPage ){ + if( NEVER(iPage<2) || iPage>pBt->nPage ){ return SQLITE_CORRUPT_BKPT; } if( pMemPage ){ @@ -72699,6 +72915,7 @@ static int editPage( pData = &aData[get2byteNotZero(&aData[hdr+5])]; if( pDatapPg->aDataEnd) ) goto editpage_fail; /* Add cells to the start of the page */ if( iNewpBt; assert( sqlite3_mutex_held(pBt->mutex) ); assert( sqlite3PagerIswriteable(pParent->pDbPage) ); @@ -73129,6 +73345,7 @@ static int balance_nonroot( goto balance_cleanup; } } + nMaxCells += apOld[i]->nCell + ArraySize(pParent->apOvfl); if( (i--)==0 ) break; if( pParent->nOverflow && i+nxDiv==pParent->aiOvfl[0] ){ @@ -73170,7 +73387,6 @@ static int balance_nonroot( /* Make nMaxCells a multiple of 4 in order to preserve 8-byte ** alignment */ - nMaxCells = nOld*(MX_CELL(pBt) + ArraySize(pParent->apOvfl)); nMaxCells = (nMaxCells + 3)&~3; /* @@ -73453,7 +73669,9 @@ static int balance_nonroot( apOld[i] = 0; rc = sqlite3PagerWrite(pNew->pDbPage); nNew++; - if( sqlite3PagerPageRefcount(pNew->pDbPage)!=1+(i==(iParentIdx-nxDiv)) ){ + if( sqlite3PagerPageRefcount(pNew->pDbPage)!=1+(i==(iParentIdx-nxDiv)) + && rc==SQLITE_OK + ){ rc = SQLITE_CORRUPT_BKPT; } if( rc ) goto balance_cleanup; @@ -73873,7 +74091,7 @@ static int balance_deeper(MemPage *pRoot, MemPage **ppChild){ ** Return SQLITE_CORRUPT if any cursor other than pCur is currently valid ** on the same B-tree as pCur. ** -** This can if a database is corrupt with two or more SQL tables +** This can occur if a database is corrupt with two or more SQL tables ** pointing to the same b-tree. If an insert occurs on one SQL table ** and causes a BEFORE TRIGGER to do a secondary insert on the other SQL ** table linked to the same b-tree. If the secondary insert causes a @@ -74102,7 +74320,7 @@ static int btreeOverwriteCell(BtCursor *pCur, const BtreePayload *pX){ do{ rc = btreeGetPage(pBt, ovflPgno, &pPage, 0); if( rc ) return rc; - if( sqlite3PagerPageRefcount(pPage->pDbPage)!=1 ){ + if( sqlite3PagerPageRefcount(pPage->pDbPage)!=1 || pPage->isInit ){ rc = SQLITE_CORRUPT_BKPT; }else{ if( iOffset+ovflPageSize<(u32)nTotal ){ @@ -74251,7 +74469,8 @@ SQLITE_PRIVATE int sqlite3BtreeInsert( ** to an adjacent cell. Move the cursor so that it is pointing either ** to the cell to be overwritten or an adjacent cell. 
*/ - rc = sqlite3BtreeMovetoUnpacked(pCur, 0, pX->nKey, flags!=0, &loc); + rc = sqlite3BtreeTableMoveto(pCur, pX->nKey, + (flags & BTREE_APPEND)!=0, &loc); if( rc ) return rc; } }else{ @@ -74274,13 +74493,11 @@ SQLITE_PRIVATE int sqlite3BtreeInsert( r.aMem = pX->aMem; r.nField = pX->nMem; r.default_rc = 0; - r.errCode = 0; - r.r1 = 0; - r.r2 = 0; r.eqSeen = 0; - rc = sqlite3BtreeMovetoUnpacked(pCur, &r, 0, flags!=0, &loc); + rc = sqlite3BtreeIndexMoveto(pCur, &r, &loc); }else{ - rc = btreeMoveto(pCur, pX->pKey, pX->nKey, flags!=0, &loc); + rc = btreeMoveto(pCur, pX->pKey, pX->nKey, + (flags & BTREE_APPEND)!=0, &loc); } if( rc ) return rc; } @@ -74343,7 +74560,10 @@ SQLITE_PRIVATE int sqlite3BtreeInsert( idx = pCur->ix; if( loc==0 ){ CellInfo info; - assert( idxnCell ); + assert( idx>=0 ); + if( idx>=pPage->nCell ){ + return SQLITE_CORRUPT_BKPT; + } rc = sqlite3PagerWrite(pPage->pDbPage); if( rc ){ goto end_insert; @@ -74525,7 +74745,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 } }while( rc==SQLITE_OK && nOut>0 ); - if( rc==SQLITE_OK && nRem>0 ){ + if( rc==SQLITE_OK && nRem>0 && ALWAYS(pPgnoOut) ){ Pgno pgnoNew; MemPage *pNew = 0; rc = allocateBtreePage(pBt, &pNew, &pgnoNew, 0, 0); @@ -74914,7 +75134,7 @@ static int clearDatabasePage( BtShared *pBt, /* The BTree that contains the table */ Pgno pgno, /* Page number to clear */ int freePageFlag, /* Deallocate page if true */ - int *pnChange /* Add number of Cells freed to this counter */ + i64 *pnChange /* Add number of Cells freed to this counter */ ){ MemPage *pPage; int rc; @@ -74929,11 +75149,12 @@ static int clearDatabasePage( } rc = getAndInitPage(pBt, pgno, &pPage, 0, 0); if( rc ) return rc; - if( pPage->bBusy ){ + if( (pBt->openFlags & BTREE_SINGLE)==0 + && sqlite3PagerPageRefcount(pPage->pDbPage)!=1 + ){ rc = SQLITE_CORRUPT_BKPT; goto cleardatabasepage_out; } - pPage->bBusy = 1; hdr = pPage->hdrOffset; for(i=0; inCell; i++){ pCell = findCell(pPage, i); @@ -74947,6 +75168,7 @@ static int clearDatabasePage( if( !pPage->leaf ){ rc = clearDatabasePage(pBt, get4byte(&pPage->aData[hdr+8]), 1, pnChange); if( rc ) goto cleardatabasepage_out; + if( pPage->intKey ) pnChange = 0; } if( pnChange ){ testcase( !pPage->intKey ); @@ -74959,7 +75181,6 @@ static int clearDatabasePage( } cleardatabasepage_out: - pPage->bBusy = 0; releasePage(pPage); return rc; } @@ -74976,7 +75197,7 @@ cleardatabasepage_out: ** If pnChange is not NULL, then the integer value pointed to by pnChange ** is incremented by the number of entries in the table. 
*/ -SQLITE_PRIVATE int sqlite3BtreeClearTable(Btree *p, int iTable, int *pnChange){ +SQLITE_PRIVATE int sqlite3BtreeClearTable(Btree *p, int iTable, i64 *pnChange){ int rc; BtShared *pBt = p->pBt; sqlite3BtreeEnter(p); @@ -75038,10 +75259,10 @@ static int btreeDropTable(Btree *p, Pgno iTable, int *piMoved){ return SQLITE_CORRUPT_BKPT; } - rc = btreeGetPage(pBt, (Pgno)iTable, &pPage, 0); - if( rc ) return rc; rc = sqlite3BtreeClearTable(p, iTable, 0); - if( rc ){ + if( rc ) return rc; + rc = btreeGetPage(pBt, (Pgno)iTable, &pPage, 0); + if( NEVER(rc) ){ releasePage(pPage); return rc; } @@ -77198,6 +77419,7 @@ SQLITE_PRIVATE int sqlite3VdbeChangeEncoding(Mem *pMem, int desiredEnc){ #ifndef SQLITE_OMIT_UTF16 int rc; #endif + assert( pMem!=0 ); assert( !sqlite3VdbeMemIsRowSet(pMem) ); assert( desiredEnc==SQLITE_UTF8 || desiredEnc==SQLITE_UTF16LE || desiredEnc==SQLITE_UTF16BE ); @@ -77330,6 +77552,7 @@ static SQLITE_NOINLINE int vdbeMemAddTerminator(Mem *pMem){ ** Return SQLITE_OK on success or SQLITE_NOMEM if malloc fails. */ SQLITE_PRIVATE int sqlite3VdbeMemMakeWriteable(Mem *pMem){ + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( !sqlite3VdbeMemIsRowSet(pMem) ); if( (pMem->flags & (MEM_Str|MEM_Blob))!=0 ){ @@ -77354,6 +77577,7 @@ SQLITE_PRIVATE int sqlite3VdbeMemMakeWriteable(Mem *pMem){ #ifndef SQLITE_OMIT_INCRBLOB SQLITE_PRIVATE int sqlite3VdbeMemExpandBlob(Mem *pMem){ int nByte; + assert( pMem!=0 ); assert( pMem->flags & MEM_Zero ); assert( (pMem->flags&MEM_Blob)!=0 || MemNullNochng(pMem) ); testcase( sqlite3_value_nochange(pMem) ); @@ -77369,6 +77593,8 @@ SQLITE_PRIVATE int sqlite3VdbeMemExpandBlob(Mem *pMem){ if( sqlite3VdbeMemGrow(pMem, nByte, 1) ){ return SQLITE_NOMEM_BKPT; } + assert( pMem->z!=0 ); + assert( sqlite3DbMallocSize(pMem->db,pMem->z) >= nByte ); memset(&pMem->z[pMem->n], 0, pMem->u.nZero); pMem->n += pMem->u.nZero; @@ -77381,6 +77607,7 @@ SQLITE_PRIVATE int sqlite3VdbeMemExpandBlob(Mem *pMem){ ** Make sure the given Mem is \u0000 terminated. 
*/ SQLITE_PRIVATE int sqlite3VdbeMemNulTerminate(Mem *pMem){ + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); testcase( (pMem->flags & (MEM_Term|MEM_Str))==(MEM_Term|MEM_Str) ); testcase( (pMem->flags & (MEM_Term|MEM_Str))==0 ); @@ -77408,6 +77635,7 @@ SQLITE_PRIVATE int sqlite3VdbeMemNulTerminate(Mem *pMem){ SQLITE_PRIVATE int sqlite3VdbeMemStringify(Mem *pMem, u8 enc, u8 bForce){ const int nByte = 32; + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( !(pMem->flags&MEM_Zero) ); assert( !(pMem->flags&(MEM_Str|MEM_Blob)) ); @@ -77443,6 +77671,7 @@ SQLITE_PRIVATE int sqlite3VdbeMemFinalize(Mem *pMem, FuncDef *pFunc){ sqlite3_context ctx; Mem t; assert( pFunc!=0 ); + assert( pMem!=0 ); assert( pFunc->xFinalize!=0 ); assert( (pMem->flags & MEM_Null)!=0 || pFunc==pMem->u.pDef ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); @@ -77593,6 +77822,7 @@ static SQLITE_NOINLINE i64 memIntValue(Mem *pMem){ } SQLITE_PRIVATE i64 sqlite3VdbeIntValue(Mem *pMem){ int flags; + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( EIGHT_BYTE_ALIGNMENT(pMem) ); flags = pMem->flags; @@ -77621,6 +77851,7 @@ static SQLITE_NOINLINE double memRealValue(Mem *pMem){ return val; } SQLITE_PRIVATE double sqlite3VdbeRealValue(Mem *pMem){ + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( EIGHT_BYTE_ALIGNMENT(pMem) ); if( pMem->flags & MEM_Real ){ @@ -77653,6 +77884,7 @@ SQLITE_PRIVATE int sqlite3VdbeBooleanValue(Mem *pMem, int ifNull){ */ SQLITE_PRIVATE void sqlite3VdbeIntegerAffinity(Mem *pMem){ i64 ix; + assert( pMem!=0 ); assert( pMem->flags & MEM_Real ); assert( !sqlite3VdbeMemIsRowSet(pMem) ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); @@ -77680,6 +77912,7 @@ SQLITE_PRIVATE void sqlite3VdbeIntegerAffinity(Mem *pMem){ ** Convert pMem to type integer. Invalidate any prior representations. */ SQLITE_PRIVATE int sqlite3VdbeMemIntegerify(Mem *pMem){ + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( !sqlite3VdbeMemIsRowSet(pMem) ); assert( EIGHT_BYTE_ALIGNMENT(pMem) ); @@ -77694,6 +77927,7 @@ SQLITE_PRIVATE int sqlite3VdbeMemIntegerify(Mem *pMem){ ** Invalidate any prior representations. */ SQLITE_PRIVATE int sqlite3VdbeMemRealify(Mem *pMem){ + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( EIGHT_BYTE_ALIGNMENT(pMem) ); @@ -77727,6 +77961,7 @@ SQLITE_PRIVATE int sqlite3RealSameAsInt(double r1, sqlite3_int64 i){ ** as much of the string as we can and ignore the rest. */ SQLITE_PRIVATE int sqlite3VdbeMemNumerify(Mem *pMem){ + assert( pMem!=0 ); testcase( pMem->flags & MEM_Int ); testcase( pMem->flags & MEM_Real ); testcase( pMem->flags & MEM_IntReal ); @@ -77836,6 +78071,7 @@ SQLITE_PRIVATE void sqlite3ValueSetNull(sqlite3_value *p){ ** Delete any previous value and set the value to be a BLOB of length ** n containing all zeros. 
*/ +#ifndef SQLITE_OMIT_INCRBLOB SQLITE_PRIVATE void sqlite3VdbeMemSetZeroBlob(Mem *pMem, int n){ sqlite3VdbeMemRelease(pMem); pMem->flags = MEM_Blob|MEM_Zero; @@ -77845,6 +78081,21 @@ SQLITE_PRIVATE void sqlite3VdbeMemSetZeroBlob(Mem *pMem, int n){ pMem->enc = SQLITE_UTF8; pMem->z = 0; } +#else +SQLITE_PRIVATE int sqlite3VdbeMemSetZeroBlob(Mem *pMem, int n){ + int nByte = n>0?n:1; + if( sqlite3VdbeMemGrow(pMem, nByte, 0) ){ + return SQLITE_NOMEM_BKPT; + } + assert( pMem->z!=0 ); + assert( sqlite3DbMallocSize(pMem->db, pMem->z)>=nByte ); + memset(pMem->z, 0, nByte); + pMem->n = n>0?n:0; + pMem->flags = MEM_Blob; + pMem->enc = SQLITE_UTF8; + return SQLITE_OK; +} +#endif /* ** The pMem is known to contain content that needs to be destroyed prior @@ -78078,6 +78329,7 @@ SQLITE_PRIVATE int sqlite3VdbeMemSetStr( int iLimit; /* Maximum allowed string or blob size */ u16 flags = 0; /* New value for pMem->flags */ + assert( pMem!=0 ); assert( pMem->db==0 || sqlite3_mutex_held(pMem->db->mutex) ); assert( !sqlite3VdbeMemIsRowSet(pMem) ); @@ -78386,7 +78638,7 @@ static sqlite3_value *valueNew(sqlite3 *db, struct ValueNewStat4Ctx *p){ #ifdef SQLITE_ENABLE_STAT4 static int valueFromFunction( sqlite3 *db, /* The database connection */ - Expr *p, /* The expression to evaluate */ + const Expr *p, /* The expression to evaluate */ u8 enc, /* Encoding to use */ u8 aff, /* Affinity to use */ sqlite3_value **ppVal, /* Write the new value here */ @@ -78403,8 +78655,10 @@ static int valueFromFunction( assert( pCtx!=0 ); assert( (p->flags & EP_TokenOnly)==0 ); + assert( ExprUseXList(p) ); pList = p->x.pList; if( pList ) nVal = pList->nExpr; + assert( !ExprHasProperty(p, EP_IntValue) ); pFunc = sqlite3FindFunction(db, p->u.zToken, nVal, enc, 0); assert( pFunc ); if( (pFunc->funcFlags & (SQLITE_FUNC_CONSTANT|SQLITE_FUNC_SLOCHNG))==0 @@ -78480,7 +78734,7 @@ static int valueFromFunction( */ static int valueFromExpr( sqlite3 *db, /* The database connection */ - Expr *pExpr, /* The expression to evaluate */ + const Expr *pExpr, /* The expression to evaluate */ u8 enc, /* Encoding to use */ u8 affinity, /* Affinity to use */ sqlite3_value **ppVal, /* Write the new value here */ @@ -78508,7 +78762,9 @@ static int valueFromExpr( assert( (pExpr->flags & EP_TokenOnly)==0 || pCtx==0 ); if( op==TK_CAST ){ - u8 aff = sqlite3AffinityType(pExpr->u.zToken,0); + u8 aff; + assert( !ExprHasProperty(pExpr, EP_IntValue) ); + aff = sqlite3AffinityType(pExpr->u.zToken,0); rc = valueFromExpr(db, pExpr->pLeft, enc, aff, ppVal, pCtx); testcase( rc!=SQLITE_OK ); if( *ppVal ){ @@ -78581,6 +78837,7 @@ static int valueFromExpr( #ifndef SQLITE_OMIT_BLOB_LITERAL else if( op==TK_BLOB ){ int nVal; + assert( !ExprHasProperty(pExpr, EP_IntValue) ); assert( pExpr->u.zToken[0]=='x' || pExpr->u.zToken[0]=='X' ); assert( pExpr->u.zToken[1]=='\'' ); pVal = valueNew(db, pCtx); @@ -78598,6 +78855,7 @@ static int valueFromExpr( } #endif else if( op==TK_TRUEFALSE ){ + assert( !ExprHasProperty(pExpr, EP_IntValue) ); pVal = valueNew(db, pCtx); if( pVal ){ pVal->flags = MEM_Int; @@ -78635,7 +78893,7 @@ no_mem: */ SQLITE_PRIVATE int sqlite3ValueFromExpr( sqlite3 *db, /* The database connection */ - Expr *pExpr, /* The expression to evaluate */ + const Expr *pExpr, /* The expression to evaluate */ u8 enc, /* Encoding to use */ u8 affinity, /* Affinity to use */ sqlite3_value **ppVal /* Write the new value here */ @@ -79151,8 +79409,10 @@ SQLITE_PRIVATE int sqlite3VdbeAddOp3(Vdbe *p, int op, int p1, int p2, int p3){ if( p->nOpAlloc<=i ){ return growOp3(p, op, p1, 
p2, p3); } + assert( p->aOp!=0 ); p->nOp++; pOp = &p->aOp[i]; + assert( pOp!=0 ); pOp->opcode = (u8)op; pOp->p5 = 0; pOp->p1 = p1; @@ -80395,7 +80655,7 @@ SQLITE_PRIVATE char *sqlite3VdbeDisplayComment( if( zOpName[nOpName+1] ){ int seenCom = 0; char c; - zSynopsis = zOpName += nOpName + 1; + zSynopsis = zOpName + nOpName + 1; if( strncmp(zSynopsis,"IF ",3)==0 ){ sqlite3_snprintf(sizeof(zAlt), zAlt, "if %s goto P2", zSynopsis+3); zSynopsis = zAlt; @@ -80468,6 +80728,7 @@ static void displayP4Expr(StrAccum *p, Expr *pExpr){ const char *zOp = 0; switch( pExpr->op ){ case TK_STRING: + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3_str_appendf(p, "%Q", pExpr->u.zToken); break; case TK_INTEGER: @@ -80570,7 +80831,7 @@ SQLITE_PRIVATE char *sqlite3VdbeDisplayP4(sqlite3 *db, Op *pOp){ case P4_COLLSEQ: { static const char *const encnames[] = {"?", "8", "16LE", "16BE"}; CollSeq *pColl = pOp->p4.pColl; - assert( pColl->enc>=0 && pColl->enc<4 ); + assert( pColl->enc<4 ); sqlite3_str_appendf(&x, "%.18s-%s", pColl->zName, encnames[pColl->enc]); break; @@ -80814,8 +81075,8 @@ static void releaseMemArray(Mem *p, int N){ */ testcase( p->flags & MEM_Agg ); testcase( p->flags & MEM_Dyn ); - testcase( p->xDel==sqlite3VdbeFrameMemDel ); if( p->flags&(MEM_Agg|MEM_Dyn) ){ + testcase( (p->flags & MEM_Dyn)!=0 && p->xDel==sqlite3VdbeFrameMemDel ); sqlite3VdbeMemRelease(p); }else if( p->szMalloc ){ sqlite3DbFreeNN(db, p->zMalloc); @@ -81915,9 +82176,9 @@ SQLITE_PRIVATE int sqlite3VdbeCheckFk(Vdbe *p, int deferred){ ** has made changes and is in autocommit mode, then commit those ** changes. If a rollback is needed, then do the rollback. ** -** This routine is the only way to move the state of a VM from -** SQLITE_MAGIC_RUN to SQLITE_MAGIC_HALT. It is harmless to -** call this on a VM that is in the SQLITE_MAGIC_HALT state. +** This routine is the only way to move the sqlite3eOpenState of a VM from +** SQLITE_STATE_RUN to SQLITE_STATE_HALT. It is harmless to +** call this on a VM that is in the SQLITE_STATE_HALT state. ** ** Return an error code. If the commit could not complete because of ** lock contention, return SQLITE_BUSY. If SQLITE_BUSY is returned, it @@ -81963,9 +82224,15 @@ SQLITE_PRIVATE int sqlite3VdbeHalt(Vdbe *p){ sqlite3VdbeEnter(p); /* Check for one of the special errors */ - mrc = p->rc & 0xff; - isSpecialError = mrc==SQLITE_NOMEM || mrc==SQLITE_IOERR - || mrc==SQLITE_INTERRUPT || mrc==SQLITE_FULL; + if( p->rc ){ + mrc = p->rc & 0xff; + isSpecialError = mrc==SQLITE_NOMEM + || mrc==SQLITE_IOERR + || mrc==SQLITE_INTERRUPT + || mrc==SQLITE_FULL; + }else{ + mrc = isSpecialError = 0; + } if( isSpecialError ){ /* If the query was read-only and the error code is SQLITE_INTERRUPT, ** no rollback is necessary. 
Otherwise, at least a savepoint @@ -82017,6 +82284,9 @@ SQLITE_PRIVATE int sqlite3VdbeHalt(Vdbe *p){ return SQLITE_ERROR; } rc = SQLITE_CONSTRAINT_FOREIGNKEY; + }else if( db->flags & SQLITE_CorruptRdOnly ){ + rc = SQLITE_CORRUPT; + db->flags &= ~SQLITE_CorruptRdOnly; }else{ /* The auto-commit flag is true, the vdbe program was successful ** or hit an 'OR FAIL' constraint and there are no deferred foreign @@ -82411,7 +82681,7 @@ SQLITE_PRIVATE int SQLITE_NOINLINE sqlite3VdbeFinishMoveto(VdbeCursor *p){ assert( p->deferredMoveto ); assert( p->isTable ); assert( p->eCurType==CURTYPE_BTREE ); - rc = sqlite3BtreeMovetoUnpacked(p->uc.pCursor, 0, p->movetoTarget, 0, &res); + rc = sqlite3BtreeTableMoveto(p->uc.pCursor, p->movetoTarget, 0, &res); if( rc ) return rc; if( res!=0 ) return SQLITE_CORRUPT_BKPT; #ifdef SQLITE_TEST @@ -83195,7 +83465,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE int sqlite3BlobCompare(const Mem *pB1, const Mem ** number. Return negative, zero, or positive if the first (i64) is less than, ** equal to, or greater than the second (double). */ -static int sqlite3IntFloatCompare(i64 i, double r){ +SQLITE_PRIVATE int sqlite3IntFloatCompare(i64 i, double r){ if( sizeof(LONGDOUBLE_TYPE)>8 ){ LONGDOUBLE_TYPE x = (LONGDOUBLE_TYPE)i; testcase( xmutex) ); db->nChange = nChange; db->nTotalChange += nChange; @@ -84121,6 +84391,8 @@ SQLITE_PRIVATE void sqlite3VdbePreUpdateHook( } } + assert( pCsr!=0 ); + assert( pCsr->eCurType==CURTYPE_BTREE ); assert( pCsr->nField==pTab->nCol || (pCsr->nField==pTab->nCol+1 && op==SQLITE_DELETE && iReg==-1) ); @@ -84520,8 +84792,8 @@ SQLITE_API void sqlite3_value_free(sqlite3_value *pOld){ ** the function result. ** ** The setStrOrError() function calls sqlite3VdbeMemSetStr() to store the -** result as a string or blob but if the string or blob is too large, it -** then sets the error code to SQLITE_TOOBIG +** result as a string or blob. Appropriate errors are set if the string/blob +** is too big or if an OOM occurs. ** ** The invokeValueDestructor(P,X) routine invokes destructor function X() ** on value P is not going to be used and need to be destroyed. @@ -84533,8 +84805,16 @@ static void setResultStrOrError( u8 enc, /* Encoding of z. 0 for BLOBs */ void (*xDel)(void*) /* Destructor function */ ){ - if( sqlite3VdbeMemSetStr(pCtx->pOut, z, n, enc, xDel)==SQLITE_TOOBIG ){ - sqlite3_result_error_toobig(pCtx); + int rc = sqlite3VdbeMemSetStr(pCtx->pOut, z, n, enc, xDel); + if( rc ){ + if( rc==SQLITE_TOOBIG ){ + sqlite3_result_error_toobig(pCtx); + }else{ + /* The only errors possible from sqlite3VdbeMemSetStr are + ** SQLITE_TOOBIG and SQLITE_NOMEM */ + assert( rc==SQLITE_NOMEM ); + sqlite3_result_error_nomem(pCtx); + } } } static int invokeValueDestructor( @@ -84691,8 +84971,12 @@ SQLITE_API int sqlite3_result_zeroblob64(sqlite3_context *pCtx, u64 n){ if( n>(u64)pOut->db->aLimit[SQLITE_LIMIT_LENGTH] ){ return SQLITE_TOOBIG; } +#ifndef SQLITE_OMIT_INCRBLOB sqlite3VdbeMemSetZeroBlob(pCtx->pOut, (int)n); return SQLITE_OK; +#else + return sqlite3VdbeMemSetZeroBlob(pCtx->pOut, (int)n); +#endif } SQLITE_API void sqlite3_result_error_code(sqlite3_context *pCtx, int errCode){ pCtx->isError = errCode ? 
errCode : -1; @@ -85704,7 +85988,11 @@ SQLITE_API int sqlite3_bind_zeroblob(sqlite3_stmt *pStmt, int i, int n){ Vdbe *p = (Vdbe *)pStmt; rc = vdbeUnbind(p, i); if( rc==SQLITE_OK ){ +#ifndef SQLITE_OMIT_INCRBLOB sqlite3VdbeMemSetZeroBlob(&p->aVar[i-1], n); +#else + rc = sqlite3VdbeMemSetZeroBlob(&p->aVar[i-1], n); +#endif sqlite3_mutex_leave(p->db->mutex); } return rc; @@ -85992,6 +86280,7 @@ SQLITE_API int sqlite3_preupdate_old(sqlite3 *db, int iIdx, sqlite3_value **ppVa u32 nRec; u8 *aRec; + assert( p->pCsr->eCurType==CURTYPE_BTREE ); nRec = sqlite3BtreePayloadSize(p->pCsr->uc.pCursor); aRec = sqlite3DbMallocRaw(db, nRec); if( !aRec ) goto preupdate_old_out; @@ -86299,11 +86588,9 @@ SQLITE_PRIVATE char *sqlite3VdbeExpandSql( #ifndef SQLITE_OMIT_UTF16 Mem utf8; /* Used to convert UTF16 into UTF8 for display */ #endif - char zBase[100]; /* Initial working space */ db = p->db; - sqlite3StrAccumInit(&out, 0, zBase, sizeof(zBase), - db->aLimit[SQLITE_LIMIT_LENGTH]); + sqlite3StrAccumInit(&out, 0, 0, 0, db->aLimit[SQLITE_LIMIT_LENGTH]); if( db->nVdbeExec>1 ){ while( *zRawSql ){ const char *zStart = zRawSql; @@ -87036,96 +87323,7 @@ SQLITE_PRIVATE void sqlite3VdbeRegisterDump(Vdbe *v){ ** hwtime.h contains inline assembler code for implementing ** high-performance timing routines. */ -/************** Include hwtime.h in the middle of vdbe.c *********************/ -/************** Begin file hwtime.h ******************************************/ -/* -** 2008 May 27 -** -** The author disclaims copyright to this source code. In place of -** a legal notice, here is a blessing: -** -** May you do good and not evil. -** May you find forgiveness for yourself and forgive others. -** May you share freely, never taking more than you give. -** -****************************************************************************** -** -** This file contains inline asm code for retrieving "high-performance" -** counters for x86 and x86_64 class CPUs. -*/ -#ifndef SQLITE_HWTIME_H -#define SQLITE_HWTIME_H - -/* -** The following routine only works on pentium-class (or newer) processors. -** It uses the RDTSC opcode to read the cycle count value out of the -** processor and returns that value. This can be used for high-res -** profiling. -*/ -#if !defined(__STRICT_ANSI__) && \ - (defined(__GNUC__) || defined(_MSC_VER)) && \ - (defined(i386) || defined(__i386__) || defined(_M_IX86)) - - #if defined(__GNUC__) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned int lo, hi; - __asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi)); - return (sqlite_uint64)hi << 32 | lo; - } - - #elif defined(_MSC_VER) - - __declspec(naked) __inline sqlite_uint64 __cdecl sqlite3Hwtime(void){ - __asm { - rdtsc - ret ; return value at EDX:EAX - } - } - - #endif - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__x86_64__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long val; - __asm__ __volatile__ ("rdtsc" : "=A" (val)); - return val; - } - -#elif !defined(__STRICT_ANSI__) && (defined(__GNUC__) && defined(__ppc__)) - - __inline__ sqlite_uint64 sqlite3Hwtime(void){ - unsigned long long retval; - unsigned long junk; - __asm__ __volatile__ ("\n\ - 1: mftbu %1\n\ - mftb %L0\n\ - mftbu %0\n\ - cmpw %0,%1\n\ - bne 1b" - : "=r" (retval), "=r" (junk)); - return retval; - } - -#else - - /* - ** asm() is needed for hardware timing support. Without asm(), - ** disable the sqlite3Hwtime() routine. 
- ** - ** sqlite3Hwtime() is only used for some obscure debugging - ** and analysis configurations, not in any deliverable, so this - ** should not be a great loss. - */ -SQLITE_PRIVATE sqlite_uint64 sqlite3Hwtime(void){ return ((sqlite_uint64)0); } - -#endif - -#endif /* !defined(SQLITE_HWTIME_H) */ - -/************** End of hwtime.h **********************************************/ -/************** Continuing where we left off in vdbe.c ***********************/ +/* #include "hwtime.h" */ #endif @@ -87172,6 +87370,19 @@ static Mem *out2Prerelease(Vdbe *p, VdbeOp *pOp){ } } +/* +** Return the symbolic name for the data type of a pMem +*/ +static const char *vdbeMemTypeName(Mem *pMem){ + static const char *azTypes[] = { + /* SQLITE_INTEGER */ "INT", + /* SQLITE_FLOAT */ "REAL", + /* SQLITE_TEXT */ "TEXT", + /* SQLITE_BLOB */ "BLOB", + /* SQLITE_NULL */ "NULL" + }; + return azTypes[sqlite3_value_type(pMem)-1]; +} /* ** Execute as much of a VDBE program as we can. @@ -89003,6 +89214,22 @@ case OP_IsNull: { /* same as TK_ISNULL, jump, in1 */ break; } +/* Opcode: IsNullOrType P1 P2 P3 * * +** Synopsis: if typeof(r[P1]) IN (P3,5) goto P2 +** +** Jump to P2 if the value in register P1 is NULL or has a datatype P3. +** P3 is an integer which should be one of SQLITE_INTEGER, SQLITE_FLOAT, +** SQLITE_BLOB, SQLITE_NULL, or SQLITE_TEXT. +*/ +case OP_IsNullOrType: { /* jump, in1 */ + int doTheJump; + pIn1 = &aMem[pOp->p1]; + doTheJump = (pIn1->flags & MEM_Null)!=0 || sqlite3_value_type(pIn1)==pOp->p3; + VdbeBranchTaken( doTheJump, 2); + if( doTheJump ) goto jump_to_p2; + break; +} + /* Opcode: ZeroOrNull P1 P2 P3 * * ** Synopsis: r[P2] = 0 OR NULL ** @@ -89371,6 +89598,108 @@ op_column_corrupt: } } +/* Opcode: TypeCheck P1 P2 P3 P4 * +** Synopsis: typecheck(r[P1@P2]) +** +** Apply affinities to the range of P2 registers beginning with P1. +** Take the affinities from the Table object in P4. If any value +** cannot be coerced into the correct type, then raise an error. +** +** This opcode is similar to OP_Affinity except that this opcode +** forces the register type to the Table column type. This is used +** to implement "strict affinity". +** +** GENERATED ALWAYS AS ... STATIC columns are only checked if P3 +** is zero. When P3 is non-zero, no type checking occurs for +** static generated columns. Virtual columns are computed at query time +** and so they are never checked. +** +** Preconditions: +** +**
+**    <ul>
+**    <li> P2 should be the number of non-virtual columns in the
+**         table of P4.
+**    <li> Table P4 should be a STRICT table.
+**    </ul>
+** +** If any precondition is false, an assertion fault occurs. +*/ +case OP_TypeCheck: { + Table *pTab; + Column *aCol; + int i; + + assert( pOp->p4type==P4_TABLE ); + pTab = pOp->p4.pTab; + assert( pTab->tabFlags & TF_Strict ); + assert( pTab->nNVCol==pOp->p2 ); + aCol = pTab->aCol; + pIn1 = &aMem[pOp->p1]; + for(i=0; inCol; i++){ + if( aCol[i].colFlags & COLFLAG_GENERATED ){ + if( aCol[i].colFlags & COLFLAG_VIRTUAL ) continue; + if( pOp->p3 ){ pIn1++; continue; } + } + assert( pIn1 < &aMem[pOp->p1+pOp->p2] ); + applyAffinity(pIn1, aCol[i].affinity, encoding); + if( (pIn1->flags & MEM_Null)==0 ){ + switch( aCol[i].eCType ){ + case COLTYPE_BLOB: { + if( (pIn1->flags & MEM_Blob)==0 ) goto vdbe_type_error; + break; + } + case COLTYPE_INTEGER: + case COLTYPE_INT: { + if( (pIn1->flags & MEM_Int)==0 ) goto vdbe_type_error; + break; + } + case COLTYPE_TEXT: { + if( (pIn1->flags & MEM_Str)==0 ) goto vdbe_type_error; + break; + } + case COLTYPE_REAL: { + if( pIn1->flags & MEM_Int ){ + /* When applying REAL affinity, if the result is still an MEM_Int + ** that will fit in 6 bytes, then change the type to MEM_IntReal + ** so that we keep the high-resolution integer value but know that + ** the type really wants to be REAL. */ + testcase( pIn1->u.i==140737488355328LL ); + testcase( pIn1->u.i==140737488355327LL ); + testcase( pIn1->u.i==-140737488355328LL ); + testcase( pIn1->u.i==-140737488355329LL ); + if( pIn1->u.i<=140737488355327LL && pIn1->u.i>=-140737488355328LL){ + pIn1->flags |= MEM_IntReal; + pIn1->flags &= ~MEM_Int; + }else{ + pIn1->u.r = (double)pIn1->u.i; + pIn1->flags |= MEM_Real; + pIn1->flags &= ~MEM_Int; + } + }else if( (pIn1->flags & MEM_Real)==0 ){ + goto vdbe_type_error; + } + break; + } + default: { + /* COLTYPE_ANY. Accept anything. */ + break; + } + } + } + REGISTER_TRACE((int)(pIn1-aMem), pIn1); + pIn1++; + } + assert( pIn1 == &aMem[pOp->p1+pOp->p2] ); + break; + +vdbe_type_error: + sqlite3VdbeError(p, "cannot store %s value in %s column %s.%s", + vdbeMemTypeName(pIn1), sqlite3StdType[aCol[i].eCType-1], + pTab->zName, aCol[i].zCnName); + rc = SQLITE_CONSTRAINT_DATATYPE; + goto abort_due_to_error; +} + /* Opcode: Affinity P1 P2 * P4 * ** Synopsis: affinity(r[P1@P2]) ** @@ -90034,8 +90363,16 @@ case OP_Transaction: { assert( pOp->p2>=0 && pOp->p2<=2 ); assert( pOp->p1>=0 && pOp->p1nDb ); assert( DbMaskTest(p->btreeMask, pOp->p1) ); - if( pOp->p2 && (db->flags & SQLITE_QueryOnly)!=0 ){ - rc = SQLITE_READONLY; + assert( rc==SQLITE_OK ); + if( pOp->p2 && (db->flags & (SQLITE_QueryOnly|SQLITE_CorruptRdOnly))!=0 ){ + if( db->flags & SQLITE_QueryOnly ){ + /* Writes prohibited by the "PRAGMA query_only=TRUE" statement */ + rc = SQLITE_READONLY; + }else{ + /* Writes prohibited due to a prior SQLITE_CORRUPT in the current + ** transaction */ + rc = SQLITE_CORRUPT; + } goto abort_due_to_error; } pBt = db->aDb[pOp->p1].pBt; @@ -90077,7 +90414,8 @@ case OP_Transaction: { } } assert( pOp->p5==0 || pOp->p4type==P4_INT32 ); - if( pOp->p5 + if( rc==SQLITE_OK + && pOp->p5 && (iMeta!=pOp->p3 || db->aDb[pOp->p1].pSchema->iGeneration!=pOp->p4.i) ){ @@ -90287,6 +90625,8 @@ case OP_ReopenIdx: { pCur = p->apCsr[pOp->p1]; if( pCur && pCur->pgnoRoot==(u32)pOp->p2 ){ assert( pCur->iDb==pOp->p3 ); /* Guaranteed by the code generator */ + assert( pCur->eCurType==CURTYPE_BTREE ); + sqlite3BtreeClearCursor(pCur->uc.pCursor); goto open_cursor_set_hints; } /* If the cursor is not currently open or is open on a different @@ -90469,7 +90809,7 @@ case OP_OpenEphemeral: { aMem[pOp->p3].z = ""; } pCx = 
p->apCsr[pOp->p1]; - if( pCx && !pCx->hasBeenDuped ){ + if( pCx && !pCx->hasBeenDuped && ALWAYS(pOp->p2<=pCx->nField) ){ /* If the ephermeral table is already open and has no duplicates from ** OP_OpenDup, then erase all existing content so that the table is ** empty again, rather than creating a new table. */ @@ -90774,6 +91114,7 @@ case OP_SeekGT: { /* jump, in3, group */ /* If the P3 value could not be converted into an integer without ** loss of information, then special processing is required... */ if( (newType & (MEM_Int|MEM_IntReal))==0 ){ + int c; if( (newType & MEM_Real)==0 ){ if( (newType & MEM_Null) || oc>=OP_SeekGE ){ VdbeBranchTaken(1,2); @@ -90783,7 +91124,8 @@ case OP_SeekGT: { /* jump, in3, group */ if( rc!=SQLITE_OK ) goto abort_due_to_error; goto seek_not_found; } - }else + } + c = sqlite3IntFloatCompare(iKey, pIn3->u.r); /* If the approximation iKey is larger than the actual real search ** term, substitute >= for > and < for <=. e.g. if the search term @@ -90792,7 +91134,7 @@ case OP_SeekGT: { /* jump, in3, group */ ** (x > 4.9) -> (x >= 5) ** (x <= 4.9) -> (x < 5) */ - if( pIn3->u.r<(double)iKey ){ + if( c>0 ){ assert( OP_SeekGE==(OP_SeekGT-1) ); assert( OP_SeekLT==(OP_SeekLE-1) ); assert( (OP_SeekLE & 0x0001)==(OP_SeekGT & 0x0001) ); @@ -90801,14 +91143,14 @@ case OP_SeekGT: { /* jump, in3, group */ /* If the approximation iKey is smaller than the actual real search ** term, substitute <= for < and > for >=. */ - else if( pIn3->u.r>(double)iKey ){ + else if( c<0 ){ assert( OP_SeekLE==(OP_SeekLT+1) ); assert( OP_SeekGT==(OP_SeekGE+1) ); assert( (OP_SeekLT & 0x0001)==(OP_SeekGE & 0x0001) ); if( (oc & 0x0001)==(OP_SeekLT & 0x0001) ) oc++; } } - rc = sqlite3BtreeMovetoUnpacked(pC->uc.pCursor, 0, (u64)iKey, 0, &res); + rc = sqlite3BtreeTableMoveto(pC->uc.pCursor, (u64)iKey, 0, &res); pC->movetoTarget = iKey; /* Used by OP_Delete */ if( rc!=SQLITE_OK ){ goto abort_due_to_error; @@ -90855,7 +91197,7 @@ case OP_SeekGT: { /* jump, in3, group */ { int i; for(i=0; iuc.pCursor, &r, 0, 0, &res); + rc = sqlite3BtreeIndexMoveto(pC->uc.pCursor, &r, &res); if( rc!=SQLITE_OK ){ goto abort_due_to_error; } @@ -91274,7 +91616,7 @@ case OP_Found: { /* jump, in3 */ } } } - rc = sqlite3BtreeMovetoUnpacked(pC->uc.pCursor, pIdxKey, 0, 0, &res); + rc = sqlite3BtreeIndexMoveto(pC->uc.pCursor, pIdxKey, &res); if( pFree ) sqlite3DbFreeNN(db, pFree); if( rc!=SQLITE_OK ){ goto abort_due_to_error; @@ -91383,7 +91725,7 @@ notExistsWithKey: pCrsr = pC->uc.pCursor; assert( pCrsr!=0 ); res = 0; - rc = sqlite3BtreeMovetoUnpacked(pCrsr, 0, iKey, 0, &res); + rc = sqlite3BtreeTableMoveto(pCrsr, iKey, 0, &res); assert( rc==SQLITE_OK || res==0 ); pC->movetoTarget = iKey; /* Used by OP_Delete */ pC->nullRow = 0; @@ -91540,7 +91882,7 @@ case OP_NewRowid: { /* out2 */ do{ sqlite3_randomness(sizeof(v), &v); v &= (MAX_ROWID>>1); v++; /* Ensure that v is greater than zero */ - }while( ((rc = sqlite3BtreeMovetoUnpacked(pC->uc.pCursor, 0, (u64)v, + }while( ((rc = sqlite3BtreeTableMoveto(pC->uc.pCursor, (u64)v, 0, &res))==SQLITE_OK) && (res==0) && (++cnt<100)); @@ -91630,7 +91972,7 @@ case OP_Insert: { assert( (pOp->p5 & OPFLAG_ISNOOP) || HasRowid(pTab) ); }else{ pTab = 0; - zDb = 0; /* Not needed. Silence a compiler warning. */ + zDb = 0; } #ifdef SQLITE_ENABLE_PREUPDATE_HOOK @@ -91783,13 +92125,14 @@ case OP_Delete: { pC->movetoTarget = sqlite3BtreeIntegerKey(pC->uc.pCursor); } }else{ - zDb = 0; /* Not needed. Silence a compiler warning. */ - pTab = 0; /* Not needed. Silence a compiler warning. 
*/ + zDb = 0; + pTab = 0; } #ifdef SQLITE_ENABLE_PREUPDATE_HOOK /* Invoke the pre-update-hook if required. */ - if( db->xPreUpdateCallback && pOp->p4.pTab ){ + assert( db->xPreUpdateCallback==0 || pTab==pOp->p4.pTab ); + if( db->xPreUpdateCallback && pTab ){ assert( !(opflags & OPFLAG_ISUPDATE) || HasRowid(pTab)==0 || (aMem[pOp->p3].flags & MEM_Int) @@ -91830,7 +92173,7 @@ case OP_Delete: { /* Invoke the update-hook if required. */ if( opflags & OPFLAG_NCHANGE ){ p->nChange++; - if( db->xUpdateCallback && HasRowid(pTab) ){ + if( db->xUpdateCallback && ALWAYS(pTab!=0) && HasRowid(pTab) ){ db->xUpdateCallback(db->pUpdateArg, SQLITE_DELETE, zDb, pTab->zName, pC->movetoTarget); assert( pC->iDb>=0 ); @@ -92417,7 +92760,8 @@ case OP_SorterInsert: { /* in2 */ ** an UPDATE or DELETE statement and the index entry to be updated ** or deleted is not found. For some uses of IdxDelete ** (example: the EXCEPT operator) it does not matter that no matching -** entry is found. For those cases, P5 is zero. +** entry is found. For those cases, P5 is zero. Also, do not raise +** this (self-correcting and non-critical) error if in writable_schema mode. */ case OP_IdxDelete: { VdbeCursor *pC; @@ -92438,12 +92782,12 @@ case OP_IdxDelete: { r.nField = (u16)pOp->p3; r.default_rc = 0; r.aMem = &aMem[pOp->p2]; - rc = sqlite3BtreeMovetoUnpacked(pCrsr, &r, 0, 0, &res); + rc = sqlite3BtreeIndexMoveto(pCrsr, &r, &res); if( rc ) goto abort_due_to_error; if( res==0 ){ rc = sqlite3BtreeDelete(pCrsr, BTREE_AUXDELETE); if( rc ) goto abort_due_to_error; - }else if( pOp->p5 ){ + }else if( pOp->p5 && !sqlite3WritableSchema(db) ){ rc = sqlite3ReportError(SQLITE_CORRUPT_INDEX, __LINE__, "index corruption"); goto abort_due_to_error; } @@ -92751,7 +93095,7 @@ case OP_Destroy: { /* out2 */ ** See also: Destroy */ case OP_Clear: { - int nChange; + i64 nChange; sqlite3VdbeIncrWriteCounter(p, 0); nChange = 0; @@ -92877,7 +93221,7 @@ case OP_ParseSchema: { }else #endif { - zSchema = DFLT_SCHEMA_TABLE; + zSchema = LEGACY_SCHEMA_TABLE; initData.db = db; initData.iDb = iDb; initData.pzErrMsg = &p->zErrMsg; @@ -94097,6 +94441,7 @@ case OP_VFilter: { /* jump */ pCur = p->apCsr[pOp->p1]; assert( memIsValid(pQuery) ); REGISTER_TRACE(pOp->p3, pQuery); + assert( pCur!=0 ); assert( pCur->eCurType==CURTYPE_VTAB ); pVCur = pCur->uc.pVCur; pVtab = pVCur->pVtab; @@ -94108,7 +94453,6 @@ case OP_VFilter: { /* jump */ iQuery = (int)pQuery->u.i; /* Invoke the xFilter method */ - res = 0; apArg = p->apArg; for(i = 0; iapCsr[pOp->p1]; + assert( pCur!=0 ); assert( pCur->eCurType==CURTYPE_VTAB ); assert( pOp->p3>0 && pOp->p3<=(p->nMem+1 - p->nCursor) ); pDest = &aMem[pOp->p3]; @@ -94198,8 +94543,8 @@ case OP_VNext: { /* jump */ int res; VdbeCursor *pCur; - res = 0; pCur = p->apCsr[pOp->p1]; + assert( pCur!=0 ); assert( pCur->eCurType==CURTYPE_VTAB ); if( pCur->nullRow ){ break; @@ -94295,7 +94640,7 @@ case OP_VUpdate: { const sqlite3_module *pModule; int nArg; int i; - sqlite_int64 rowid; + sqlite_int64 rowid = 0; Mem **apArg; Mem *pX; @@ -94740,6 +95085,18 @@ abort_due_to_error: rc = SQLITE_CORRUPT_BKPT; } assert( rc ); +#ifdef SQLITE_DEBUG + if( db->flags & SQLITE_VdbeTrace ){ + const char *zTrace = p->zSql; + if( zTrace==0 ){ + if( aOp[0].opcode==OP_Trace ){ + zTrace = aOp[0].p4.z; + } + if( zTrace==0 ) zTrace = "???"; + } + printf("ABORT-due-to-error (rc=%d): %s\n", rc, zTrace); + } +#endif if( p->zErrMsg==0 && rc!=SQLITE_IOERR_NOMEM ){ sqlite3VdbeError(p, "%s", sqlite3ErrStr(rc)); } @@ -94750,6 +95107,9 @@ abort_due_to_error: (int)(pOp - aOp), p->zSql, 
p->zErrMsg); sqlite3VdbeHalt(p); if( rc==SQLITE_IOERR_NOMEM ) sqlite3OomFault(db); + if( rc==SQLITE_CORRUPT && db->autoCommit==0 ){ + db->flags |= SQLITE_CorruptRdOnly; + } rc = SQLITE_ERROR; if( resetSchemaOnFault>0 ){ sqlite3ResetOneSchema(db, resetSchemaOnFault-1); @@ -94881,7 +95241,10 @@ static int blobSeekToRow(Incrblob *p, sqlite3_int64 iRow, char **pzErr){ } if( rc==SQLITE_ROW ){ VdbeCursor *pC = v->apCsr[0]; - u32 type = pC->nHdrParsed>p->iCol ? pC->aType[p->iCol] : 0; + u32 type; + assert( pC!=0 ); + assert( pC->eCurType==CURTYPE_BTREE ); + type = pC->nHdrParsed>p->iCol ? pC->aType[p->iCol] : 0; testcase( pC->nHdrParsed==p->iCol ); testcase( pC->nHdrParsed==p->iCol+1 ); if( type<12 ){ @@ -94973,7 +95336,7 @@ SQLITE_API int sqlite3_blob_open( sqlite3ErrorMsg(&sParse, "cannot open table without rowid: %s", zTable); } #ifndef SQLITE_OMIT_VIEW - if( pTab && pTab->pSelect ){ + if( pTab && IsView(pTab) ){ pTab = 0; sqlite3ErrorMsg(&sParse, "cannot open view: %s", zTable); } @@ -94993,7 +95356,7 @@ SQLITE_API int sqlite3_blob_open( /* Now search pTab for the exact column. */ for(iCol=0; iColnCol; iCol++) { - if( sqlite3StrICmp(pTab->aCol[iCol].zName, zColumn)==0 ){ + if( sqlite3StrICmp(pTab->aCol[iCol].zCnName, zColumn)==0 ){ break; } } @@ -95018,7 +95381,8 @@ SQLITE_API int sqlite3_blob_open( ** key columns must be indexed. The check below will pick up this ** case. */ FKey *pFKey; - for(pFKey=pTab->pFKey; pFKey; pFKey=pFKey->pNextFrom){ + assert( IsOrdinaryTable(pTab) ); + for(pFKey=pTab->u.tab.pFKey; pFKey; pFKey=pFKey->pNextFrom){ int j; for(j=0; jnCol; j++){ if( pFKey->aCol[j].iFrom==iCol ){ @@ -95225,6 +95589,8 @@ static int blobReadWrite( */ sqlite3_int64 iKey; iKey = sqlite3BtreeIntegerKey(p->pCsr); + assert( v->apCsr[0]!=0 ); + assert( v->apCsr[0]->eCurType==CURTYPE_BTREE ); sqlite3VdbePreUpdateHook( v, v->apCsr[0], SQLITE_DELETE, p->zDb, p->pTab, iKey, -1, p->iCol ); @@ -96607,7 +96973,7 @@ static void vdbeSorterExtendFile(sqlite3 *db, sqlite3_file *pFd, i64 nByte){ sqlite3OsFileControlHint(pFd, SQLITE_FCNTL_CHUNK_SIZE, &chunksize); sqlite3OsFileControlHint(pFd, SQLITE_FCNTL_SIZE_HINT, &nByte); sqlite3OsFetch(pFd, 0, (int)nByte, &p); - sqlite3OsUnfetch(pFd, 0, p); + if( p ) sqlite3OsUnfetch(pFd, 0, p); } } #else @@ -97325,6 +97691,7 @@ static int vdbeIncrMergerNew( vdbeMergeEngineFree(pMerger); rc = SQLITE_NOMEM_BKPT; } + assert( *ppOut!=0 || rc!=SQLITE_OK ); return rc; } @@ -99026,7 +99393,7 @@ static SQLITE_NOINLINE int walkExpr(Walker *pWalker, Expr *pExpr){ assert( !ExprHasProperty(pExpr, EP_WinFunc) ); pExpr = pExpr->pRight; continue; - }else if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + }else if( ExprUseXSelect(pExpr) ){ assert( !ExprHasProperty(pExpr, EP_WinFunc) ); if( sqlite3WalkSelect(pWalker, pExpr->x.pSelect) ) return WRC_Abort; }else{ @@ -99298,6 +99665,7 @@ static void resolveAlias( }else{ incrAggFunctionDepth(pDup, nSubquery); if( pExpr->op==TK_COLLATE ){ + assert( !ExprHasProperty(pExpr, EP_IntValue) ); pDup = sqlite3ExprAddCollateString(pParse, pDup, pExpr->u.zToken); } @@ -99401,6 +99769,7 @@ SQLITE_PRIVATE Bitmask sqlite3ExprColUsed(Expr *pExpr){ Table *pExTab; n = pExpr->iColumn; + assert( ExprUseYTab(pExpr) ); pExTab = pExpr->y.pTab; assert( pExTab!=0 ); if( (pExTab->tabFlags & TF_HasGenerated)!=0 @@ -99514,7 +99883,7 @@ static int lookupName( u8 hCol; pTab = pItem->pTab; assert( pTab!=0 && pTab->zName!=0 ); - assert( pTab->nCol>0 ); + assert( pTab->nCol>0 || pParse->nErr ); if( pItem->pSelect && (pItem->pSelect->selFlags & SF_NestedFrom)!=0 ){ int hit = 
0; pEList = pItem->pSelect->pEList; @@ -99538,16 +99907,16 @@ static int lookupName( if( sqlite3StrICmp(zTabName, zTab)!=0 ){ continue; } + assert( ExprUseYTab(pExpr) ); if( IN_RENAME_OBJECT && pItem->zAlias ){ sqlite3RenameTokenRemap(pParse, 0, (void*)&pExpr->y.pTab); } } - if( 0==(cntTab++) ){ - pMatch = pItem; - } hCol = sqlite3StrIHash(zCol); for(j=0, pCol=pTab->aCol; jnCol; j++, pCol++){ - if( pCol->hName==hCol && sqlite3StrICmp(pCol->zName, zCol)==0 ){ + if( pCol->hName==hCol + && sqlite3StrICmp(pCol->zCnName, zCol)==0 + ){ /* If there has been exactly one prior match and this match ** is for the right-hand table of a NATURAL JOIN or is in a ** USING clause, then skip this match. @@ -99563,9 +99932,14 @@ static int lookupName( break; } } + if( 0==cnt && VisibleRowid(pTab) ){ + cntTab++; + pMatch = pItem; + } } if( pMatch ){ pExpr->iTable = pMatch->iCursor; + assert( ExprUseYTab(pExpr) ); pExpr->y.pTab = pMatch->pTab; /* RIGHT JOIN not (yet) supported */ assert( (pMatch->fg.jointype & JT_RIGHT)==0 ); @@ -99620,7 +99994,9 @@ static int lookupName( pSchema = pTab->pSchema; cntTab++; for(iCol=0, pCol=pTab->aCol; iColnCol; iCol++, pCol++){ - if( pCol->hName==hCol && sqlite3StrICmp(pCol->zName, zCol)==0 ){ + if( pCol->hName==hCol + && sqlite3StrICmp(pCol->zCnName, zCol)==0 + ){ if( iCol==pTab->iPKey ){ iCol = -1; } @@ -99637,6 +100013,7 @@ static int lookupName( #ifndef SQLITE_OMIT_UPSERT if( pExpr->iTable==EXCLUDED_TABLE_NUMBER ){ testcase( iCol==(-1) ); + assert( ExprUseYTab(pExpr) ); if( IN_RENAME_OBJECT ){ pExpr->iColumn = iCol; pExpr->y.pTab = pTab; @@ -99649,6 +100026,7 @@ static int lookupName( }else #endif /* SQLITE_OMIT_UPSERT */ { + assert( ExprUseYTab(pExpr) ); pExpr->y.pTab = pTab; if( pParse->bReturning ){ eNewExprOp = TK_REGISTER; @@ -99685,7 +100063,7 @@ static int lookupName( && pMatch && (pNC->ncFlags & (NC_IdxExpr|NC_GenCol))==0 && sqlite3IsRowid(zCol) - && VisibleRowid(pMatch->pTab) + && ALWAYS(VisibleRowid(pMatch->pTab)) ){ cnt = 1; pExpr->iColumn = -1; @@ -99723,8 +100101,8 @@ static int lookupName( ){ Expr *pOrig; assert( pExpr->pLeft==0 && pExpr->pRight==0 ); - assert( pExpr->x.pList==0 ); - assert( pExpr->x.pSelect==0 ); + assert( ExprUseXList(pExpr)==0 || pExpr->x.pList==0 ); + assert( ExprUseXSelect(pExpr)==0 || pExpr->x.pSelect==0 ); pOrig = pEList->a[j].pExpr; if( (pNC->ncFlags&NC_AllowAgg)==0 && ExprHasProperty(pOrig, EP_Agg) ){ sqlite3ErrorMsg(pParse, "misuse of aliased aggregate %s", zAs); @@ -99796,7 +100174,7 @@ static int lookupName( sqlite3VdbeAddDblquoteStr(db, pParse->pVdbe, zCol); #endif pExpr->op = TK_STRING; - pExpr->y.pTab = 0; + memset(&pExpr->y, 0, sizeof(pExpr->y)); return WRC_Prune; } if( sqlite3ExprIdToTrueFalse(pExpr) ){ @@ -99882,7 +100260,9 @@ SQLITE_PRIVATE Expr *sqlite3CreateColumnExpr(sqlite3 *db, SrcList *pSrc, int iSr Expr *p = sqlite3ExprAlloc(db, TK_COLUMN, 0, 0); if( p ){ SrcItem *pItem = &pSrc->a[iSrc]; - Table *pTab = p->y.pTab = pItem->pTab; + Table *pTab; + assert( ExprUseYTab(p) ); + pTab = p->y.pTab = pItem->pTab; p->iTable = pItem->iCursor; if( p->y.pTab->iPKey==iCol ){ p->iColumn = -1; @@ -99949,6 +100329,7 @@ static void notValidImpl( static int exprProbability(Expr *p){ double r = -1.0; if( p->op!=TK_FLOAT ) return -1; + assert( !ExprHasProperty(p, EP_IntValue) ); sqlite3AtoF(p->u.zToken, &r, sqlite3Strlen30(p->u.zToken), SQLITE_UTF8); assert( r>=0.0 ); if( r>1.0 ) return -1; @@ -99997,6 +100378,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ assert( pSrcList && pSrcList->nSrc>=1 ); pItem = pSrcList->a; 
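The name-resolution hunks above switch column lookups to the renamed Column.zCnName field while keeping the existing fast path of comparing a one-byte case-insensitive hash (pCol->hName==hCol) before doing the full string comparison. The stand-alone C sketch below shows that hash-prefilter idea only; the DemoCol struct and the demoHash()/demoIEq() helpers are invented for illustration and are not SQLite's sqlite3StrIHash()/sqlite3StrICmp().

#include <stdio.h>
#include <ctype.h>

/* Toy column record: a name plus a one-byte case-insensitive hash,
** mirroring the hName pre-check used in the hunks above. */
typedef struct DemoCol {
  const char *zName;
  unsigned char hName;
} DemoCol;

/* Cheap case-insensitive hash; an illustrative stand-in only. */
static unsigned char demoHash(const char *z){
  unsigned char h = 0;
  while( *z ) h += (unsigned char)tolower((unsigned char)*z++);
  return h;
}

/* Case-insensitive string equality. */
static int demoIEq(const char *a, const char *b){
  while( *a && tolower((unsigned char)*a)==tolower((unsigned char)*b) ){ a++; b++; }
  return tolower((unsigned char)*a)==tolower((unsigned char)*b);
}

/* Find zCol in aCol[0..nCol-1]: compare the one-byte hash first and only
** fall back to the full comparison on a hash match. Returns the index
** of the match, or -1 if the column is not present. */
static int demoFindColumn(const DemoCol *aCol, int nCol, const char *zCol){
  unsigned char h = demoHash(zCol);
  int i;
  for(i=0; i<nCol; i++){
    if( aCol[i].hName==h && demoIEq(aCol[i].zName, zCol) ) return i;
  }
  return -1;
}

int main(void){
  DemoCol aCol[] = { {"id",0}, {"Name",0}, {"price",0} };
  int i, n = (int)(sizeof(aCol)/sizeof(aCol[0]));
  for(i=0; i<n; i++) aCol[i].hName = demoHash(aCol[i].zName);
  printf("name -> %d\n", demoFindColumn(aCol, n, "name"));  /* prints 1 */
  printf("qty  -> %d\n", demoFindColumn(aCol, n, "qty"));   /* prints -1 */
  return 0;
}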
pExpr->op = TK_COLUMN; + assert( ExprUseYTab(pExpr) ); pExpr->y.pTab = pItem->pTab; pExpr->iTable = pItem->iCursor; pExpr->iColumn--; @@ -100028,6 +100410,8 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ } sqlite3WalkExpr(pWalker, pExpr->pLeft); if( 0==sqlite3ExprCanBeNull(pExpr->pLeft) && !IN_RENAME_OBJECT ){ + testcase( ExprHasProperty(pExpr, EP_FromJoin) ); + assert( !ExprHasProperty(pExpr, EP_IntValue) ); if( pExpr->op==TK_NOTNULL ){ pExpr->u.zToken = "true"; ExprSetProperty(pExpr, EP_IsTrue); @@ -100063,6 +100447,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ if( pExpr->op==TK_ID ){ zDb = 0; zTable = 0; + assert( !ExprHasProperty(pExpr, EP_IntValue) ); zColumn = pExpr->u.zToken; }else{ Expr *pLeft = pExpr->pLeft; @@ -100075,12 +100460,15 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ zDb = 0; }else{ assert( pRight->op==TK_DOT ); + assert( !ExprHasProperty(pRight, EP_IntValue) ); zDb = pLeft->u.zToken; pLeft = pRight->pLeft; pRight = pRight->pRight; } + assert( ExprUseUToken(pLeft) && ExprUseUToken(pRight) ); zTable = pLeft->u.zToken; zColumn = pRight->u.zToken; + assert( ExprUseYTab(pExpr) ); if( IN_RENAME_OBJECT ){ sqlite3RenameTokenRemap(pParse, (void*)pExpr, (void*)pRight); sqlite3RenameTokenRemap(pParse, (void*)&pExpr->y.pTab, (void*)pLeft); @@ -100105,7 +100493,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ #ifndef SQLITE_OMIT_WINDOWFUNC Window *pWin = (IsWindowFunc(pExpr) ? pExpr->y.pWin : 0); #endif - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); + assert( !ExprHasProperty(pExpr, EP_xIsSelect|EP_IntValue) ); zId = pExpr->u.zToken; nId = sqlite3Strlen30(zId); pDef = sqlite3FindFunction(pParse->db, zId, n, enc, 0); @@ -100269,7 +100657,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ #ifndef SQLITE_OMIT_WINDOWFUNC if( pWin ){ Select *pSel = pNC->pWinSelect; - assert( pWin==pExpr->y.pWin ); + assert( pWin==0 || (ExprUseYWin(pExpr) && pWin==pExpr->y.pWin) ); if( IN_RENAME_OBJECT==0 ){ sqlite3WindowUpdate(pParse, pSel ? 
pSel->pWinDefn : 0, pWin, pDef); if( pParse->db->mallocFailed ) break; @@ -100282,7 +100670,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ }else #endif /* SQLITE_OMIT_WINDOWFUNC */ { - NameContext *pNC2 = pNC; + NameContext *pNC2; /* For looping up thru outer contexts */ pExpr->op = TK_AGG_FUNCTION; pExpr->op2 = 0; #ifndef SQLITE_OMIT_WINDOWFUNC @@ -100290,16 +100678,22 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ sqlite3WalkExpr(pWalker, pExpr->y.pWin->pFilter); } #endif - while( pNC2 && !sqlite3FunctionUsesThisSrc(pExpr, pNC2->pSrcList) ){ + pNC2 = pNC; + while( pNC2 + && sqlite3ReferencesSrcList(pParse, pExpr, pNC2->pSrcList)==0 + ){ pExpr->op2++; pNC2 = pNC2->pNext; } assert( pDef!=0 || IN_RENAME_OBJECT ); if( pNC2 && pDef ){ assert( SQLITE_FUNC_MINMAX==NC_MinMaxAgg ); + assert( SQLITE_FUNC_ANYORDER==NC_OrderAgg ); testcase( (pDef->funcFlags & SQLITE_FUNC_MINMAX)!=0 ); - pNC2->ncFlags |= NC_HasAgg | (pDef->funcFlags & SQLITE_FUNC_MINMAX); - + testcase( (pDef->funcFlags & SQLITE_FUNC_ANYORDER)!=0 ); + pNC2->ncFlags |= NC_HasAgg + | ((pDef->funcFlags^SQLITE_FUNC_ANYORDER) + & (SQLITE_FUNC_MINMAX|SQLITE_FUNC_ANYORDER)); } } pNC->ncFlags |= savedAllowFlags; @@ -100315,15 +100709,17 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ #endif case TK_IN: { testcase( pExpr->op==TK_IN ); - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ int nRef = pNC->nRef; testcase( pNC->ncFlags & NC_IsCheck ); testcase( pNC->ncFlags & NC_PartIdx ); testcase( pNC->ncFlags & NC_IdxExpr ); testcase( pNC->ncFlags & NC_GenCol ); - sqlite3ResolveNotValid(pParse, pNC, "subqueries", - NC_IsCheck|NC_PartIdx|NC_IdxExpr|NC_GenCol, pExpr); - sqlite3WalkSelect(pWalker, pExpr->x.pSelect); + if( pNC->ncFlags & NC_SelfRef ){ + notValidImpl(pParse, pNC, "subqueries", pExpr); + }else{ + sqlite3WalkSelect(pWalker, pExpr->x.pSelect); + } assert( pNC->nRef>=nRef ); if( nRef!=pNC->nRef ){ ExprSetProperty(pExpr, EP_VarSelect); @@ -100370,6 +100766,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ assert( pExpr->pLeft!=0 ); nLeft = sqlite3ExprVectorSize(pExpr->pLeft); if( pExpr->op==TK_BETWEEN ){ + assert( ExprUseXList(pExpr) ); nRight = sqlite3ExprVectorSize(pExpr->x.pList->a[0].pExpr); if( nRight==nLeft ){ nRight = sqlite3ExprVectorSize(pExpr->x.pList->a[1].pExpr); @@ -100418,7 +100815,9 @@ static int resolveAsName( UNUSED_PARAMETER(pParse); if( pE->op==TK_ID ){ - char *zCol = pE->u.zToken; + const char *zCol; + assert( !ExprHasProperty(pE, EP_IntValue) ); + zCol = pE->u.zToken; for(i=0; inExpr; i++){ if( pEList->a[i].eEName==ENAME_NAME && sqlite3_stricmp(pEList->a[i].zEName, zCol)==0 @@ -100842,7 +101241,7 @@ static int resolveSelectStep(Walker *pWalker, Select *p){ p->pOrderBy = 0; } - /* Recursively resolve names in all subqueries + /* Recursively resolve names in all subqueries in the FROM clause */ for(i=0; ipSrc->nSrc; i++){ SrcItem *pItem = &p->pSrc->a[i]; @@ -100886,7 +101285,8 @@ static int resolveSelectStep(Walker *pWalker, Select *p){ pGroupBy = p->pGroupBy; if( pGroupBy || (sNC.ncFlags & NC_HasAgg)!=0 ){ assert( NC_MinMaxAgg==SF_MinMaxAgg ); - p->selFlags |= SF_Aggregate | (sNC.ncFlags&NC_MinMaxAgg); + assert( NC_OrderAgg==SF_OrderByReqd ); + p->selFlags |= SF_Aggregate | (sNC.ncFlags&(NC_MinMaxAgg|NC_OrderAgg)); }else{ sNC.ncFlags &= ~NC_AllowAgg; } @@ -101069,8 +101469,8 @@ SQLITE_PRIVATE int sqlite3ResolveExprNames( Walker w; if( pExpr==0 ) return SQLITE_OK; - savedHasAgg = pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin); - 
pNC->ncFlags &= ~(NC_HasAgg|NC_MinMaxAgg|NC_HasWin); + savedHasAgg = pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg); + pNC->ncFlags &= ~(NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg); w.pParse = pNC->pParse; w.xExprCallback = resolveExprStep; w.xSelectCallback = (pNC->ncFlags & NC_NoSelect) ? 0 : resolveSelectStep; @@ -101113,8 +101513,8 @@ SQLITE_PRIVATE int sqlite3ResolveExprListNames( w.xSelectCallback = resolveSelectStep; w.xSelectCallback2 = 0; w.u.pNC = pNC; - savedHasAgg = pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin); - pNC->ncFlags &= ~(NC_HasAgg|NC_MinMaxAgg|NC_HasWin); + savedHasAgg = pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg); + pNC->ncFlags &= ~(NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg); for(i=0; inExpr; i++){ Expr *pExpr = pList->a[i].pExpr; if( pExpr==0 ) continue; @@ -101132,10 +101532,11 @@ SQLITE_PRIVATE int sqlite3ResolveExprListNames( assert( EP_Win==NC_HasWin ); testcase( pNC->ncFlags & NC_HasAgg ); testcase( pNC->ncFlags & NC_HasWin ); - if( pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin) ){ + if( pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg) ){ ExprSetProperty(pExpr, pNC->ncFlags & (NC_HasAgg|NC_HasWin) ); - savedHasAgg |= pNC->ncFlags & (NC_HasAgg|NC_MinMaxAgg|NC_HasWin); - pNC->ncFlags &= ~(NC_HasAgg|NC_MinMaxAgg|NC_HasWin); + savedHasAgg |= pNC->ncFlags & + (NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg); + pNC->ncFlags &= ~(NC_HasAgg|NC_MinMaxAgg|NC_HasWin|NC_OrderAgg); } if( w.pParse->nErr>0 ) return WRC_Abort; } @@ -101249,9 +101650,9 @@ static int exprCodeVector(Parse *pParse, Expr *p, int *piToFree); /* ** Return the affinity character for a single column of a table. */ -SQLITE_PRIVATE char sqlite3TableColumnAffinity(Table *pTab, int iCol){ - assert( iColnCol ); - return iCol>=0 ? pTab->aCol[iCol].affinity : SQLITE_AFF_INTEGER; +SQLITE_PRIVATE char sqlite3TableColumnAffinity(const Table *pTab, int iCol){ + if( iCol<0 || NEVER(iCol>=pTab->nCol) ) return SQLITE_AFF_INTEGER; + return pTab->aCol[iCol].affinity; } /* @@ -101281,11 +101682,14 @@ SQLITE_PRIVATE char sqlite3ExprAffinity(const Expr *pExpr){ } op = pExpr->op; if( op==TK_REGISTER ) op = pExpr->op2; - if( (op==TK_COLUMN || op==TK_AGG_COLUMN) && pExpr->y.pTab ){ - return sqlite3TableColumnAffinity(pExpr->y.pTab, pExpr->iColumn); + if( op==TK_COLUMN || op==TK_AGG_COLUMN ){ + assert( ExprUseYTab(pExpr) ); + if( pExpr->y.pTab ){ + return sqlite3TableColumnAffinity(pExpr->y.pTab, pExpr->iColumn); + } } if( op==TK_SELECT ){ - assert( pExpr->flags&EP_xIsSelect ); + assert( ExprUseXSelect(pExpr) ); assert( pExpr->x.pSelect!=0 ); assert( pExpr->x.pSelect->pEList!=0 ); assert( pExpr->x.pSelect->pEList->a[0].pExpr!=0 ); @@ -101298,12 +101702,15 @@ SQLITE_PRIVATE char sqlite3ExprAffinity(const Expr *pExpr){ } #endif if( op==TK_SELECT_COLUMN ){ - assert( pExpr->pLeft->flags&EP_xIsSelect ); + assert( pExpr->pLeft!=0 && ExprUseXSelect(pExpr->pLeft) ); + assert( pExpr->iColumn < pExpr->iTable ); + assert( pExpr->iTable==pExpr->pLeft->x.pSelect->pEList->nExpr ); return sqlite3ExprAffinity( pExpr->pLeft->x.pSelect->pEList->a[pExpr->iColumn].pExpr ); } if( op==TK_VECTOR ){ + assert( ExprUseXList(pExpr) ); return sqlite3ExprAffinity(pExpr->x.pList->a[0].pExpr); } return pExpr->affExpr; @@ -101318,7 +101725,7 @@ SQLITE_PRIVATE char sqlite3ExprAffinity(const Expr *pExpr){ ** and the pExpr parameter is returned unchanged. 
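The two hunks above widen the bitmask that sqlite3ResolveExprNames()/sqlite3ResolveExprListNames() save, clear for the nested resolution, and OR back afterwards, so that the new NC_OrderAgg bit is carried the same way NC_HasAgg, NC_MinMaxAgg and NC_HasWin already are. Below is a minimal sketch of that save/clear/restore idiom in isolation; the flag names and Ctx struct are invented for the example and are not the real NameContext.

#include <stdio.h>

/* Invented flag bits standing in for NC_HasAgg, NC_MinMaxAgg, NC_HasWin,
** NC_OrderAgg. */
#define F_HAS_AGG   0x01
#define F_MINMAX    0x02
#define F_HAS_WIN   0x04
#define F_ORDER_AGG 0x08
#define F_ALL_AGG   (F_HAS_AGG|F_MINMAX|F_HAS_WIN|F_ORDER_AGG)

typedef struct Ctx { unsigned flags; } Ctx;

/* Pretend to resolve one subexpression; it may set some flag bits. */
static void resolveOne(Ctx *p, unsigned setBits){ p->flags |= setBits; }

/* Resolve a child expression with a clean slate, then merge the bits it
** produced back together with whatever the caller had set before. */
static unsigned resolveNested(Ctx *p, unsigned childBits){
  unsigned saved = p->flags & F_ALL_AGG;   /* save the caller's bits   */
  unsigned childSaw;
  p->flags &= ~F_ALL_AGG;                  /* clear for the child      */
  resolveOne(p, childBits);                /* child sets its own bits  */
  childSaw = p->flags & F_ALL_AGG;
  p->flags |= saved;                       /* restore the caller's bits */
  return childSaw;
}

int main(void){
  Ctx c = { F_HAS_WIN };                   /* caller already saw a window fn */
  unsigned child = resolveNested(&c, F_ORDER_AGG);
  printf("child saw 0x%x, context now 0x%x\n", child, c.flags);
  return 0;                                /* child saw 0x8, context now 0xc */
}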
*/ SQLITE_PRIVATE Expr *sqlite3ExprAddCollateToken( - Parse *pParse, /* Parsing context */ + const Parse *pParse, /* Parsing context */ Expr *pExpr, /* Add the "COLLATE" clause to this expression */ const Token *pCollName, /* Name of collating sequence */ int dequote /* True to dequote pCollName */ @@ -101333,7 +101740,11 @@ SQLITE_PRIVATE Expr *sqlite3ExprAddCollateToken( } return pExpr; } -SQLITE_PRIVATE Expr *sqlite3ExprAddCollateString(Parse *pParse, Expr *pExpr, const char *zC){ +SQLITE_PRIVATE Expr *sqlite3ExprAddCollateString( + const Parse *pParse, /* Parsing context */ + Expr *pExpr, /* Add the "COLLATE" clause to this expression */ + const char *zC /* The collating sequence name */ +){ Token s; assert( zC!=0 ); sqlite3TokenInit(&s, (char*)zC); @@ -101359,7 +101770,7 @@ SQLITE_PRIVATE Expr *sqlite3ExprSkipCollate(Expr *pExpr){ SQLITE_PRIVATE Expr *sqlite3ExprSkipCollateAndLikely(Expr *pExpr){ while( pExpr && ExprHasProperty(pExpr, EP_Skip|EP_Unlikely) ){ if( ExprHasProperty(pExpr, EP_Unlikely) ){ - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); + assert( ExprUseXList(pExpr) ); assert( pExpr->x.pList->nExpr>0 ); assert( pExpr->op==TK_FUNCTION ); pExpr = pExpr->x.pList->a[0].pExpr; @@ -101392,27 +101803,30 @@ SQLITE_PRIVATE CollSeq *sqlite3ExprCollSeq(Parse *pParse, const Expr *pExpr){ while( p ){ int op = p->op; if( op==TK_REGISTER ) op = p->op2; - if( (op==TK_AGG_COLUMN || op==TK_COLUMN || op==TK_TRIGGER) - && p->y.pTab!=0 - ){ - /* op==TK_REGISTER && p->y.pTab!=0 happens when pExpr was originally - ** a TK_COLUMN but was previously evaluated and cached in a register */ - int j = p->iColumn; - if( j>=0 ){ - const char *zColl = p->y.pTab->aCol[j].zColl; - pColl = sqlite3FindCollSeq(db, ENC(db), zColl, 0); + if( op==TK_AGG_COLUMN || op==TK_COLUMN || op==TK_TRIGGER ){ + assert( ExprUseYTab(p) ); + if( p->y.pTab!=0 ){ + /* op==TK_REGISTER && p->y.pTab!=0 happens when pExpr was originally + ** a TK_COLUMN but was previously evaluated and cached in a register */ + int j = p->iColumn; + if( j>=0 ){ + const char *zColl = sqlite3ColumnColl(&p->y.pTab->aCol[j]); + pColl = sqlite3FindCollSeq(db, ENC(db), zColl, 0); + } + break; } - break; } if( op==TK_CAST || op==TK_UPLUS ){ p = p->pLeft; continue; } if( op==TK_VECTOR ){ + assert( ExprUseXList(p) ); p = p->x.pList->a[0].pExpr; continue; } if( op==TK_COLLATE ){ + assert( !ExprHasProperty(p, EP_IntValue) ); pColl = sqlite3GetCollSeq(pParse, ENC(db), 0, p->u.zToken); break; } @@ -101422,11 +101836,9 @@ SQLITE_PRIVATE CollSeq *sqlite3ExprCollSeq(Parse *pParse, const Expr *pExpr){ }else{ Expr *pNext = p->pRight; /* The Expr.x union is never used at the same time as Expr.pRight */ + assert( ExprUseXList(p) ); assert( p->x.pList==0 || p->pRight==0 ); - if( p->x.pList!=0 - && !db->mallocFailed - && ALWAYS(!ExprHasProperty(p, EP_xIsSelect)) - ){ + if( p->x.pList!=0 && !db->mallocFailed ){ int i; for(i=0; ALWAYS(ix.pList->nExpr); i++){ if( ExprHasProperty(p->x.pList->a[i].pExpr, EP_Collate) ){ @@ -101509,7 +101921,7 @@ static char comparisonAffinity(const Expr *pExpr){ aff = sqlite3ExprAffinity(pExpr->pLeft); if( pExpr->pRight ){ aff = sqlite3CompareAffinity(pExpr->pRight, aff); - }else if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + }else if( ExprUseXSelect(pExpr) ){ aff = sqlite3CompareAffinity(pExpr->x.pSelect->pEList->a[0].pExpr, aff); }else if( aff==0 ){ aff = SQLITE_AFF_BLOB; @@ -101635,7 +102047,7 @@ static int codeCompare( ** But a TK_SELECT might be either a vector or a scalar. 
It is only ** considered a vector if it has two or more result columns. */ -SQLITE_PRIVATE int sqlite3ExprIsVector(Expr *pExpr){ +SQLITE_PRIVATE int sqlite3ExprIsVector(const Expr *pExpr){ return sqlite3ExprVectorSize(pExpr)>1; } @@ -101645,12 +102057,14 @@ SQLITE_PRIVATE int sqlite3ExprIsVector(Expr *pExpr){ ** is a sub-select, return the number of columns in the sub-select. For ** any other type of expression, return 1. */ -SQLITE_PRIVATE int sqlite3ExprVectorSize(Expr *pExpr){ +SQLITE_PRIVATE int sqlite3ExprVectorSize(const Expr *pExpr){ u8 op = pExpr->op; if( op==TK_REGISTER ) op = pExpr->op2; if( op==TK_VECTOR ){ + assert( ExprUseXList(pExpr) ); return pExpr->x.pList->nExpr; }else if( op==TK_SELECT ){ + assert( ExprUseXSelect(pExpr) ); return pExpr->x.pSelect->pEList->nExpr; }else{ return 1; @@ -101677,8 +102091,10 @@ SQLITE_PRIVATE Expr *sqlite3VectorFieldSubexpr(Expr *pVector, int i){ if( sqlite3ExprIsVector(pVector) ){ assert( pVector->op2==0 || pVector->op==TK_REGISTER ); if( pVector->op==TK_SELECT || pVector->op2==TK_SELECT ){ + assert( ExprUseXSelect(pVector) ); return pVector->x.pSelect->pEList->a[i].pExpr; }else{ + assert( ExprUseXList(pVector) ); return pVector->x.pList->a[i].pExpr; } } @@ -101709,11 +102125,12 @@ SQLITE_PRIVATE Expr *sqlite3VectorFieldSubexpr(Expr *pVector, int i){ SQLITE_PRIVATE Expr *sqlite3ExprForVectorField( Parse *pParse, /* Parsing context */ Expr *pVector, /* The vector. List of expressions or a sub-SELECT */ - int iField /* Which column of the vector to return */ + int iField, /* Which column of the vector to return */ + int nField /* Total number of columns in the vector */ ){ Expr *pRet; if( pVector->op==TK_SELECT ){ - assert( pVector->flags & EP_xIsSelect ); + assert( ExprUseXSelect(pVector) ); /* The TK_SELECT_COLUMN Expr node: ** ** pLeft: pVector containing TK_SELECT. Not deleted. @@ -101732,14 +102149,23 @@ SQLITE_PRIVATE Expr *sqlite3ExprForVectorField( */ pRet = sqlite3PExpr(pParse, TK_SELECT_COLUMN, 0, 0); if( pRet ){ + pRet->iTable = nField; pRet->iColumn = iField; pRet->pLeft = pVector; } - assert( pRet==0 || pRet->iTable==0 ); }else{ - if( pVector->op==TK_VECTOR ) pVector = pVector->x.pList->a[iField].pExpr; + if( pVector->op==TK_VECTOR ){ + Expr **ppVector; + assert( ExprUseXList(pVector) ); + ppVector = &pVector->x.pList->a[iField].pExpr; + pVector = *ppVector; + if( IN_RENAME_OBJECT ){ + /* This must be a vector UPDATE inside a trigger */ + *ppVector = 0; + return pVector; + } + } pRet = sqlite3ExprDup(pParse->db, pVector, 0); - sqlite3RenameTokenRemap(pParse, pRet, pVector); } return pRet; } @@ -101795,10 +102221,12 @@ static int exprVectorRegister( return pVector->iTable+iField; } if( op==TK_SELECT ){ + assert( ExprUseXSelect(pVector) ); *ppExpr = pVector->x.pSelect->pEList->a[iField].pExpr; return regSelect+iField; } if( op==TK_VECTOR ){ + assert( ExprUseXList(pVector) ); *ppExpr = pVector->x.pList->a[iField].pExpr; return sqlite3ExprCodeTemp(pParse, *ppExpr, pRegFree); } @@ -101932,14 +102360,14 @@ SQLITE_PRIVATE int sqlite3ExprCheckHeight(Parse *pParse, int nHeight){ ** to by pnHeight, the second parameter, then set *pnHeight to that ** value. 
*/ -static void heightOfExpr(Expr *p, int *pnHeight){ +static void heightOfExpr(const Expr *p, int *pnHeight){ if( p ){ if( p->nHeight>*pnHeight ){ *pnHeight = p->nHeight; } } } -static void heightOfExprList(ExprList *p, int *pnHeight){ +static void heightOfExprList(const ExprList *p, int *pnHeight){ if( p ){ int i; for(i=0; inExpr; i++){ @@ -101947,8 +102375,8 @@ static void heightOfExprList(ExprList *p, int *pnHeight){ } } } -static void heightOfSelect(Select *pSelect, int *pnHeight){ - Select *p; +static void heightOfSelect(const Select *pSelect, int *pnHeight){ + const Select *p; for(p=pSelect; p; p=p->pPrior){ heightOfExpr(p->pWhere, pnHeight); heightOfExpr(p->pHaving, pnHeight); @@ -101973,7 +102401,7 @@ static void exprSetHeight(Expr *p){ int nHeight = 0; heightOfExpr(p->pLeft, &nHeight); heightOfExpr(p->pRight, &nHeight); - if( ExprHasProperty(p, EP_xIsSelect) ){ + if( ExprUseXSelect(p) ){ heightOfSelect(p->x.pSelect, &nHeight); }else if( p->x.pList ){ heightOfExprList(p->x.pList, &nHeight); @@ -102000,7 +102428,7 @@ SQLITE_PRIVATE void sqlite3ExprSetHeightAndFlags(Parse *pParse, Expr *p){ ** Return the maximum height of any expression tree referenced ** by the select statement passed as an argument. */ -SQLITE_PRIVATE int sqlite3SelectExprHeight(Select *p){ +SQLITE_PRIVATE int sqlite3SelectExprHeight(const Select *p){ int nHeight = 0; heightOfSelect(p, &nHeight); return nHeight; @@ -102012,7 +102440,7 @@ SQLITE_PRIVATE int sqlite3SelectExprHeight(Select *p){ */ SQLITE_PRIVATE void sqlite3ExprSetHeightAndFlags(Parse *pParse, Expr *p){ if( pParse->nErr ) return; - if( p && p->x.pList && !ExprHasProperty(p, EP_xIsSelect) ){ + if( p && ExprUseXList(p) && p->x.pList ){ p->flags |= EP_Propagate & sqlite3ExprListFlags(p->x.pList); } } @@ -102170,6 +102598,63 @@ SQLITE_PRIVATE void sqlite3PExprAddSelect(Parse *pParse, Expr *pExpr, Select *pS } } +/* +** Expression list pEList is a list of vector values. This function +** converts the contents of pEList to a VALUES(...) Select statement +** returning 1 row for each element of the list. For example, the +** expression list: +** +** ( (1,2), (3,4) (5,6) ) +** +** is translated to the equivalent of: +** +** VALUES(1,2), (3,4), (5,6) +** +** Each of the vector values in pEList must contain exactly nElem terms. +** If a list element that is not a vector or does not contain nElem terms, +** an error message is left in pParse. +** +** This is used as part of processing IN(...) expressions with a list +** of vectors on the RHS. e.g. "... IN ((1,2), (3,4), (5,6))". +*/ +SQLITE_PRIVATE Select *sqlite3ExprListToValues(Parse *pParse, int nElem, ExprList *pEList){ + int ii; + Select *pRet = 0; + assert( nElem>1 ); + for(ii=0; iinExpr; ii++){ + Select *pSel; + Expr *pExpr = pEList->a[ii].pExpr; + int nExprElem; + if( pExpr->op==TK_VECTOR ){ + assert( ExprUseXList(pExpr) ); + nExprElem = pExpr->x.pList->nExpr; + }else{ + nExprElem = 1; + } + if( nExprElem!=nElem ){ + sqlite3ErrorMsg(pParse, "IN(...) element has %d term%s - expected %d", + nExprElem, nExprElem>1?"s":"", nElem + ); + break; + } + assert( ExprUseXList(pExpr) ); + pSel = sqlite3SelectNew(pParse, pExpr->x.pList, 0, 0, 0, 0, 0, SF_Values,0); + pExpr->x.pList = 0; + if( pSel ){ + if( pRet ){ + pSel->op = TK_ALL; + pSel->pPrior = pRet; + } + pRet = pSel; + } + } + + if( pRet && pRet->pPrior ){ + pRet->selFlags |= SF_MultiValue; + } + sqlite3ExprListDelete(pParse->db, pEList); + return pRet; +} /* ** Join two expressions using an AND operator. 
If either expression is @@ -102203,7 +102688,7 @@ SQLITE_PRIVATE Expr *sqlite3ExprAnd(Parse *pParse, Expr *pLeft, Expr *pRight){ SQLITE_PRIVATE Expr *sqlite3ExprFunction( Parse *pParse, /* Parsing context */ ExprList *pList, /* Argument list */ - Token *pToken, /* Name of the function */ + const Token *pToken, /* Name of the function */ int eDistinct /* SF_Distinct or SF_ALL or 0 */ ){ Expr *pNew; @@ -102214,12 +102699,15 @@ SQLITE_PRIVATE Expr *sqlite3ExprFunction( sqlite3ExprListDelete(db, pList); /* Avoid memory leak when malloc fails */ return 0; } - if( pList && pList->nExpr > pParse->db->aLimit[SQLITE_LIMIT_FUNCTION_ARG] ){ + if( pList + && pList->nExpr > pParse->db->aLimit[SQLITE_LIMIT_FUNCTION_ARG] + && !pParse->nested + ){ sqlite3ErrorMsg(pParse, "too many arguments on function %T", pToken); } pNew->x.pList = pList; ExprSetProperty(pNew, EP_HasFunc); - assert( !ExprHasProperty(pNew, EP_xIsSelect) ); + assert( ExprUseXList(pNew) ); sqlite3ExprSetHeightAndFlags(pParse, pNew); if( eDistinct==SF_Distinct ) ExprSetProperty(pNew, EP_Distinct); return pNew; @@ -102238,8 +102726,8 @@ SQLITE_PRIVATE Expr *sqlite3ExprFunction( */ SQLITE_PRIVATE void sqlite3ExprFunctionUsable( Parse *pParse, /* Parsing and code generating context */ - Expr *pExpr, /* The function invocation */ - FuncDef *pDef /* The function being invoked */ + const Expr *pExpr, /* The function invocation */ + const FuncDef *pDef /* The function being invoked */ ){ assert( !IN_RENAME_OBJECT ); assert( (pDef->funcFlags & (SQLITE_FUNC_DIRECT|SQLITE_FUNC_UNSAFE))!=0 ); @@ -102345,27 +102833,26 @@ SQLITE_PRIVATE void sqlite3ExprAssignVarNumber(Parse *pParse, Expr *pExpr, u32 n */ static SQLITE_NOINLINE void sqlite3ExprDeleteNN(sqlite3 *db, Expr *p){ assert( p!=0 ); - /* Sanity check: Assert that the IntValue is non-negative if it exists */ - assert( !ExprHasProperty(p, EP_IntValue) || p->u.iValue>=0 ); - - assert( !ExprHasProperty(p, EP_WinFunc) || p->y.pWin!=0 || db->mallocFailed ); - assert( p->op!=TK_FUNCTION || ExprHasProperty(p, EP_TokenOnly|EP_Reduced) - || p->y.pWin==0 || ExprHasProperty(p, EP_WinFunc) ); + assert( !ExprUseUValue(p) || p->u.iValue>=0 ); + assert( !ExprUseYWin(p) || !ExprUseYSub(p) ); + assert( !ExprUseYWin(p) || p->y.pWin!=0 || db->mallocFailed ); + assert( p->op!=TK_FUNCTION || !ExprUseYSub(p) ); #ifdef SQLITE_DEBUG if( ExprHasProperty(p, EP_Leaf) && !ExprHasProperty(p, EP_TokenOnly) ){ assert( p->pLeft==0 ); assert( p->pRight==0 ); - assert( p->x.pSelect==0 ); + assert( !ExprUseXSelect(p) || p->x.pSelect==0 ); + assert( !ExprUseXList(p) || p->x.pList==0 ); } #endif if( !ExprHasProperty(p, (EP_TokenOnly|EP_Leaf)) ){ /* The Expr.x union is never used at the same time as Expr.pRight */ - assert( p->x.pList==0 || p->pRight==0 ); + assert( (ExprUseXList(p) && p->x.pList==0) || p->pRight==0 ); if( p->pLeft && p->op!=TK_SELECT_COLUMN ) sqlite3ExprDeleteNN(db, p->pLeft); if( p->pRight ){ assert( !ExprHasProperty(p, EP_WinFunc) ); sqlite3ExprDeleteNN(db, p->pRight); - }else if( ExprHasProperty(p, EP_xIsSelect) ){ + }else if( ExprUseXSelect(p) ){ assert( !ExprHasProperty(p, EP_WinFunc) ); sqlite3SelectDelete(db, p->x.pSelect); }else{ @@ -102377,7 +102864,10 @@ static SQLITE_NOINLINE void sqlite3ExprDeleteNN(sqlite3 *db, Expr *p){ #endif } } - if( ExprHasProperty(p, EP_MemToken) ) sqlite3DbFree(db, p->u.zToken); + if( ExprHasProperty(p, EP_MemToken) ){ + assert( !ExprHasProperty(p, EP_IntValue) ); + sqlite3DbFree(db, p->u.zToken); + } if( !ExprHasProperty(p, EP_Static) ){ sqlite3DbFreeNN(db, p); } @@ -102419,7 
+102909,7 @@ SQLITE_PRIVATE void sqlite3ExprUnmapAndDelete(Parse *pParse, Expr *p){ ** passed as the first argument. This is always one of EXPR_FULLSIZE, ** EXPR_REDUCEDSIZE or EXPR_TOKENONLYSIZE. */ -static int exprStructSize(Expr *p){ +static int exprStructSize(const Expr *p){ if( ExprHasProperty(p, EP_TokenOnly) ) return EXPR_TOKENONLYSIZE; if( ExprHasProperty(p, EP_Reduced) ) return EXPR_REDUCEDSIZE; return EXPR_FULLSIZE; @@ -102459,7 +102949,7 @@ static int exprStructSize(Expr *p){ ** of dupedExprStructSize() contain multiple assert() statements that attempt ** to enforce this constraint. */ -static int dupedExprStructSize(Expr *p, int flags){ +static int dupedExprStructSize(const Expr *p, int flags){ int nSize; assert( flags==EXPRDUP_REDUCE || flags==0 ); /* Only one flag value allowed */ assert( EXPR_FULLSIZE<=0xfff ); @@ -102490,7 +102980,7 @@ static int dupedExprStructSize(Expr *p, int flags){ ** of the Expr structure and a copy of the Expr.u.zToken string (if that ** string is defined.) */ -static int dupedExprNodeSize(Expr *p, int flags){ +static int dupedExprNodeSize(const Expr *p, int flags){ int nByte = dupedExprStructSize(p, flags) & 0xfff; if( !ExprHasProperty(p, EP_IntValue) && p->u.zToken ){ nByte += sqlite3Strlen30NN(p->u.zToken)+1; @@ -102511,7 +103001,7 @@ static int dupedExprNodeSize(Expr *p, int flags){ ** and Expr.pRight variables (but not for any structures pointed to or ** descended from the Expr.x.pList or Expr.x.pSelect variables). */ -static int dupedExprSize(Expr *p, int flags){ +static int dupedExprSize(const Expr *p, int flags){ int nByte = 0; if( p ){ nByte = dupedExprNodeSize(p, flags); @@ -102530,7 +103020,7 @@ static int dupedExprSize(Expr *p, int flags){ ** if any. Before returning, *pzBuffer is set to the first byte past the ** portion of the buffer copied into by this function. */ -static Expr *exprDup(sqlite3 *db, Expr *p, int dupFlags, u8 **pzBuffer){ +static Expr *exprDup(sqlite3 *db, const Expr *p, int dupFlags, u8 **pzBuffer){ Expr *pNew; /* Value to return */ u8 *zAlloc; /* Memory space from which to build Expr object */ u32 staticFlag; /* EP_Static if space not obtained from malloc */ @@ -102593,7 +103083,7 @@ static Expr *exprDup(sqlite3 *db, Expr *p, int dupFlags, u8 **pzBuffer){ if( 0==((p->flags|pNew->flags) & (EP_TokenOnly|EP_Leaf)) ){ /* Fill in the pNew->x.pSelect or pNew->x.pList member. */ - if( ExprHasProperty(p, EP_xIsSelect) ){ + if( ExprUseXSelect(p) ){ pNew->x.pSelect = sqlite3SelectDup(db, p->x.pSelect, dupFlags); }else{ pNew->x.pList = sqlite3ExprListDup(db, p->x.pList, dupFlags); @@ -102622,7 +103112,6 @@ static Expr *exprDup(sqlite3 *db, Expr *p, int dupFlags, u8 **pzBuffer){ if( !ExprHasProperty(p, EP_TokenOnly|EP_Leaf) ){ if( pNew->op==TK_SELECT_COLUMN ){ pNew->pLeft = p->pLeft; - assert( p->iColumn==0 || p->pRight==0 ); assert( p->pRight==0 || p->pRight==p->pLeft || ExprHasProperty(p->pLeft, EP_Subquery) ); }else{ @@ -102712,15 +103201,17 @@ static void gatherSelectWindows(Select *p){ ** truncated version of the usual Expr structure that will be stored as ** part of the in-memory representation of the database schema. */ -SQLITE_PRIVATE Expr *sqlite3ExprDup(sqlite3 *db, Expr *p, int flags){ +SQLITE_PRIVATE Expr *sqlite3ExprDup(sqlite3 *db, const Expr *p, int flags){ assert( flags==0 || flags==EXPRDUP_REDUCE ); return p ? 
exprDup(db, p, flags, 0) : 0; } -SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, ExprList *p, int flags){ +SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, const ExprList *p, int flags){ ExprList *pNew; - struct ExprList_item *pItem, *pOldItem; + struct ExprList_item *pItem; + const struct ExprList_item *pOldItem; int i; - Expr *pPriorSelectCol = 0; + Expr *pPriorSelectColOld = 0; + Expr *pPriorSelectColNew = 0; assert( db!=0 ); if( p==0 ) return 0; pNew = sqlite3DbMallocRawNN(db, sqlite3DbMallocSize(db, p)); @@ -102737,17 +103228,17 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, ExprList *p, int flags) && pOldExpr->op==TK_SELECT_COLUMN && (pNewExpr = pItem->pExpr)!=0 ){ - assert( pNewExpr->iColumn==0 || i>0 ); - if( pNewExpr->iColumn==0 ){ - assert( pOldExpr->pLeft==pOldExpr->pRight - || ExprHasProperty(pOldExpr->pLeft, EP_Subquery) ); - pPriorSelectCol = pNewExpr->pLeft = pNewExpr->pRight; + if( pNewExpr->pRight ){ + pPriorSelectColOld = pOldExpr->pRight; + pPriorSelectColNew = pNewExpr->pRight; + pNewExpr->pLeft = pNewExpr->pRight; }else{ - assert( i>0 ); - assert( pItem[-1].pExpr!=0 ); - assert( pNewExpr->iColumn==pItem[-1].pExpr->iColumn+1 ); - assert( pPriorSelectCol==pItem[-1].pExpr->pLeft ); - pNewExpr->pLeft = pPriorSelectCol; + if( pOldExpr->pLeft!=pPriorSelectColOld ){ + pPriorSelectColOld = pOldExpr->pLeft; + pPriorSelectColNew = sqlite3ExprDup(db, pPriorSelectColOld, flags); + pNewExpr->pRight = pPriorSelectColNew; + } + pNewExpr->pLeft = pPriorSelectColNew; } } pItem->zEName = sqlite3DbStrDup(db, pOldItem->zEName); @@ -102769,7 +103260,7 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, ExprList *p, int flags) */ #if !defined(SQLITE_OMIT_VIEW) || !defined(SQLITE_OMIT_TRIGGER) \ || !defined(SQLITE_OMIT_SUBQUERY) -SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3 *db, SrcList *p, int flags){ +SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3 *db, const SrcList *p, int flags){ SrcList *pNew; int i; int nByte; @@ -102781,7 +103272,7 @@ SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3 *db, SrcList *p, int flags){ pNew->nSrc = pNew->nAlloc = p->nSrc; for(i=0; inSrc; i++){ SrcItem *pNewItem = &pNew->a[i]; - SrcItem *pOldItem = &p->a[i]; + const SrcItem *pOldItem = &p->a[i]; Table *pTab; pNewItem->pSchema = pOldItem->pSchema; pNewItem->zDatabase = sqlite3DbStrDup(db, pOldItem->zDatabase); @@ -102813,7 +103304,7 @@ SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3 *db, SrcList *p, int flags){ } return pNew; } -SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3 *db, IdList *p){ +SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3 *db, const IdList *p){ IdList *pNew; int i; assert( db!=0 ); @@ -102837,11 +103328,11 @@ SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3 *db, IdList *p){ } return pNew; } -SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, Select *pDup, int flags){ +SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, const Select *pDup, int flags){ Select *pRet = 0; Select *pNext = 0; Select **pp = &pRet; - Select *p; + const Select *p; assert( db!=0 ); for(p=pDup; p; p=p->pPrior){ @@ -102886,7 +103377,7 @@ SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, Select *pDup, int flags){ return pRet; } #else -SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, Select *p, int flags){ +SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, const Select *p, int flags){ assert( p==0 ); return 0; } @@ -103006,11 +103497,9 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListAppendVector( } for(i=0; inId; i++){ - Expr *pSubExpr = 
sqlite3ExprForVectorField(pParse, pExpr, i); + Expr *pSubExpr = sqlite3ExprForVectorField(pParse, pExpr, i, pColumns->nId); assert( pSubExpr!=0 || db->mallocFailed ); - assert( pSubExpr==0 || pSubExpr->iTable==0 ); if( pSubExpr==0 ) continue; - pSubExpr->iTable = pColumns->nId; pList = sqlite3ExprListAppend(pParse, pList, pSubExpr); if( pList ){ assert( pList->nExpr==iFirst+i+1 ); @@ -103084,7 +103573,7 @@ SQLITE_PRIVATE void sqlite3ExprListSetSortOrder(ExprList *p, int iSortOrder, int SQLITE_PRIVATE void sqlite3ExprListSetName( Parse *pParse, /* Parsing context */ ExprList *pList, /* List to which to add the span. */ - Token *pName, /* Name to be added */ + const Token *pName, /* Name to be added */ int dequote /* True to cause the name to be dequoted */ ){ assert( pList!=0 || pParse->db->mallocFailed!=0 ); @@ -103102,7 +103591,7 @@ SQLITE_PRIVATE void sqlite3ExprListSetName( ** to the token-map. */ sqlite3Dequote(pItem->zEName); if( IN_RENAME_OBJECT ){ - sqlite3RenameTokenMap(pParse, (void*)pItem->zEName, pName); + sqlite3RenameTokenMap(pParse, (const void*)pItem->zEName, pName); } } } @@ -103221,7 +103710,7 @@ SQLITE_PRIVATE u32 sqlite3IsTrueOrFalse(const char *zIn){ SQLITE_PRIVATE int sqlite3ExprIdToTrueFalse(Expr *pExpr){ u32 v; assert( pExpr->op==TK_ID || pExpr->op==TK_STRING ); - if( !ExprHasProperty(pExpr, EP_Quoted) + if( !ExprHasProperty(pExpr, EP_Quoted|EP_IntValue) && (v = sqlite3IsTrueOrFalse(pExpr->u.zToken))!=0 ){ pExpr->op = TK_TRUEFALSE; @@ -103238,6 +103727,7 @@ SQLITE_PRIVATE int sqlite3ExprIdToTrueFalse(Expr *pExpr){ SQLITE_PRIVATE int sqlite3ExprTruthValue(const Expr *pExpr){ pExpr = sqlite3ExprSkipCollate((Expr*)pExpr); assert( pExpr->op==TK_TRUEFALSE ); + assert( !ExprHasProperty(pExpr, EP_IntValue) ); assert( sqlite3StrICmp(pExpr->u.zToken,"true")==0 || sqlite3StrICmp(pExpr->u.zToken,"false")==0 ); return pExpr->u.zToken[4]==0; @@ -103442,7 +103932,7 @@ static int exprNodeIsConstantOrGroupBy(Walker *pWalker, Expr *pExpr){ } /* Check if pExpr is a sub-select. If so, consider it variable. */ - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ pWalker->eCode = 0; return WRC_Abort; } @@ -103530,7 +104020,7 @@ SQLITE_PRIVATE int sqlite3ExprContainsSubquery(Expr *p){ ** in *pValue. If the expression is not an integer or if it is too big ** to fit in a signed 32-bit integer, return 0 and leave *pValue unchanged. */ -SQLITE_PRIVATE int sqlite3ExprIsInteger(Expr *p, int *pValue){ +SQLITE_PRIVATE int sqlite3ExprIsInteger(const Expr *p, int *pValue){ int rc = 0; if( NEVER(p==0) ) return 0; /* Used to only happen following on OOM */ @@ -103549,9 +104039,9 @@ SQLITE_PRIVATE int sqlite3ExprIsInteger(Expr *p, int *pValue){ break; } case TK_UMINUS: { - int v; + int v = 0; if( sqlite3ExprIsInteger(p->pLeft, &v) ){ - assert( v!=(-2147483647-1) ); + assert( ((unsigned int)v)!=0x80000000 ); *pValue = -v; rc = 1; } @@ -103592,6 +104082,7 @@ SQLITE_PRIVATE int sqlite3ExprCanBeNull(const Expr *p){ case TK_BLOB: return 0; case TK_COLUMN: + assert( ExprUseYTab(p) ); return ExprHasProperty(p, EP_CanBeNull) || p->y.pTab==0 || /* Reference to column of index on expression */ (p->iColumn>=0 @@ -103663,13 +104154,13 @@ SQLITE_PRIVATE int sqlite3IsRowid(const char *z){ ** table, then return NULL. 
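Among the hunks above, sqlite3ExprIsInteger() now initialises the TK_UMINUS temporary to 0 and asserts that the value about to be negated is not 0x80000000, because negating INT_MIN overflows a signed int. A small, self-contained illustration of that guard follows; the safeNegate() helper is hypothetical and not part of SQLite.

#include <stdio.h>
#include <limits.h>

/* Negate v into *pOut only when the result is representable.
** Returns 1 on success, 0 if v==INT_MIN (whose negation does not fit). */
static int safeNegate(int v, int *pOut){
  if( v==INT_MIN ) return 0;   /* -INT_MIN overflows a two's-complement int */
  *pOut = -v;
  return 1;
}

int main(void){
  int r = 0;
  int ok = safeNegate(12345, &r);
  printf("negate 12345: %s (%d)\n", ok ? "ok" : "overflow", r);
  ok = safeNegate(INT_MIN, &r);
  printf("negate INT_MIN: %s\n", ok ? "ok" : "overflow");
  return 0;
}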
*/ #ifndef SQLITE_OMIT_SUBQUERY -static Select *isCandidateForInOpt(Expr *pX){ +static Select *isCandidateForInOpt(const Expr *pX){ Select *p; SrcList *pSrc; ExprList *pEList; Table *pTab; int i; - if( !ExprHasProperty(pX, EP_xIsSelect) ) return 0; /* Not a subquery */ + if( !ExprUseXSelect(pX) ) return 0; /* Not a subquery */ if( ExprHasProperty(pX, EP_VarSelect) ) return 0; /* Correlated subq */ p = pX->x.pSelect; if( p->pPrior ) return 0; /* Not a compound SELECT */ @@ -103687,7 +104178,7 @@ static Select *isCandidateForInOpt(Expr *pX){ if( pSrc->a[0].pSelect ) return 0; /* FROM is not a subquery or view */ pTab = pSrc->a[0].pTab; assert( pTab!=0 ); - assert( pTab->pSelect==0 ); /* FROM clause is not a view */ + assert( !IsView(pTab) ); /* FROM clause is not a view */ if( IsVirtual(pTab) ) return 0; /* FROM clause not a virtual table */ pEList = p->pEList; assert( pEList!=0 ); @@ -103840,7 +104331,7 @@ SQLITE_PRIVATE int sqlite3FindInIndex( ** or not NULL is actually possible (it may not be, for example, due ** to NOT NULL constraints in the schema). If no NULL values are possible, ** set prRhsHasNull to 0 before continuing. */ - if( prRhsHasNull && (pX->flags & EP_xIsSelect) ){ + if( prRhsHasNull && ExprUseXSelect(pX) ){ int i; ExprList *pEList = pX->x.pSelect->pEList; for(i=0; inExpr; i++){ @@ -103941,7 +104432,8 @@ SQLITE_PRIVATE int sqlite3FindInIndex( CollSeq *pReq = sqlite3BinaryCompareCollSeq(pParse, pLhs, pRhs); int j; - assert( pReq!=0 || pRhs->iColumn==XN_ROWID || pParse->nErr ); + assert( pReq!=0 || pRhs->iColumn==XN_ROWID + || pParse->nErr || db->mallocFailed ); for(j=0; jaiColumn[j]!=pRhs->iColumn ) continue; assert( pIdx->azColl[j] ); @@ -103996,7 +104488,7 @@ SQLITE_PRIVATE int sqlite3FindInIndex( */ if( eType==0 && (inFlags & IN_INDEX_NOOP_OK) - && !ExprHasProperty(pX, EP_xIsSelect) + && ExprUseXList(pX) && (!sqlite3InRhsIsConstant(pX) || pX->x.pList->nExpr<=2) ){ eType = IN_INDEX_NOOP; @@ -104041,10 +104533,10 @@ SQLITE_PRIVATE int sqlite3FindInIndex( ** It is the responsibility of the caller to ensure that the returned ** string is eventually freed using sqlite3DbFree(). */ -static char *exprINAffinity(Parse *pParse, Expr *pExpr){ +static char *exprINAffinity(Parse *pParse, const Expr *pExpr){ Expr *pLeft = pExpr->pLeft; int nVal = sqlite3ExprVectorSize(pLeft); - Select *pSelect = (pExpr->flags & EP_xIsSelect) ? pExpr->x.pSelect : 0; + Select *pSelect = ExprUseXSelect(pExpr) ? 
pExpr->x.pSelect : 0; char *zRet; assert( pExpr->op==TK_IN ); @@ -104094,7 +104586,7 @@ SQLITE_PRIVATE void sqlite3SubselectError(Parse *pParse, int nActual, int nExpec */ SQLITE_PRIVATE void sqlite3VectorErrorMsg(Parse *pParse, Expr *pExpr){ #ifndef SQLITE_OMIT_SUBQUERY - if( pExpr->flags & EP_xIsSelect ){ + if( ExprUseXSelect(pExpr) ){ sqlite3SubselectError(pParse, pExpr->x.pSelect->pEList->nExpr, 1); }else #endif @@ -104158,10 +104650,11 @@ SQLITE_PRIVATE void sqlite3CodeRhsOfIN( */ if( ExprHasProperty(pExpr, EP_Subrtn) ){ addrOnce = sqlite3VdbeAddOp0(v, OP_Once); VdbeCoverage(v); - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ ExplainQueryPlan((pParse, 0, "REUSE LIST SUBQUERY %d", pExpr->x.pSelect->selId)); } + assert( ExprUseYSub(pExpr) ); sqlite3VdbeAddOp2(v, OP_Gosub, pExpr->y.sub.regReturn, pExpr->y.sub.iAddr); sqlite3VdbeAddOp2(v, OP_OpenDup, iTab, pExpr->iTable); @@ -104170,6 +104663,7 @@ SQLITE_PRIVATE void sqlite3CodeRhsOfIN( } /* Begin coding the subroutine */ + assert( !ExprUseYWin(pExpr) ); ExprSetProperty(pExpr, EP_Subrtn); assert( !ExprHasProperty(pExpr, EP_TokenOnly|EP_Reduced) ); pExpr->y.sub.regReturn = ++pParse->nMem; @@ -104190,7 +104684,7 @@ SQLITE_PRIVATE void sqlite3CodeRhsOfIN( pExpr->iTable = iTab; addr = sqlite3VdbeAddOp2(v, OP_OpenEphemeral, pExpr->iTable, nVal); #ifdef SQLITE_ENABLE_EXPLAIN_COMMENTS - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ VdbeComment((v, "Result of SELECT %u", pExpr->x.pSelect->selId)); }else{ VdbeComment((v, "RHS of IN operator")); @@ -104198,7 +104692,7 @@ SQLITE_PRIVATE void sqlite3CodeRhsOfIN( #endif pKeyInfo = sqlite3KeyInfoAlloc(pParse->db, nVal, 1); - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ /* Case 1: expr IN (SELECT ...) ** ** Generate code to write the results of the select into the temporary @@ -104296,6 +104790,7 @@ SQLITE_PRIVATE void sqlite3CodeRhsOfIN( if( addrOnce ){ sqlite3VdbeJumpHere(v, addrOnce); /* Subroutine return */ + assert( ExprUseYSub(pExpr) ); sqlite3VdbeAddOp1(v, OP_Return, pExpr->y.sub.regReturn); sqlite3VdbeChangeP1(v, pExpr->y.sub.iAddr-1, sqlite3VdbeCurrentAddr(v)-1); sqlite3ClearTempRegCache(pParse); @@ -104332,19 +104827,22 @@ SQLITE_PRIVATE int sqlite3CodeSubselect(Parse *pParse, Expr *pExpr){ testcase( pExpr->op==TK_EXISTS ); testcase( pExpr->op==TK_SELECT ); assert( pExpr->op==TK_EXISTS || pExpr->op==TK_SELECT ); - assert( ExprHasProperty(pExpr, EP_xIsSelect) ); + assert( ExprUseXSelect(pExpr) ); pSel = pExpr->x.pSelect; /* If this routine has already been coded, then invoke it as a ** subroutine. 
*/ if( ExprHasProperty(pExpr, EP_Subrtn) ){ ExplainQueryPlan((pParse, 0, "REUSE SUBQUERY %d", pSel->selId)); + assert( ExprUseYSub(pExpr) ); sqlite3VdbeAddOp2(v, OP_Gosub, pExpr->y.sub.regReturn, pExpr->y.sub.iAddr); return pExpr->iTable; } /* Begin coding the subroutine */ + assert( !ExprUseYWin(pExpr) ); + assert( !ExprHasProperty(pExpr, EP_Reduced|EP_TokenOnly) ); ExprSetProperty(pExpr, EP_Subrtn); pExpr->y.sub.regReturn = ++pParse->nMem; pExpr->y.sub.iAddr = @@ -104424,6 +104922,7 @@ SQLITE_PRIVATE int sqlite3CodeSubselect(Parse *pParse, Expr *pExpr){ } /* Subroutine return */ + assert( ExprUseYSub(pExpr) ); sqlite3VdbeAddOp1(v, OP_Return, pExpr->y.sub.regReturn); sqlite3VdbeChangeP1(v, pExpr->y.sub.iAddr-1, sqlite3VdbeCurrentAddr(v)-1); sqlite3ClearTempRegCache(pParse); @@ -104440,7 +104939,7 @@ SQLITE_PRIVATE int sqlite3CodeSubselect(Parse *pParse, Expr *pExpr){ */ SQLITE_PRIVATE int sqlite3ExprCheckIN(Parse *pParse, Expr *pIn){ int nVector = sqlite3ExprVectorSize(pIn->pLeft); - if( (pIn->flags & EP_xIsSelect)!=0 && !pParse->db->mallocFailed ){ + if( ExprUseXSelect(pIn) && !pParse->db->mallocFailed ){ if( nVector!=pIn->x.pSelect->pEList->nExpr ){ sqlite3SubselectError(pParse, pIn->x.pSelect->pEList->nExpr, nVector); return 1; @@ -104574,13 +105073,15 @@ static void sqlite3ExprCodeIN( ** This is step (1) in the in-operator.md optimized algorithm. */ if( eType==IN_INDEX_NOOP ){ - ExprList *pList = pExpr->x.pList; - CollSeq *pColl = sqlite3ExprCollSeq(pParse, pExpr->pLeft); + ExprList *pList; + CollSeq *pColl; int labelOk = sqlite3VdbeMakeLabel(pParse); int r2, regToFree; int regCkNull = 0; int ii; - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); + assert( ExprUseXList(pExpr) ); + pList = pExpr->x.pList; + pColl = sqlite3ExprCollSeq(pParse, pExpr->pLeft); if( destIfNull!=destIfFalse ){ regCkNull = sqlite3GetTempReg(pParse); sqlite3VdbeAddOp3(v, OP_BitAnd, rLhs, rLhs, regCkNull); @@ -104817,9 +105318,10 @@ SQLITE_PRIVATE void sqlite3ExprCodeLoadIndexColumn( ** and store the result in register regOut */ SQLITE_PRIVATE void sqlite3ExprCodeGeneratedColumn( - Parse *pParse, - Column *pCol, - int regOut + Parse *pParse, /* Parsing context */ + Table *pTab, /* Table containing the generated column */ + Column *pCol, /* The generated column */ + int regOut /* Put the result in this register */ ){ int iAddr; Vdbe *v = pParse->pVdbe; @@ -104830,7 +105332,7 @@ SQLITE_PRIVATE void sqlite3ExprCodeGeneratedColumn( }else{ iAddr = 0; } - sqlite3ExprCodeCopy(pParse, pCol->pDflt, regOut); + sqlite3ExprCodeCopy(pParse, sqlite3ColumnExpr(pTab,pCol), regOut); if( pCol->affinity>=SQLITE_AFF_TEXT ){ sqlite3VdbeAddOp4(v, OP_Affinity, regOut, 1, 0, &pCol->affinity, 1); } @@ -104866,12 +105368,13 @@ SQLITE_PRIVATE void sqlite3ExprCodeGetColumnOfTable( }else if( (pCol = &pTab->aCol[iCol])->colFlags & COLFLAG_VIRTUAL ){ Parse *pParse = sqlite3VdbeParser(v); if( pCol->colFlags & COLFLAG_BUSY ){ - sqlite3ErrorMsg(pParse, "generated column loop on \"%s\"", pCol->zName); + sqlite3ErrorMsg(pParse, "generated column loop on \"%s\"", + pCol->zCnName); }else{ int savedSelfTab = pParse->iSelfTab; pCol->colFlags |= COLFLAG_BUSY; pParse->iSelfTab = iTabCur+1; - sqlite3ExprCodeGeneratedColumn(pParse, pCol, regOut); + sqlite3ExprCodeGeneratedColumn(pParse, pTab, pCol, regOut); pParse->iSelfTab = savedSelfTab; pCol->colFlags &= ~COLFLAG_BUSY; } @@ -104964,6 +105467,7 @@ static int exprCodeVector(Parse *pParse, Expr *p, int *piFreeable){ int i; iResult = pParse->nMem+1; pParse->nMem += nResult; + assert( ExprUseXList(p) ); 
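The subquery hunks above lean on the EP_Subrtn pattern: the first time an IN(...) right-hand side or scalar subquery is coded, its entry address and return register are stashed in Expr.y.sub, and later uses only emit OP_Gosub/OP_Return to re-enter that already-generated code. The toy interpreter below sketches that compile-once/call-many idea; the Instr type and opcode names are invented for illustration and are not the VDBE instruction set.

#include <stdio.h>

/* A minimal program-counter "VM" with GOSUB/RETURN, showing how code that
** is generated once can be re-entered from several call sites. */
typedef enum { OP_PRINT, OP_GOSUB, OP_RETURN, OP_HALT } Op;
typedef struct { Op op; int p1; } Instr;

static void run(const Instr *aOp, int pc){
  int regReturn = -1;                 /* plays the role of y.sub.regReturn */
  for(;;){
    const Instr *p = &aOp[pc];
    switch( p->op ){
      case OP_PRINT:  printf("value %d\n", p->p1); pc++;     break;
      case OP_GOSUB:  regReturn = pc+1; pc = p->p1;          break;
      case OP_RETURN: pc = regReturn;                        break;
      case OP_HALT:   return;
    }
  }
}

int main(void){
  /* Addresses 0..1 are the "subquery", coded exactly once.
  ** The main program starting at address 2 invokes it twice. */
  Instr prog[] = {
    { OP_PRINT, 42 },   /* 0: body of the subroutine        */
    { OP_RETURN, 0 },   /* 1: jump back to the caller       */
    { OP_GOSUB, 0 },    /* 2: first use of the subroutine   */
    { OP_GOSUB, 0 },    /* 3: second use, nothing recompiled */
    { OP_HALT, 0 },     /* 4 */
  };
  run(prog, 2);         /* prints "value 42" twice */
  return 0;
}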
for(i=0; ix.pList->a[i].pExpr, i+iResult); } @@ -105038,6 +105542,7 @@ static int exprCodeInlineFunction( ** Test-only SQL functions that are only usable if enabled ** via SQLITE_TESTCTRL_INTERNAL_FUNCTIONS */ +#if !defined(SQLITE_UNTESTABLE) case INLINEFUNC_expr_compare: { /* Compare two expressions using sqlite3ExprCompare() */ assert( nFarg==2 ); @@ -105071,7 +105576,6 @@ static int exprCodeInlineFunction( break; } -#ifdef SQLITE_DEBUG case INLINEFUNC_affinity: { /* The AFFINITY() function evaluates to a string that describes ** the type affinity of the argument. This is used for testing of @@ -105085,7 +105589,7 @@ static int exprCodeInlineFunction( (aff<=SQLITE_AFF_NONE) ? "none" : azAff[aff-SQLITE_AFF_BLOB]); break; } -#endif +#endif /* !defined(SQLITE_UNTESTABLE) */ } return target; } @@ -105139,7 +105643,8 @@ expr_code_doover: if( pCol->iColumn<0 ){ VdbeComment((v,"%s.rowid",pTab->zName)); }else{ - VdbeComment((v,"%s.%s",pTab->zName,pTab->aCol[pCol->iColumn].zName)); + VdbeComment((v,"%s.%s", + pTab->zName, pTab->aCol[pCol->iColumn].zCnName)); if( pTab->aCol[pCol->iColumn].affinity==SQLITE_AFF_REAL ){ sqlite3VdbeAddOp1(v, OP_RealAffinity, target); } @@ -105161,6 +105666,7 @@ expr_code_doover: */ int aff; iReg = sqlite3ExprCodeTarget(pParse, pExpr->pLeft,target); + assert( ExprUseYTab(pExpr) ); if( pExpr->y.pTab ){ aff = sqlite3TableColumnAffinity(pExpr->y.pTab, pExpr->iColumn); }else{ @@ -105184,9 +105690,11 @@ expr_code_doover: ** immediately prior to the first column. */ Column *pCol; - Table *pTab = pExpr->y.pTab; + Table *pTab; int iSrc; int iCol = pExpr->iColumn; + assert( ExprUseYTab(pExpr) ); + pTab = pExpr->y.pTab; assert( pTab!=0 ); assert( iCol>=XN_ROWID ); assert( iColnCol ); @@ -105200,12 +105708,12 @@ expr_code_doover: if( pCol->colFlags & COLFLAG_GENERATED ){ if( pCol->colFlags & COLFLAG_BUSY ){ sqlite3ErrorMsg(pParse, "generated column loop on \"%s\"", - pCol->zName); + pCol->zCnName); return 0; } pCol->colFlags |= COLFLAG_BUSY; if( pCol->colFlags & COLFLAG_NOTAVAIL ){ - sqlite3ExprCodeGeneratedColumn(pParse, pCol, iSrc); + sqlite3ExprCodeGeneratedColumn(pParse, pTab, pCol, iSrc); } pCol->colFlags &= ~(COLFLAG_BUSY|COLFLAG_NOTAVAIL); return iSrc; @@ -105224,6 +105732,7 @@ expr_code_doover: iTab = pParse->iSelfTab - 1; } } + assert( ExprUseYTab(pExpr) ); iReg = sqlite3ExprCodeGetColumn(pParse, pExpr->y.pTab, pExpr->iColumn, iTab, target, pExpr->op2); @@ -105301,6 +105810,7 @@ expr_code_doover: sqlite3VdbeAddOp2(v, OP_SCopy, inReg, target); inReg = target; } + assert( !ExprHasProperty(pExpr, EP_IntValue) ); sqlite3VdbeAddOp2(v, OP_Cast, target, sqlite3AffinityType(pExpr->u.zToken, 0)); return inReg; @@ -105468,8 +105978,8 @@ expr_code_doover: ** multiple times if we know they always give the same result */ return sqlite3ExprCodeRunJustOnce(pParse, pExpr, -1); } - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); assert( !ExprHasProperty(pExpr, EP_TokenOnly) ); + assert( ExprUseXList(pExpr) ); pFarg = pExpr->x.pList; nFarg = pFarg ? 
pFarg->nExpr : 0; assert( !ExprHasProperty(pExpr, EP_IntValue) ); @@ -105558,7 +106068,7 @@ expr_code_doover: sqlite3VdbeAddOp4(v, OP_CollSeq, 0, 0, 0, (char *)pColl, P4_COLLSEQ); } #ifdef SQLITE_ENABLE_OFFSET_SQL_FUNC - if( pDef->funcFlags & SQLITE_FUNC_OFFSET ){ + if( (pDef->funcFlags & SQLITE_FUNC_OFFSET)!=0 && ALWAYS(pFarg!=0) ){ Expr *pArg = pFarg->a[0].pExpr; if( pArg->op==TK_COLUMN ){ sqlite3VdbeAddOp3(v, OP_Offset, pArg->iTable, pArg->iColumn, target); @@ -105588,7 +106098,10 @@ expr_code_doover: testcase( op==TK_SELECT ); if( pParse->db->mallocFailed ){ return 0; - }else if( op==TK_SELECT && (nCol = pExpr->x.pSelect->pEList->nExpr)!=1 ){ + }else if( op==TK_SELECT + && ALWAYS( ExprUseXSelect(pExpr) ) + && (nCol = pExpr->x.pSelect->pEList->nExpr)!=1 + ){ sqlite3SubselectError(pParse, nCol, 1); }else{ return sqlite3CodeSubselect(pParse, pExpr); @@ -105600,11 +106113,9 @@ expr_code_doover: if( pExpr->pLeft->iTable==0 ){ pExpr->pLeft->iTable = sqlite3CodeSubselect(pParse, pExpr->pLeft); } - assert( pExpr->iTable==0 || pExpr->pLeft->op==TK_SELECT - || pExpr->pLeft->op==TK_ERROR ); - if( pExpr->iTable!=0 - && pExpr->iTable!=(n = sqlite3ExprVectorSize(pExpr->pLeft)) - ){ + assert( pExpr->pLeft->op==TK_SELECT || pExpr->pLeft->op==TK_ERROR ); + n = sqlite3ExprVectorSize(pExpr->pLeft); + if( pExpr->iTable!=n ){ sqlite3ErrorMsg(pParse, "%d columns assigned %d values", pExpr->iTable, n); } @@ -105672,9 +106183,14 @@ expr_code_doover: ** p1==1 -> old.a p1==4 -> new.a ** p1==2 -> old.b p1==5 -> new.b */ - Table *pTab = pExpr->y.pTab; - int iCol = pExpr->iColumn; - int p1 = pExpr->iTable * (pTab->nCol+1) + 1 + Table *pTab; + int iCol; + int p1; + + assert( ExprUseYTab(pExpr) ); + pTab = pExpr->y.pTab; + iCol = pExpr->iColumn; + p1 = pExpr->iTable * (pTab->nCol+1) + 1 + sqlite3TableColumnToStorage(pTab, iCol); assert( pExpr->iTable==0 || pExpr->iTable==1 ); @@ -105685,7 +106201,7 @@ expr_code_doover: sqlite3VdbeAddOp2(v, OP_Param, p1, target); VdbeComment((v, "r[%d]=%s.%s", target, (pExpr->iTable ? "new" : "old"), - (pExpr->iColumn<0 ? "rowid" : pExpr->y.pTab->aCol[iCol].zName) + (pExpr->iColumn<0 ? "rowid" : pExpr->y.pTab->aCol[iCol].zCnName) )); #ifndef SQLITE_OMIT_FLOATING_POINT @@ -105762,7 +106278,7 @@ expr_code_doover: Expr *pDel = 0; sqlite3 *db = pParse->db; - assert( !ExprHasProperty(pExpr, EP_xIsSelect) && pExpr->x.pList ); + assert( ExprUseXList(pExpr) && pExpr->x.pList!=0 ); assert(pExpr->x.pList->nExpr > 0); pEList = pExpr->x.pList; aListelem = pEList->a; @@ -105959,7 +106475,7 @@ SQLITE_PRIVATE void sqlite3ExprCode(Parse *pParse, Expr *pExpr, int target){ inReg = sqlite3ExprCodeTarget(pParse, pExpr, target); if( inReg!=target ){ u8 op; - if( ExprHasProperty(pExpr,EP_Subquery) ){ + if( ALWAYS(pExpr) && ExprHasProperty(pExpr,EP_Subquery) ){ op = OP_Copy; }else{ op = OP_SCopy; @@ -106107,7 +106623,7 @@ static void exprCodeBetween( memset(&compRight, 0, sizeof(Expr)); memset(&exprAnd, 0, sizeof(Expr)); - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); + assert( ExprUseXList(pExpr) ); pDel = sqlite3ExprDup(db, pExpr->pLeft, 0); if( db->mallocFailed==0 ){ exprAnd.op = TK_AND; @@ -106497,7 +107013,11 @@ SQLITE_PRIVATE void sqlite3ExprIfFalseDup(Parse *pParse, Expr *pExpr, int dest,i ** Otherwise, if the values are not the same or if pExpr is not a simple ** SQL value, zero is returned. 
*/ -static int exprCompareVariable(Parse *pParse, Expr *pVar, Expr *pExpr){ +static int exprCompareVariable( + const Parse *pParse, + const Expr *pVar, + const Expr *pExpr +){ int res = 0; int iVar; sqlite3_value *pL, *pR = 0; @@ -106549,7 +107069,12 @@ static int exprCompareVariable(Parse *pParse, Expr *pVar, Expr *pExpr){ ** Argument pParse should normally be NULL. If it is not NULL and pA or ** pB causes a return value of 2. */ -SQLITE_PRIVATE int sqlite3ExprCompare(Parse *pParse, Expr *pA, Expr *pB, int iTab){ +SQLITE_PRIVATE int sqlite3ExprCompare( + const Parse *pParse, + const Expr *pA, + const Expr *pB, + int iTab +){ u32 combinedFlags; if( pA==0 || pB==0 ){ return pB==pA ? 0 : 2; @@ -106573,7 +107098,9 @@ SQLITE_PRIVATE int sqlite3ExprCompare(Parse *pParse, Expr *pA, Expr *pB, int iTa } return 2; } - if( pA->op!=TK_COLUMN && pA->op!=TK_AGG_COLUMN && pA->u.zToken ){ + assert( !ExprHasProperty(pA, EP_IntValue) ); + assert( !ExprHasProperty(pB, EP_IntValue) ); + if( pA->u.zToken ){ if( pA->op==TK_FUNCTION || pA->op==TK_AGG_FUNCTION ){ if( sqlite3StrICmp(pA->u.zToken,pB->u.zToken)!=0 ) return 2; #ifndef SQLITE_OMIT_WINDOWFUNC @@ -106591,7 +107118,12 @@ SQLITE_PRIVATE int sqlite3ExprCompare(Parse *pParse, Expr *pA, Expr *pB, int iTa return 0; }else if( pA->op==TK_COLLATE ){ if( sqlite3_stricmp(pA->u.zToken,pB->u.zToken)!=0 ) return 2; - }else if( ALWAYS(pB->u.zToken!=0) && strcmp(pA->u.zToken,pB->u.zToken)!=0 ){ + }else + if( pB->u.zToken!=0 + && pA->op!=TK_COLUMN + && pA->op!=TK_AGG_COLUMN + && strcmp(pA->u.zToken,pB->u.zToken)!=0 + ){ return 2; } } @@ -106633,7 +107165,7 @@ SQLITE_PRIVATE int sqlite3ExprCompare(Parse *pParse, Expr *pA, Expr *pB, int iTa ** Two NULL pointers are considered to be the same. But a NULL pointer ** always differs from a non-NULL pointer. */ -SQLITE_PRIVATE int sqlite3ExprListCompare(ExprList *pA, ExprList *pB, int iTab){ +SQLITE_PRIVATE int sqlite3ExprListCompare(const ExprList *pA, const ExprList *pB, int iTab){ int i; if( pA==0 && pB==0 ) return 0; if( pA==0 || pB==0 ) return 1; @@ -106652,7 +107184,7 @@ SQLITE_PRIVATE int sqlite3ExprListCompare(ExprList *pA, ExprList *pB, int iTab){ ** Like sqlite3ExprCompare() except COLLATE operators at the top-level ** are ignored. 
*/ -SQLITE_PRIVATE int sqlite3ExprCompareSkip(Expr *pA, Expr *pB, int iTab){ +SQLITE_PRIVATE int sqlite3ExprCompareSkip(Expr *pA,Expr *pB, int iTab){ return sqlite3ExprCompare(0, sqlite3ExprSkipCollateAndLikely(pA), sqlite3ExprSkipCollateAndLikely(pB), @@ -106666,9 +107198,9 @@ SQLITE_PRIVATE int sqlite3ExprCompareSkip(Expr *pA, Expr *pB, int iTab){ ** non-NULL if pNN is not NULL */ static int exprImpliesNotNull( - Parse *pParse, /* Parsing context */ - Expr *p, /* The expression to be checked */ - Expr *pNN, /* The expression that is NOT NULL */ + const Parse *pParse,/* Parsing context */ + const Expr *p, /* The expression to be checked */ + const Expr *pNN, /* The expression that is NOT NULL */ int iTab, /* Table being evaluated */ int seenNot /* Return true only if p can be any non-NULL value */ ){ @@ -106680,12 +107212,13 @@ static int exprImpliesNotNull( switch( p->op ){ case TK_IN: { if( seenNot && ExprHasProperty(p, EP_xIsSelect) ) return 0; - assert( ExprHasProperty(p,EP_xIsSelect) - || (p->x.pList!=0 && p->x.pList->nExpr>0) ); + assert( ExprUseXSelect(p) || (p->x.pList!=0 && p->x.pList->nExpr>0) ); return exprImpliesNotNull(pParse, p->pLeft, pNN, iTab, 1); } case TK_BETWEEN: { - ExprList *pList = p->x.pList; + ExprList *pList; + assert( ExprUseXList(p) ); + pList = p->x.pList; assert( pList!=0 ); assert( pList->nExpr==2 ); if( seenNot ) return 0; @@ -106761,7 +107294,12 @@ static int exprImpliesNotNull( ** improvement. Returning false might cause a performance reduction, but ** it will always give the correct answer and is hence always safe. */ -SQLITE_PRIVATE int sqlite3ExprImpliesExpr(Parse *pParse, Expr *pE1, Expr *pE2, int iTab){ +SQLITE_PRIVATE int sqlite3ExprImpliesExpr( + const Parse *pParse, + const Expr *pE1, + const Expr *pE2, + int iTab +){ if( sqlite3ExprCompare(pParse, pE1, pE2, iTab)==0 ){ return 1; } @@ -106857,10 +107395,14 @@ static int impliesNotNullRow(Walker *pWalker, Expr *pExpr){ testcase( pExpr->op==TK_GE ); /* The y.pTab=0 assignment in wherecode.c always happens after the ** impliesNotNullRow() test */ - if( (pLeft->op==TK_COLUMN && ALWAYS(pLeft->y.pTab!=0) - && IsVirtual(pLeft->y.pTab)) - || (pRight->op==TK_COLUMN && ALWAYS(pRight->y.pTab!=0) - && IsVirtual(pRight->y.pTab)) + assert( pLeft->op!=TK_COLUMN || ExprUseYTab(pLeft) ); + assert( pRight->op!=TK_COLUMN || ExprUseYTab(pRight) ); + if( (pLeft->op==TK_COLUMN + && pLeft->y.pTab!=0 + && IsVirtual(pLeft->y.pTab)) + || (pRight->op==TK_COLUMN + && pRight->y.pTab!=0 + && IsVirtual(pRight->y.pTab)) ){ return WRC_Prune; } @@ -106969,88 +107511,125 @@ SQLITE_PRIVATE int sqlite3ExprCoveredByIndex( } -/* -** An instance of the following structure is used by the tree walker -** to count references to table columns in the arguments of an -** aggregate function, in order to implement the -** sqlite3FunctionThisSrc() routine. +/* Structure used to pass information throught the Walker in order to +** implement sqlite3ReferencesSrcList(). 
*/ -struct SrcCount { - SrcList *pSrc; /* One particular FROM clause in a nested query */ - int iSrcInner; /* Smallest cursor number in this context */ - int nThis; /* Number of references to columns in pSrcList */ - int nOther; /* Number of references to columns in other FROM clauses */ +struct RefSrcList { + sqlite3 *db; /* Database connection used for sqlite3DbRealloc() */ + SrcList *pRef; /* Looking for references to these tables */ + i64 nExclude; /* Number of tables to exclude from the search */ + int *aiExclude; /* Cursor IDs for tables to exclude from the search */ }; /* -** xSelect callback for sqlite3FunctionUsesThisSrc(). If this is the first -** SELECT with a FROM clause encountered during this iteration, set -** SrcCount.iSrcInner to the cursor number of the leftmost object in -** the FROM cause. +** Walker SELECT callbacks for sqlite3ReferencesSrcList(). +** +** When entering a new subquery on the pExpr argument, add all FROM clause +** entries for that subquery to the exclude list. +** +** When leaving the subquery, remove those entries from the exclude list. */ -static int selectSrcCount(Walker *pWalker, Select *pSel){ - struct SrcCount *p = pWalker->u.pSrcCount; - if( p->iSrcInner==0x7FFFFFFF && ALWAYS(pSel->pSrc) && pSel->pSrc->nSrc ){ - pWalker->u.pSrcCount->iSrcInner = pSel->pSrc->a[0].iCursor; +static int selectRefEnter(Walker *pWalker, Select *pSelect){ + struct RefSrcList *p = pWalker->u.pRefSrcList; + SrcList *pSrc = pSelect->pSrc; + i64 i, j; + int *piNew; + if( pSrc->nSrc==0 ) return WRC_Continue; + j = p->nExclude; + p->nExclude += pSrc->nSrc; + piNew = sqlite3DbRealloc(p->db, p->aiExclude, p->nExclude*sizeof(int)); + if( piNew==0 ){ + p->nExclude = 0; + return WRC_Abort; + }else{ + p->aiExclude = piNew; + } + for(i=0; inSrc; i++, j++){ + p->aiExclude[j] = pSrc->a[i].iCursor; } return WRC_Continue; } +static void selectRefLeave(Walker *pWalker, Select *pSelect){ + struct RefSrcList *p = pWalker->u.pRefSrcList; + SrcList *pSrc = pSelect->pSrc; + if( p->nExclude ){ + assert( p->nExclude>=pSrc->nSrc ); + p->nExclude -= pSrc->nSrc; + } +} -/* -** Count the number of references to columns. +/* This is the Walker EXPR callback for sqlite3ReferencesSrcList(). +** +** Set the 0x01 bit of pWalker->eCode if there is a reference to any +** of the tables shown in RefSrcList.pRef. +** +** Set the 0x02 bit of pWalker->eCode if there is a reference to a +** table is in neither RefSrcList.pRef nor RefSrcList.aiExclude. */ -static int exprSrcCount(Walker *pWalker, Expr *pExpr){ - /* There was once a NEVER() on the second term on the grounds that - ** sqlite3FunctionUsesThisSrc() was always called before - ** sqlite3ExprAnalyzeAggregates() and so the TK_COLUMNs have not yet - ** been converted into TK_AGG_COLUMN. But this is no longer true due - ** to window functions - sqlite3WindowRewrite() may now indirectly call - ** FunctionUsesThisSrc() when creating a new sub-select. */ - if( pExpr->op==TK_COLUMN || pExpr->op==TK_AGG_COLUMN ){ +static int exprRefToSrcList(Walker *pWalker, Expr *pExpr){ + if( pExpr->op==TK_COLUMN + || pExpr->op==TK_AGG_COLUMN + ){ int i; - struct SrcCount *p = pWalker->u.pSrcCount; - SrcList *pSrc = p->pSrc; + struct RefSrcList *p = pWalker->u.pRefSrcList; + SrcList *pSrc = p->pRef; int nSrc = pSrc ? 
pSrc->nSrc : 0; for(i=0; iiTable==pSrc->a[i].iCursor ) break; + if( pExpr->iTable==pSrc->a[i].iCursor ){ + pWalker->eCode |= 1; + return WRC_Continue; + } } - if( inThis++; - }else if( pExpr->iTableiSrcInner ){ - /* In a well-formed parse tree (no name resolution errors), - ** TK_COLUMN nodes with smaller Expr.iTable values are in an - ** outer context. Those are the only ones to count as "other" */ - p->nOther++; + for(i=0; inExclude && p->aiExclude[i]!=pExpr->iTable; i++){} + if( i>=p->nExclude ){ + pWalker->eCode |= 2; } } return WRC_Continue; } /* -** Determine if any of the arguments to the pExpr Function reference -** pSrcList. Return true if they do. Also return true if the function -** has no arguments or has only constant arguments. Return false if pExpr -** references columns but not columns of tables found in pSrcList. +** Check to see if pExpr references any tables in pSrcList. +** Possible return values: +** +** 1 pExpr does references a table in pSrcList. +** +** 0 pExpr references some table that is not defined in either +** pSrcList or in subqueries of pExpr itself. +** +** -1 pExpr only references no tables at all, or it only +** references tables defined in subqueries of pExpr itself. +** +** As currently used, pExpr is always an aggregate function call. That +** fact is exploited for efficiency. */ -SQLITE_PRIVATE int sqlite3FunctionUsesThisSrc(Expr *pExpr, SrcList *pSrcList){ +SQLITE_PRIVATE int sqlite3ReferencesSrcList(Parse *pParse, Expr *pExpr, SrcList *pSrcList){ Walker w; - struct SrcCount cnt; - assert( pExpr->op==TK_AGG_FUNCTION ); + struct RefSrcList x; memset(&w, 0, sizeof(w)); - w.xExprCallback = exprSrcCount; - w.xSelectCallback = selectSrcCount; - w.u.pSrcCount = &cnt; - cnt.pSrc = pSrcList; - cnt.iSrcInner = (pSrcList&&pSrcList->nSrc)?pSrcList->a[0].iCursor:0x7FFFFFFF; - cnt.nThis = 0; - cnt.nOther = 0; + memset(&x, 0, sizeof(x)); + w.xExprCallback = exprRefToSrcList; + w.xSelectCallback = selectRefEnter; + w.xSelectCallback2 = selectRefLeave; + w.u.pRefSrcList = &x; + x.db = pParse->db; + x.pRef = pSrcList; + assert( pExpr->op==TK_AGG_FUNCTION ); + assert( ExprUseXList(pExpr) ); sqlite3WalkExprList(&w, pExpr->x.pList); #ifndef SQLITE_OMIT_WINDOWFUNC if( ExprHasProperty(pExpr, EP_WinFunc) ){ sqlite3WalkExpr(&w, pExpr->y.pWin->pFilter); } #endif - return cnt.nThis>0 || cnt.nOther==0; + sqlite3DbFree(pParse->db, x.aiExclude); + if( w.eCode & 0x01 ){ + return 1; + }else if( w.eCode ){ + return 0; + }else{ + return -1; + } } /* @@ -107185,6 +107764,7 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){ && (k = addAggInfoColumn(pParse->db, pAggInfo))>=0 ){ pCol = &pAggInfo->aCol[k]; + assert( ExprUseYTab(pExpr) ); pCol->pTab = pExpr->y.pTab; pCol->iTable = pExpr->iTable; pCol->iColumn = pExpr->iColumn; @@ -107248,7 +107828,7 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){ pItem = &pAggInfo->aFunc[i]; pItem->pFExpr = pExpr; pItem->iMem = ++pParse->nMem; - assert( !ExprHasProperty(pExpr, EP_IntValue) ); + assert( ExprUseUToken(pExpr) ); pItem->pFunc = sqlite3FindFunction(pParse->db, pExpr->u.zToken, pExpr->x.pList ? pExpr->x.pList->nExpr : 0, enc, 0); @@ -107463,7 +108043,7 @@ static void renameTestSchema( pParse->colNamesSet = 1; sqlite3NestedParse(pParse, "SELECT 1 " - "FROM \"%w\"." DFLT_SCHEMA_TABLE " " + "FROM \"%w\"." 
LEGACY_SCHEMA_TABLE " " "WHERE name NOT LIKE 'sqliteX_%%' ESCAPE 'X'" " AND sql NOT LIKE 'create virtual%%'" " AND sqlite_rename_test(%Q, sql, type, name, %d, %Q, %d)=NULL ", @@ -107474,7 +108054,7 @@ static void renameTestSchema( if( bTemp==0 ){ sqlite3NestedParse(pParse, "SELECT 1 " - "FROM temp." DFLT_SCHEMA_TABLE " " + "FROM temp." LEGACY_SCHEMA_TABLE " " "WHERE name NOT LIKE 'sqliteX_%%' ESCAPE 'X'" " AND sql NOT LIKE 'create virtual%%'" " AND sqlite_rename_test(%Q, sql, type, name, 1, %Q, %d)=NULL ", @@ -107492,14 +108072,14 @@ static void renameTestSchema( */ static void renameFixQuotes(Parse *pParse, const char *zDb, int bTemp){ sqlite3NestedParse(pParse, - "UPDATE \"%w\"." DFLT_SCHEMA_TABLE + "UPDATE \"%w\"." LEGACY_SCHEMA_TABLE " SET sql = sqlite_rename_quotefix(%Q, sql)" "WHERE name NOT LIKE 'sqliteX_%%' ESCAPE 'X'" " AND sql NOT LIKE 'create virtual%%'" , zDb, zDb ); if( bTemp==0 ){ sqlite3NestedParse(pParse, - "UPDATE temp." DFLT_SCHEMA_TABLE + "UPDATE temp." LEGACY_SCHEMA_TABLE " SET sql = sqlite_rename_quotefix('temp', sql)" "WHERE name NOT LIKE 'sqliteX_%%' ESCAPE 'X'" " AND sql NOT LIKE 'create virtual%%'" @@ -107538,9 +108118,7 @@ SQLITE_PRIVATE void sqlite3AlterRenameTable( const char *zTabName; /* Original name of the table */ Vdbe *v; VTable *pVTab = 0; /* Non-zero if this is a v-tab with an xRename() */ - u32 savedDbFlags; /* Saved value of db->mDbFlags */ - savedDbFlags = db->mDbFlags; if( NEVER(db->mallocFailed) ) goto exit_rename_table; assert( pSrc->nSrc==1 ); assert( sqlite3BtreeHoldsAllMutexes(pParse->db) ); @@ -107549,7 +108127,6 @@ SQLITE_PRIVATE void sqlite3AlterRenameTable( if( !pTab ) goto exit_rename_table; iDb = sqlite3SchemaToIndex(pParse->db, pTab->pSchema); zDb = db->aDb[iDb].zDbSName; - db->mDbFlags |= DBFLAG_PreferBuiltin; /* Get a NULL terminated version of the new table name. */ zName = sqlite3NameFromToken(db, pName); @@ -107578,7 +108155,7 @@ SQLITE_PRIVATE void sqlite3AlterRenameTable( } #ifndef SQLITE_OMIT_VIEW - if( pTab->pSelect ){ + if( IsView(pTab) ){ sqlite3ErrorMsg(pParse, "view %s may not be altered", pTab->zName); goto exit_rename_table; } @@ -107620,7 +108197,7 @@ SQLITE_PRIVATE void sqlite3AlterRenameTable( /* Rewrite all CREATE TABLE, INDEX, TRIGGER or VIEW statements in ** the schema to use the new table name. */ sqlite3NestedParse(pParse, - "UPDATE \"%w\"." DFLT_SCHEMA_TABLE " SET " + "UPDATE \"%w\"." LEGACY_SCHEMA_TABLE " SET " "sql = sqlite_rename_table(%Q, type, name, sql, %Q, %Q, %d) " "WHERE (type!='index' OR tbl_name=%Q COLLATE nocase)" "AND name NOT LIKE 'sqliteX_%%' ESCAPE 'X'" @@ -107630,7 +108207,7 @@ SQLITE_PRIVATE void sqlite3AlterRenameTable( /* Update the tbl_name and name columns of the sqlite_schema table ** as required. */ sqlite3NestedParse(pParse, - "UPDATE %Q." DFLT_SCHEMA_TABLE " SET " + "UPDATE %Q." 
LEGACY_SCHEMA_TABLE " SET " "tbl_name = %Q, " "name = CASE " "WHEN type='table' THEN %Q " @@ -107690,7 +108267,6 @@ SQLITE_PRIVATE void sqlite3AlterRenameTable( exit_rename_table: sqlite3SrcListDelete(db, pSrc); sqlite3DbFree(db, zName); - db->mDbFlags = savedDbFlags; } /* @@ -107740,7 +108316,7 @@ SQLITE_PRIVATE void sqlite3AlterFinishAddColumn(Parse *pParse, Token *pColDef){ zDb = db->aDb[iDb].zDbSName; zTab = &pNew->zName[16]; /* Skip the "sqlite_altertab_" prefix on the name */ pCol = &pNew->aCol[pNew->nCol-1]; - pDflt = pCol->pDflt; + pDflt = sqlite3ColumnExpr(pNew, pCol); pTab = sqlite3FindTable(db, zTab, zDb); assert( pTab ); @@ -107774,7 +108350,8 @@ SQLITE_PRIVATE void sqlite3AlterFinishAddColumn(Parse *pParse, Token *pColDef){ if( pDflt && pDflt->pLeft->op==TK_NULL ){ pDflt = 0; } - if( (db->flags&SQLITE_ForeignKeys) && pNew->pFKey && pDflt ){ + assert( IsOrdinaryTable(pNew) ); + if( (db->flags&SQLITE_ForeignKeys) && pNew->u.tab.pFKey && pDflt ){ sqlite3ErrorIfNotEmpty(pParse, zDb, zTab, "Cannot add a REFERENCES column with non-NULL default value"); } @@ -107811,31 +108388,30 @@ SQLITE_PRIVATE void sqlite3AlterFinishAddColumn(Parse *pParse, Token *pColDef){ zCol = sqlite3DbStrNDup(db, (char*)pColDef->z, pColDef->n); if( zCol ){ char *zEnd = &zCol[pColDef->n-1]; - u32 savedDbFlags = db->mDbFlags; while( zEnd>zCol && (*zEnd==';' || sqlite3Isspace(*zEnd)) ){ *zEnd-- = '\0'; } - db->mDbFlags |= DBFLAG_PreferBuiltin; /* substr() operations on characters, but addColOffset is in bytes. So we ** have to use printf() to translate between these units: */ + assert( IsOrdinaryTable(pTab) ); + assert( IsOrdinaryTable(pNew) ); sqlite3NestedParse(pParse, - "UPDATE \"%w\"." DFLT_SCHEMA_TABLE " SET " + "UPDATE \"%w\"." LEGACY_SCHEMA_TABLE " SET " "sql = printf('%%.%ds, ',sql) || %Q" " || substr(sql,1+length(printf('%%.%ds',sql))) " "WHERE type = 'table' AND name = %Q", - zDb, pNew->addColOffset, zCol, pNew->addColOffset, + zDb, pNew->u.tab.addColOffset, zCol, pNew->u.tab.addColOffset, zTab ); sqlite3DbFree(db, zCol); - db->mDbFlags = savedDbFlags; } - /* Make sure the schema version is at least 3. But do not upgrade - ** from less than 3 to 4, as that will corrupt any preexisting DESC - ** index. - */ v = sqlite3GetVdbe(pParse); if( v ){ + /* Make sure the schema version is at least 3. But do not upgrade + ** from less than 3 to 4, as that will corrupt any preexisting DESC + ** index. 
+ */ r1 = sqlite3GetTempReg(pParse); sqlite3VdbeAddOp3(v, OP_ReadCookie, iDb, r1, BTREE_FILE_FORMAT); sqlite3VdbeUsesBtree(v, iDb); @@ -107844,10 +108420,25 @@ SQLITE_PRIVATE void sqlite3AlterFinishAddColumn(Parse *pParse, Token *pColDef){ VdbeCoverage(v); sqlite3VdbeAddOp3(v, OP_SetCookie, iDb, BTREE_FILE_FORMAT, 3); sqlite3ReleaseTempReg(pParse, r1); - } - /* Reload the table definition */ - renameReloadSchema(pParse, iDb, INITFLAG_AlterRename); + /* Reload the table definition */ + renameReloadSchema(pParse, iDb, INITFLAG_AlterAdd); + + /* Verify that constraints are still satisfied */ + if( pNew->pCheck!=0 + || (pCol->notNull && (pCol->colFlags & COLFLAG_GENERATED)!=0) + ){ + sqlite3NestedParse(pParse, + "SELECT CASE WHEN quick_check GLOB 'CHECK*'" + " THEN raise(ABORT,'CHECK constraint failed')" + " ELSE raise(ABORT,'NOT NULL constraint failed')" + " END" + " FROM pragma_quick_check(\"%w\",\"%w\")" + " WHERE quick_check GLOB 'CHECK*' OR quick_check GLOB 'NULL*'", + zTab, zDb + ); + } + } } /* @@ -107888,7 +108479,7 @@ SQLITE_PRIVATE void sqlite3AlterBeginAddColumn(Parse *pParse, SrcList *pSrc){ #endif /* Make sure this is not an attempt to ALTER a view. */ - if( pTab->pSelect ){ + if( IsView(pTab) ){ sqlite3ErrorMsg(pParse, "Cannot add a column to a view"); goto exit_begin_add_column; } @@ -107897,7 +108488,8 @@ SQLITE_PRIVATE void sqlite3AlterBeginAddColumn(Parse *pParse, SrcList *pSrc){ } sqlite3MayAbort(pParse); - assert( pTab->addColOffset>0 ); + assert( IsOrdinaryTable(pTab) ); + assert( pTab->u.tab.addColOffset>0 ); iDb = sqlite3SchemaToIndex(db, pTab->pSchema); /* Put a copy of the Table struct in Parse.pNewTable for the @@ -107924,13 +108516,13 @@ SQLITE_PRIVATE void sqlite3AlterBeginAddColumn(Parse *pParse, SrcList *pSrc){ memcpy(pNew->aCol, pTab->aCol, sizeof(Column)*pNew->nCol); for(i=0; inCol; i++){ Column *pCol = &pNew->aCol[i]; - pCol->zName = sqlite3DbStrDup(db, pCol->zName); - pCol->hName = sqlite3StrIHash(pCol->zName); - pCol->zColl = 0; - pCol->pDflt = 0; + pCol->zCnName = sqlite3DbStrDup(db, pCol->zCnName); + pCol->hName = sqlite3StrIHash(pCol->zCnName); } + assert( IsOrdinaryTable(pNew) ); + pNew->u.tab.pDfltList = sqlite3ExprListDup(db, pTab->u.tab.pDfltList, 0); pNew->pSchema = db->aDb[iDb].pSchema; - pNew->addColOffset = pTab->addColOffset; + pNew->u.tab.addColOffset = pTab->u.tab.addColOffset; pNew->nTabRef = 1; exit_begin_add_column: @@ -107950,7 +108542,7 @@ exit_begin_add_column: static int isRealTable(Parse *pParse, Table *pTab, int bDrop){ const char *zType = 0; #ifndef SQLITE_OMIT_VIEW - if( pTab->pSelect ){ + if( IsView(pTab) ){ zType = "view"; } #endif @@ -108017,7 +108609,7 @@ SQLITE_PRIVATE void sqlite3AlterRenameColumn( zOld = sqlite3NameFromToken(db, pOld); if( !zOld ) goto exit_rename_column; for(iCol=0; iColnCol; iCol++){ - if( 0==sqlite3StrICmp(pTab->aCol[iCol].zName, zOld) ) break; + if( 0==sqlite3StrICmp(pTab->aCol[iCol].zCnName, zOld) ) break; } if( iCol==pTab->nCol ){ sqlite3ErrorMsg(pParse, "no such column: \"%s\"", zOld); @@ -108038,18 +108630,17 @@ SQLITE_PRIVATE void sqlite3AlterRenameColumn( assert( pNew->n>0 ); bQuote = sqlite3Isquote(pNew->z[0]); sqlite3NestedParse(pParse, - "UPDATE \"%w\"." DFLT_SCHEMA_TABLE " SET " + "UPDATE \"%w\"." 
LEGACY_SCHEMA_TABLE " SET " "sql = sqlite_rename_column(sql, type, name, %Q, %Q, %d, %Q, %d, %d) " "WHERE name NOT LIKE 'sqliteX_%%' ESCAPE 'X' " - " AND (type != 'index' OR tbl_name = %Q)" - " AND sql NOT LIKE 'create virtual%%'", + " AND (type != 'index' OR tbl_name = %Q)", zDb, zDb, pTab->zName, iCol, zNew, bQuote, iSchema==1, pTab->zName ); sqlite3NestedParse(pParse, - "UPDATE temp." DFLT_SCHEMA_TABLE " SET " + "UPDATE temp." LEGACY_SCHEMA_TABLE " SET " "sql = sqlite_rename_column(sql, type, name, %Q, %Q, %d, %Q, %d, 1) " "WHERE type IN ('trigger', 'view')", zDb, pTab->zName, iCol, zNew, bQuote @@ -108084,7 +108675,7 @@ SQLITE_PRIVATE void sqlite3AlterRenameColumn( ** the parse tree. */ struct RenameToken { - void *p; /* Parse tree element created by token t */ + const void *p; /* Parse tree element created by token t */ Token t; /* The token that created parse tree element p */ RenameToken *pNext; /* Next is a list of all RenameToken objects */ }; @@ -108126,9 +108717,9 @@ struct RenameCtx { ** Technically, as x no longer points into a valid object or to the byte ** following a valid object, it may not be used in comparison operations. */ -static void renameTokenCheckAll(Parse *pParse, void *pPtr){ +static void renameTokenCheckAll(Parse *pParse, const void *pPtr){ if( pParse->nErr==0 && pParse->db->mallocFailed==0 ){ - RenameToken *p; + const RenameToken *p; u8 i = 0; for(p=pParse->pRename; p; p=p->pNext){ if( p->p ){ @@ -108154,7 +108745,11 @@ static void renameTokenCheckAll(Parse *pParse, void *pPtr){ ** with tail recursion in tokenExpr() routine, for a small performance ** improvement. */ -SQLITE_PRIVATE void *sqlite3RenameTokenMap(Parse *pParse, void *pPtr, Token *pToken){ +SQLITE_PRIVATE const void *sqlite3RenameTokenMap( + Parse *pParse, + const void *pPtr, + const Token *pToken +){ RenameToken *pNew; assert( pPtr || pParse->db->mallocFailed ); renameTokenCheckAll(pParse, pPtr); @@ -108176,7 +108771,7 @@ SQLITE_PRIVATE void *sqlite3RenameTokenMap(Parse *pParse, void *pPtr, Token *pTo ** with parse tree element pFrom. This function remaps the associated token ** to parse tree element pTo. 
*/ -SQLITE_PRIVATE void sqlite3RenameTokenRemap(Parse *pParse, void *pTo, void *pFrom){ +SQLITE_PRIVATE void sqlite3RenameTokenRemap(Parse *pParse, const void *pTo, const void *pFrom){ RenameToken *p; renameTokenCheckAll(pParse, pTo); for(p=pParse->pRename; p; p=p->pNext){ @@ -108192,7 +108787,10 @@ SQLITE_PRIVATE void sqlite3RenameTokenRemap(Parse *pParse, void *pTo, void *pFro */ static int renameUnmapExprCb(Walker *pWalker, Expr *pExpr){ Parse *pParse = pWalker->pParse; - sqlite3RenameTokenRemap(pParse, 0, (void*)pExpr); + sqlite3RenameTokenRemap(pParse, 0, (const void*)pExpr); + if( ExprUseYTab(pExpr) ){ + sqlite3RenameTokenRemap(pParse, 0, (const void*)&pExpr->y.pTab); + } return WRC_Continue; } @@ -108222,6 +108820,7 @@ static void renameWalkWith(Walker *pWalker, Select *pSelect){ memset(&sNC, 0, sizeof(sNC)); sNC.pParse = pParse; if( pCopy ) sqlite3SelectPrep(sNC.pParse, p, &sNC); + if( sNC.pParse->db->mallocFailed ) return; sqlite3WalkSelect(pWalker, p); sqlite3RenameExprlistUnmap(pParse, pWith->a[i].pCols); } @@ -108236,12 +108835,12 @@ static void renameWalkWith(Walker *pWalker, Select *pSelect){ */ static void unmapColumnIdlistNames( Parse *pParse, - IdList *pIdList + const IdList *pIdList ){ if( pIdList ){ int ii; for(ii=0; iinId; ii++){ - sqlite3RenameTokenRemap(pParse, 0, (void*)pIdList->a[ii].zName); + sqlite3RenameTokenRemap(pParse, 0, (const void*)pIdList->a[ii].zName); } } } @@ -108253,9 +108852,9 @@ static int renameUnmapSelectCb(Walker *pWalker, Select *p){ Parse *pParse = pWalker->pParse; int i; if( pParse->nErr ) return WRC_Abort; + testcase( p->selFlags & SF_View ); + testcase( p->selFlags & SF_CopyCte ); if( p->selFlags & (SF_View|SF_CopyCte) ){ - testcase( p->selFlags & SF_View ); - testcase( p->selFlags & SF_CopyCte ); return WRC_Prune; } if( ALWAYS(p->pEList) ){ @@ -108270,7 +108869,7 @@ static int renameUnmapSelectCb(Walker *pWalker, Select *p){ SrcList *pSrc = p->pSrc; for(i=0; inSrc; i++){ sqlite3RenameTokenRemap(pParse, 0, (void*)pSrc->a[i].zName); - if( sqlite3WalkExpr(pWalker, pSrc->a[i].pOn) ) return WRC_Abort; + sqlite3WalkExpr(pWalker, pSrc->a[i].pOn); unmapColumnIdlistNames(pParse, pSrc->a[i].pUsing); } } @@ -108338,7 +108937,7 @@ static void renameTokenFree(sqlite3 *db, RenameToken *pToken){ static RenameToken *renameTokenFind( Parse *pParse, struct RenameCtx *pCtx, - void *pPtr + const void *pPtr ){ RenameToken **pp; if( NEVER(pPtr==0) ){ @@ -108392,6 +108991,7 @@ static int renameColumnExprCb(Walker *pWalker, Expr *pExpr){ renameTokenFind(pWalker->pParse, p, (void*)pExpr); }else if( pExpr->op==TK_COLUMN && pExpr->iColumn==p->iCol + && ALWAYS(ExprUseYTab(pExpr)) && p->pTab==pExpr->y.pTab ){ renameTokenFind(pWalker->pParse, p, (void*)pExpr); @@ -108457,18 +109057,18 @@ static void renameColumnParseError( static void renameColumnElistNames( Parse *pParse, RenameCtx *pCtx, - ExprList *pEList, + const ExprList *pEList, const char *zOld ){ if( pEList ){ int i; for(i=0; inExpr; i++){ - char *zName = pEList->a[i].zEName; + const char *zName = pEList->a[i].zEName; if( ALWAYS(pEList->a[i].eEName==ENAME_NAME) && ALWAYS(zName!=0) && 0==sqlite3_stricmp(zName, zOld) ){ - renameTokenFind(pParse, pCtx, (void*)zName); + renameTokenFind(pParse, pCtx, (const void*)zName); } } } @@ -108482,15 +109082,15 @@ static void renameColumnElistNames( static void renameColumnIdlistNames( Parse *pParse, RenameCtx *pCtx, - IdList *pIdList, + const IdList *pIdList, const char *zOld ){ if( pIdList ){ int i; for(i=0; inId; i++){ - char *zName = pIdList->a[i].zName; + const char *zName 
= pIdList->a[i].zName; if( 0==sqlite3_stricmp(zName, zOld) ){ - renameTokenFind(pParse, pCtx, (void*)zName); + renameTokenFind(pParse, pCtx, (const void*)zName); } } } @@ -108716,6 +109316,9 @@ static int renameResolveTrigger(Parse *pParse){ } } } + if( rc==SQLITE_OK && db->mallocFailed ){ + rc = SQLITE_NOMEM; + } sNC.pSrcList = pSrc; if( rc==SQLITE_OK && pStep->pWhere ){ rc = sqlite3ResolveExprNames(&sNC, pStep->pWhere); @@ -108863,7 +109466,7 @@ static void renameColumnFunc( sqlite3BtreeLeaveAll(db); return; } - zOld = pTab->aCol[iCol].zName; + zOld = pTab->aCol[iCol].zCnName; memset(&sCtx, 0, sizeof(sCtx)); sCtx.iCol = ((iCol==pTab->iPKey) ? -1 : iCol); @@ -108882,8 +109485,8 @@ static void renameColumnFunc( sCtx.pTab = pTab; if( rc!=SQLITE_OK ) goto renameColumnFunc_done; if( sParse.pNewTable ){ - Select *pSelect = sParse.pNewTable->pSelect; - if( pSelect ){ + if( IsView(sParse.pNewTable) ){ + Select *pSelect = sParse.pNewTable->u.view.pSelect; pSelect->selFlags &= ~SF_View; sParse.rc = SQLITE_OK; sqlite3SelectPrep(&sParse, pSelect, 0); @@ -108892,16 +109495,15 @@ static void renameColumnFunc( sqlite3WalkSelect(&sWalker, pSelect); } if( rc!=SQLITE_OK ) goto renameColumnFunc_done; - }else{ + }else if( IsOrdinaryTable(sParse.pNewTable) ){ /* A regular table */ int bFKOnly = sqlite3_stricmp(zTable, sParse.pNewTable->zName); FKey *pFKey; - assert( sParse.pNewTable->pSelect==0 ); sCtx.pTab = sParse.pNewTable; if( bFKOnly==0 ){ if( iColnCol ){ renameTokenFind( - &sParse, &sCtx, (void*)sParse.pNewTable->aCol[iCol].zName + &sParse, &sCtx, (void*)sParse.pNewTable->aCol[iCol].zCnName ); } if( sCtx.iCol<0 ){ @@ -108916,12 +109518,15 @@ static void renameColumnFunc( } #ifndef SQLITE_OMIT_GENERATED_COLUMNS for(i=0; inCol; i++){ - sqlite3WalkExpr(&sWalker, sParse.pNewTable->aCol[i].pDflt); + Expr *pExpr = sqlite3ColumnExpr(sParse.pNewTable, + &sParse.pNewTable->aCol[i]); + sqlite3WalkExpr(&sWalker, pExpr); } #endif } - for(pFKey=sParse.pNewTable->pFKey; pFKey; pFKey=pFKey->pNextFrom){ + assert( IsOrdinaryTable(sParse.pNewTable) ); + for(pFKey=sParse.pNewTable->u.tab.pFKey; pFKey; pFKey=pFKey->pNextFrom){ for(i=0; inCol; i++){ if( bFKOnly==0 && pFKey->aCol[i].iFrom==iCol ){ renameTokenFind(&sParse, &sCtx, (void*)&pFKey->aCol[i]); @@ -108992,7 +109597,10 @@ renameColumnFunc_done: */ static int renameTableExprCb(Walker *pWalker, Expr *pExpr){ RenameCtx *p = pWalker->u.pRename; - if( pExpr->op==TK_COLUMN && p->pTab==pExpr->y.pTab ){ + if( pExpr->op==TK_COLUMN + && ALWAYS(ExprUseYTab(pExpr)) + && p->pTab==pExpr->y.pTab + ){ renameTokenFind(pWalker->pParse, p, (void*)&pExpr->y.pTab); } return WRC_Continue; @@ -109087,28 +109695,31 @@ static void renameTableFunc( if( sParse.pNewTable ){ Table *pTab = sParse.pNewTable; - if( pTab->pSelect ){ + if( IsView(pTab) ){ if( isLegacy==0 ){ - Select *pSelect = pTab->pSelect; + Select *pSelect = pTab->u.view.pSelect; NameContext sNC; memset(&sNC, 0, sizeof(sNC)); sNC.pParse = &sParse; assert( pSelect->selFlags & SF_View ); pSelect->selFlags &= ~SF_View; - sqlite3SelectPrep(&sParse, pTab->pSelect, &sNC); + sqlite3SelectPrep(&sParse, pTab->u.view.pSelect, &sNC); if( sParse.nErr ){ rc = sParse.rc; }else{ - sqlite3WalkSelect(&sWalker, pTab->pSelect); + sqlite3WalkSelect(&sWalker, pTab->u.view.pSelect); } } }else{ /* Modify any FK definitions to point to the new table. 
*/ #ifndef SQLITE_OMIT_FOREIGN_KEY - if( isLegacy==0 || (db->flags & SQLITE_ForeignKeys) ){ + if( (isLegacy==0 || (db->flags & SQLITE_ForeignKeys)) + && !IsVirtual(pTab) + ){ FKey *pFKey; - for(pFKey=pTab->pFKey; pFKey; pFKey=pFKey->pNextFrom){ + assert( IsOrdinaryTable(pTab) ); + for(pFKey=pTab->u.tab.pFKey; pFKey; pFKey=pFKey->pNextFrom){ if( sqlite3_stricmp(pFKey->zTo, zOld)==0 ){ renameTokenFind(&sParse, &sCtx, (void*)pFKey->zTo); } @@ -109185,7 +109796,7 @@ static void renameTableFunc( static int renameQuotefixExprCb(Walker *pWalker, Expr *pExpr){ if( pExpr->op==TK_STRING && (pExpr->flags & EP_DblQuoted) ){ - renameTokenFind(pWalker->pParse, pWalker->u.pRename, (void*)pExpr); + renameTokenFind(pWalker->pParse, pWalker->u.pRename, (const void*)pExpr); } return WRC_Continue; } @@ -109248,8 +109859,8 @@ static void renameQuotefixFunc( sWalker.u.pRename = &sCtx; if( sParse.pNewTable ){ - Select *pSelect = sParse.pNewTable->pSelect; - if( pSelect ){ + if( IsView(sParse.pNewTable) ){ + Select *pSelect = sParse.pNewTable->u.view.pSelect; pSelect->selFlags &= ~SF_View; sParse.rc = SQLITE_OK; sqlite3SelectPrep(&sParse, pSelect, 0); @@ -109262,7 +109873,9 @@ static void renameQuotefixFunc( sqlite3WalkExprList(&sWalker, sParse.pNewTable->pCheck); #ifndef SQLITE_OMIT_GENERATED_COLUMNS for(i=0; inCol; i++){ - sqlite3WalkExpr(&sWalker, sParse.pNewTable->aCol[i].pDflt); + sqlite3WalkExpr(&sWalker, + sqlite3ColumnExpr(sParse.pNewTable, + &sParse.pNewTable->aCol[i])); } #endif /* SQLITE_OMIT_GENERATED_COLUMNS */ } @@ -109345,11 +109958,11 @@ static void renameTableTest( rc = renameParseSql(&sParse, zDb, db, zInput, bTemp); db->flags |= (flags & (SQLITE_DqsDML|SQLITE_DqsDDL)); if( rc==SQLITE_OK ){ - if( isLegacy==0 && sParse.pNewTable && sParse.pNewTable->pSelect ){ + if( isLegacy==0 && sParse.pNewTable && IsView(sParse.pNewTable) ){ NameContext sNC; memset(&sNC, 0, sizeof(sNC)); sNC.pParse = &sParse; - sqlite3SelectPrep(&sParse, sParse.pNewTable->pSelect, &sNC); + sqlite3SelectPrep(&sParse, sParse.pNewTable->u.view.pSelect, &sNC); if( sParse.nErr ) rc = sParse.rc; } @@ -109420,13 +110033,14 @@ static void dropColumnFunc( goto drop_column_done; } - pCol = renameTokenFind(&sParse, 0, (void*)pTab->aCol[iCol].zName); + pCol = renameTokenFind(&sParse, 0, (void*)pTab->aCol[iCol].zCnName); if( iColnCol-1 ){ RenameToken *pEnd; - pEnd = renameTokenFind(&sParse, 0, (void*)pTab->aCol[iCol+1].zName); + pEnd = renameTokenFind(&sParse, 0, (void*)pTab->aCol[iCol+1].zCnName); zEnd = (const char*)pEnd->t.z; }else{ - zEnd = (const char*)&zSql[pTab->addColOffset]; + assert( IsOrdinaryTable(pTab) ); + zEnd = (const char*)&zSql[pTab->u.tab.addColOffset]; while( ALWAYS(pCol->t.z[0]!=0) && pCol->t.z[0]!=',' ) pCol->t.z--; } @@ -109452,7 +110066,7 @@ drop_column_done: ** statement. Argument pSrc contains the possibly qualified name of the ** table being edited, and token pName the name of the column to drop. */ -SQLITE_PRIVATE void sqlite3AlterDropColumn(Parse *pParse, SrcList *pSrc, Token *pName){ +SQLITE_PRIVATE void sqlite3AlterDropColumn(Parse *pParse, SrcList *pSrc, const Token *pName){ sqlite3 *db = pParse->db; /* Database handle */ Table *pTab; /* Table to modify */ int iDb; /* Index of db containing pTab in aDb[] */ @@ -109507,7 +110121,7 @@ SQLITE_PRIVATE void sqlite3AlterDropColumn(Parse *pParse, SrcList *pSrc, Token * renameTestSchema(pParse, zDb, iDb==1, "", 0); renameFixQuotes(pParse, zDb, iDb==1); sqlite3NestedParse(pParse, - "UPDATE \"%w\"." DFLT_SCHEMA_TABLE " SET " + "UPDATE \"%w\"." 
LEGACY_SCHEMA_TABLE " SET " "sql = sqlite_drop_column(%d, sql, %d) " "WHERE (type=='table' AND tbl_name=%Q COLLATE nocase)" , zDb, iDb, iCol, pTab->zName @@ -109562,6 +110176,12 @@ SQLITE_PRIVATE void sqlite3AlterDropColumn(Parse *pParse, SrcList *pSrc, Token * nField++; } } + if( nField==0 ){ + /* dbsqlfuzz 5f09e7bcc78b4954d06bf9f2400d7715f48d1fef */ + pParse->nMem++; + sqlite3VdbeAddOp2(v, OP_Null, 0, reg+1); + nField = 1; + } sqlite3VdbeAddOp3(v, OP_MakeRecord, reg+1, nField, regRec); if( pPk ){ sqlite3VdbeAddOp4Int(v, OP_IdxInsert, iCur, regRec, reg+1, pPk->nKeyCol); @@ -110031,7 +110651,6 @@ static void statInit( + sizeof(tRowcnt)*3*nColUp*(nCol+mxSample); } #endif - db = sqlite3_context_db_handle(context); p = sqlite3DbMallocZero(db, n); if( p==0 ){ sqlite3_result_error_nomem(context); @@ -110450,28 +111069,19 @@ static void statGet( ** ** I = (K+D-1)/D */ - char *z; - int i; + sqlite3_str sStat; /* Text of the constructed "stat" line */ + int i; /* Loop counter */ - char *zRet = sqlite3MallocZero( (p->nKeyCol+1)*25 ); - if( zRet==0 ){ - sqlite3_result_error_nomem(context); - return; - } - - sqlite3_snprintf(24, zRet, "%llu", + sqlite3StrAccumInit(&sStat, 0, 0, 0, (p->nKeyCol+1)*100); + sqlite3_str_appendf(&sStat, "%llu", p->nSkipAhead ? (u64)p->nEst : (u64)p->nRow); - z = zRet + sqlite3Strlen30(zRet); for(i=0; inKeyCol; i++){ u64 nDistinct = p->current.anDLt[i] + 1; u64 iVal = (p->nRow + nDistinct - 1) / nDistinct; - sqlite3_snprintf(24, z, " %llu", iVal); - z += sqlite3Strlen30(z); + sqlite3_str_appendf(&sStat, " %llu", iVal); assert( p->current.anEq[i] ); } - assert( z[0]=='\0' && z>zRet ); - - sqlite3_result_text(context, zRet, -1, sqlite3_free); + sqlite3ResultStrAccum(context, &sStat); } #ifdef SQLITE_ENABLE_STAT4 else if( eCall==STAT_GET_ROWID ){ @@ -110490,6 +111100,8 @@ static void statGet( } }else{ tRowcnt *aCnt = 0; + sqlite3_str sStat; + int i; assert( p->iGetnSample ); switch( eCall ){ @@ -110501,23 +111113,12 @@ static void statGet( break; } } - - { - char *zRet = sqlite3MallocZero(p->nCol * 25); - if( zRet==0 ){ - sqlite3_result_error_nomem(context); - }else{ - int i; - char *z = zRet; - for(i=0; inCol; i++){ - sqlite3_snprintf(24, z, "%llu ", (u64)aCnt[i]); - z += sqlite3Strlen30(z); - } - assert( z[0]=='\0' && z>zRet ); - z[-1] = '\0'; - sqlite3_result_text(context, zRet, -1, sqlite3_free); - } + sqlite3StrAccumInit(&sStat, 0, 0, 0, p->nCol*100); + for(i=0; inCol; i++){ + sqlite3_str_appendf(&sStat, "%llu ", (u64)aCnt[i]); } + if( sStat.nChar ) sStat.nChar--; + sqlite3ResultStrAccum(context, &sStat); } #endif /* SQLITE_ENABLE_STAT4 */ #ifndef SQLITE_DEBUG @@ -110566,7 +111167,7 @@ static void analyzeVdbeCommentIndexWithColumnName( }else if( i==XN_EXPR ){ VdbeComment((v,"%s.expr(%d)",pIdx->zName, k)); }else{ - VdbeComment((v,"%s.%s", pIdx->zName, pIdx->pTable->aCol[i].zName)); + VdbeComment((v,"%s.%s", pIdx->zName, pIdx->pTable->aCol[i].zCnName)); } } #else @@ -110613,7 +111214,7 @@ static void analyzeOneTable( if( v==0 || NEVER(pTab==0) ){ return; } - if( pTab->tnum==0 ){ + if( !IsOrdinaryTable(pTab) ){ /* Do not gather statistics on views or virtual tables */ return; } @@ -111438,9 +112039,12 @@ static int loadStatTbl( */ static int loadStat4(sqlite3 *db, const char *zDb){ int rc = SQLITE_OK; /* Result codes from subroutines */ + const Table *pStat4; assert( db->lookaside.bDisable ); - if( sqlite3FindTable(db, "sqlite_stat4", zDb) ){ + if( (pStat4 = sqlite3FindTable(db, "sqlite_stat4", zDb))!=0 + && IsOrdinaryTable(pStat4) + ){ rc = loadStatTbl(db, "SELECT 
idx,count(*) FROM %Q.sqlite_stat4 GROUP BY idx", "SELECT idx,neq,nlt,ndlt,sample FROM %Q.sqlite_stat4", @@ -111477,6 +112081,7 @@ SQLITE_PRIVATE int sqlite3AnalysisLoad(sqlite3 *db, int iDb){ char *zSql; int rc = SQLITE_OK; Schema *pSchema = db->aDb[iDb].pSchema; + const Table *pStat1; assert( iDb>=0 && iDbnDb ); assert( db->aDb[iDb].pBt!=0 ); @@ -111499,7 +112104,9 @@ SQLITE_PRIVATE int sqlite3AnalysisLoad(sqlite3 *db, int iDb){ /* Load new statistics out of the sqlite_stat1 table */ sInfo.db = db; sInfo.zDatabase = db->aDb[iDb].zDbSName; - if( sqlite3FindTable(db, "sqlite_stat1", sInfo.zDatabase)!=0 ){ + if( (pStat1 = sqlite3FindTable(db, "sqlite_stat1", sInfo.zDatabase)) + && IsOrdinaryTable(pStat1) + ){ zSql = sqlite3MPrintf(db, "SELECT tbl,idx,stat FROM %Q.sqlite_stat1", sInfo.zDatabase); if( zSql==0 ){ @@ -111890,9 +112497,9 @@ static void codeAttach( sName.pParse = pParse; if( - SQLITE_OK!=(rc = resolveAttachExpr(&sName, pFilename)) || - SQLITE_OK!=(rc = resolveAttachExpr(&sName, pDbname)) || - SQLITE_OK!=(rc = resolveAttachExpr(&sName, pKey)) + SQLITE_OK!=resolveAttachExpr(&sName, pFilename) || + SQLITE_OK!=resolveAttachExpr(&sName, pDbname) || + SQLITE_OK!=resolveAttachExpr(&sName, pKey) ){ goto attach_end; } @@ -111901,6 +112508,7 @@ static void codeAttach( if( pAuthArg ){ char *zAuthArg; if( pAuthArg->op==TK_STRING ){ + assert( !ExprHasProperty(pAuthArg, EP_IntValue) ); zAuthArg = pAuthArg->u.zToken; }else{ zAuthArg = 0; @@ -112319,10 +112927,10 @@ SQLITE_PRIVATE void sqlite3AuthRead( if( iCol>=0 ){ assert( iColnCol ); - zCol = pTab->aCol[iCol].zName; + zCol = pTab->aCol[iCol].zCnName; }else if( pTab->iPKey>=0 ){ assert( pTab->iPKeynCol ); - zCol = pTab->aCol[pTab->iPKey].zName; + zCol = pTab->aCol[pTab->iPKey].zCnName; }else{ zCol = "ROWID"; } @@ -112585,17 +113193,21 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ int i; int reg; - addrRewind = - sqlite3VdbeAddOp1(v, OP_Rewind, pReturning->iRetCur); - VdbeCoverage(v); - reg = pReturning->iRetReg; - for(i=0; inRetCol; i++){ - sqlite3VdbeAddOp3(v, OP_Column, pReturning->iRetCur, i, reg+i); + if( pReturning->nRetCol==0 ){ + assert( CORRUPT_DB ); + }else{ + addrRewind = + sqlite3VdbeAddOp1(v, OP_Rewind, pReturning->iRetCur); + VdbeCoverage(v); + reg = pReturning->iRetReg; + for(i=0; inRetCol; i++){ + sqlite3VdbeAddOp3(v, OP_Column, pReturning->iRetCur, i, reg+i); + } + sqlite3VdbeAddOp2(v, OP_ResultRow, reg, i); + sqlite3VdbeAddOp2(v, OP_Next, pReturning->iRetCur, addrRewind+1); + VdbeCoverage(v); + sqlite3VdbeJumpHere(v, addrRewind); } - sqlite3VdbeAddOp2(v, OP_ResultRow, reg, i); - sqlite3VdbeAddOp2(v, OP_Next, pReturning->iRetCur, addrRewind+1); - VdbeCoverage(v); - sqlite3VdbeJumpHere(v, addrRewind); } sqlite3VdbeAddOp0(v, OP_Halt); @@ -112676,7 +113288,11 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ if( pParse->bReturning ){ Returning *pRet = pParse->u1.pReturning; - sqlite3VdbeAddOp2(v, OP_OpenEphemeral, pRet->iRetCur, pRet->nRetCol); + if( pRet->nRetCol==0 ){ + assert( CORRUPT_DB ); + }else{ + sqlite3VdbeAddOp2(v, OP_OpenEphemeral, pRet->iRetCur, pRet->nRetCol); + } } /* Finally, jump back to the beginning of the executable code. */ @@ -112700,20 +113316,22 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ /* ** Run the parser and code generator recursively in order to generate ** code for the SQL statement given onto the end of the pParse context -** currently under construction. 
When the parser is run recursively -** this way, the final OP_Halt is not appended and other initialization -** and finalization steps are omitted because those are handling by the -** outermost parser. +** currently under construction. Notes: ** -** Not everything is nestable. This facility is designed to permit -** INSERT, UPDATE, and DELETE operations against the schema table. Use -** care if you decide to try to use this routine for some other purposes. +** * The final OP_Halt is not appended and other initialization +** and finalization steps are omitted because those are handling by the +** outermost parser. +** +** * Built-in SQL functions always take precedence over application-defined +** SQL functions. In other words, it is not possible to override a +** built-in function. */ SQLITE_PRIVATE void sqlite3NestedParse(Parse *pParse, const char *zFormat, ...){ va_list ap; char *zSql; char *zErrMsg = 0; sqlite3 *db = pParse->db; + u32 savedDbFlags = db->mDbFlags; char saveBuf[PARSE_TAIL_SZ]; if( pParse->nErr ) return; @@ -112732,7 +113350,9 @@ SQLITE_PRIVATE void sqlite3NestedParse(Parse *pParse, const char *zFormat, ...){ pParse->nested++; memcpy(saveBuf, PARSE_TAIL(pParse), PARSE_TAIL_SZ); memset(PARSE_TAIL(pParse), 0, PARSE_TAIL_SZ); + db->mDbFlags |= DBFLAG_PreferBuiltin; sqlite3RunParser(pParse, zSql, &zErrMsg); + db->mDbFlags = savedDbFlags; sqlite3DbFree(db, zErrMsg); sqlite3DbFree(db, zSql); memcpy(PARSE_TAIL(pParse), saveBuf, PARSE_TAIL_SZ); @@ -112790,17 +113410,17 @@ SQLITE_PRIVATE Table *sqlite3FindTable(sqlite3 *db, const char *zName, const cha p = sqlite3HashFind(&db->aDb[i].pSchema->tblHash, zName); if( p==0 && sqlite3StrNICmp(zName, "sqlite_", 7)==0 ){ if( i==1 ){ - if( sqlite3StrICmp(zName+7, &ALT_TEMP_SCHEMA_TABLE[7])==0 - || sqlite3StrICmp(zName+7, &ALT_SCHEMA_TABLE[7])==0 - || sqlite3StrICmp(zName+7, &DFLT_SCHEMA_TABLE[7])==0 + if( sqlite3StrICmp(zName+7, &PREFERRED_TEMP_SCHEMA_TABLE[7])==0 + || sqlite3StrICmp(zName+7, &PREFERRED_SCHEMA_TABLE[7])==0 + || sqlite3StrICmp(zName+7, &LEGACY_SCHEMA_TABLE[7])==0 ){ p = sqlite3HashFind(&db->aDb[1].pSchema->tblHash, - DFLT_TEMP_SCHEMA_TABLE); + LEGACY_TEMP_SCHEMA_TABLE); } }else{ - if( sqlite3StrICmp(zName+7, &ALT_SCHEMA_TABLE[7])==0 ){ + if( sqlite3StrICmp(zName+7, &PREFERRED_SCHEMA_TABLE[7])==0 ){ p = sqlite3HashFind(&db->aDb[i].pSchema->tblHash, - DFLT_SCHEMA_TABLE); + LEGACY_SCHEMA_TABLE); } } } @@ -112818,11 +113438,11 @@ SQLITE_PRIVATE Table *sqlite3FindTable(sqlite3 *db, const char *zName, const cha if( p ) break; } if( p==0 && sqlite3StrNICmp(zName, "sqlite_", 7)==0 ){ - if( sqlite3StrICmp(zName+7, &ALT_SCHEMA_TABLE[7])==0 ){ - p = sqlite3HashFind(&db->aDb[0].pSchema->tblHash, DFLT_SCHEMA_TABLE); - }else if( sqlite3StrICmp(zName+7, &ALT_TEMP_SCHEMA_TABLE[7])==0 ){ + if( sqlite3StrICmp(zName+7, &PREFERRED_SCHEMA_TABLE[7])==0 ){ + p = sqlite3HashFind(&db->aDb[0].pSchema->tblHash, LEGACY_SCHEMA_TABLE); + }else if( sqlite3StrICmp(zName+7, &PREFERRED_TEMP_SCHEMA_TABLE[7])==0 ){ p = sqlite3HashFind(&db->aDb[1].pSchema->tblHash, - DFLT_TEMP_SCHEMA_TABLE); + LEGACY_TEMP_SCHEMA_TABLE); } } } @@ -112868,6 +113488,7 @@ SQLITE_PRIVATE Table *sqlite3LocateTable( pMod = sqlite3PragmaVtabRegister(db, zName); } if( pMod && sqlite3VtabEponymousTableInit(pParse, pMod) ){ + testcase( pMod->pEpoTab==0 ); return pMod->pEpoTab; } } @@ -112917,6 +113538,22 @@ SQLITE_PRIVATE Table *sqlite3LocateTableItem( return sqlite3LocateTable(pParse, flags, p->zName, zDb); } +/* +** Return the preferred table name for system tables. 
Translate legacy +** names into the new preferred names, as appropriate. +*/ +SQLITE_PRIVATE const char *sqlite3PreferredTableName(const char *zName){ + if( sqlite3StrNICmp(zName, "sqlite_", 7)==0 ){ + if( sqlite3StrICmp(zName+7, &LEGACY_SCHEMA_TABLE[7])==0 ){ + return PREFERRED_SCHEMA_TABLE; + } + if( sqlite3StrICmp(zName+7, &LEGACY_TEMP_SCHEMA_TABLE[7])==0 ){ + return PREFERRED_TEMP_SCHEMA_TABLE; + } + } + return zName; +} + /* ** Locate the in-memory structure that describes ** a particular index given the name of that index @@ -113081,6 +113718,84 @@ SQLITE_PRIVATE void sqlite3CommitInternalChanges(sqlite3 *db){ db->mDbFlags &= ~DBFLAG_SchemaChange; } +/* +** Set the expression associated with a column. This is usually +** the DEFAULT value, but might also be the expression that computes +** the value for a generated column. +*/ +SQLITE_PRIVATE void sqlite3ColumnSetExpr( + Parse *pParse, /* Parsing context */ + Table *pTab, /* The table containing the column */ + Column *pCol, /* The column to receive the new DEFAULT expression */ + Expr *pExpr /* The new default expression */ +){ + ExprList *pList; + assert( IsOrdinaryTable(pTab) ); + pList = pTab->u.tab.pDfltList; + if( pCol->iDflt==0 + || NEVER(pList==0) + || NEVER(pList->nExpriDflt) + ){ + pCol->iDflt = pList==0 ? 1 : pList->nExpr+1; + pTab->u.tab.pDfltList = sqlite3ExprListAppend(pParse, pList, pExpr); + }else{ + sqlite3ExprDelete(pParse->db, pList->a[pCol->iDflt-1].pExpr); + pList->a[pCol->iDflt-1].pExpr = pExpr; + } +} + +/* +** Return the expression associated with a column. The expression might be +** the DEFAULT clause or the AS clause of a generated column. +** Return NULL if the column has no associated expression. +*/ +SQLITE_PRIVATE Expr *sqlite3ColumnExpr(Table *pTab, Column *pCol){ + if( pCol->iDflt==0 ) return 0; + if( NEVER(!IsOrdinaryTable(pTab)) ) return 0; + if( NEVER(pTab->u.tab.pDfltList==0) ) return 0; + if( NEVER(pTab->u.tab.pDfltList->nExpriDflt) ) return 0; + return pTab->u.tab.pDfltList->a[pCol->iDflt-1].pExpr; +} + +/* +** Set the collating sequence name for a column. +*/ +SQLITE_PRIVATE void sqlite3ColumnSetColl( + sqlite3 *db, + Column *pCol, + const char *zColl +){ + i64 nColl; + i64 n; + char *zNew; + assert( zColl!=0 ); + n = sqlite3Strlen30(pCol->zCnName) + 1; + if( pCol->colFlags & COLFLAG_HASTYPE ){ + n += sqlite3Strlen30(pCol->zCnName+n) + 1; + } + nColl = sqlite3Strlen30(zColl) + 1; + zNew = sqlite3DbRealloc(db, pCol->zCnName, nColl+n); + if( zNew ){ + pCol->zCnName = zNew; + memcpy(pCol->zCnName + n, zColl, nColl); + pCol->colFlags |= COLFLAG_HASCOLL; + } +} + +/* +** Return the collating squence name for a column +*/ +SQLITE_PRIVATE const char *sqlite3ColumnColl(Column *pCol){ + const char *z; + if( (pCol->colFlags & COLFLAG_HASCOLL)==0 ) return 0; + z = pCol->zCnName; + while( *z ){ z++; } + if( pCol->colFlags & COLFLAG_HASTYPE ){ + do{ z++; }while( *z ); + } + return z+1; +} + /* ** Delete memory allocated for the column names of a table or view (the ** Table.aCol[] array). 
@@ -113091,12 +113806,20 @@ SQLITE_PRIVATE void sqlite3DeleteColumnNames(sqlite3 *db, Table *pTable){ assert( pTable!=0 ); if( (pCol = pTable->aCol)!=0 ){ for(i=0; inCol; i++, pCol++){ - assert( pCol->zName==0 || pCol->hName==sqlite3StrIHash(pCol->zName) ); - sqlite3DbFree(db, pCol->zName); - sqlite3ExprDelete(db, pCol->pDflt); - sqlite3DbFree(db, pCol->zColl); + assert( pCol->zCnName==0 || pCol->hName==sqlite3StrIHash(pCol->zCnName) ); + sqlite3DbFree(db, pCol->zCnName); } sqlite3DbFree(db, pTable->aCol); + if( IsOrdinaryTable(pTable) ){ + sqlite3ExprListDelete(db, pTable->u.tab.pDfltList); + } + if( db==0 || db->pnBytesFreed==0 ){ + pTable->aCol = 0; + pTable->nCol = 0; + if( IsOrdinaryTable(pTable) ){ + pTable->u.tab.pDfltList = 0; + } + } } } @@ -113148,19 +113871,25 @@ static void SQLITE_NOINLINE deleteTable(sqlite3 *db, Table *pTable){ sqlite3FreeIndex(db, pIndex); } - /* Delete any foreign keys attached to this table. */ - sqlite3FkDelete(db, pTable); + if( IsOrdinaryTable(pTable) ){ + sqlite3FkDelete(db, pTable); + } +#ifndef SQLITE_OMIT_VIRTUAL_TABLE + else if( IsVirtual(pTable) ){ + sqlite3VtabClear(db, pTable); + } +#endif + else{ + assert( IsView(pTable) ); + sqlite3SelectDelete(db, pTable->u.view.pSelect); + } /* Delete the Table structure itself. */ sqlite3DeleteColumnNames(db, pTable); sqlite3DbFree(db, pTable->zName); sqlite3DbFree(db, pTable->zColAff); - sqlite3SelectDelete(db, pTable->pSelect); sqlite3ExprListDelete(db, pTable->pCheck); -#ifndef SQLITE_OMIT_VIRTUALTABLE - sqlite3VtabClear(db, pTable); -#endif sqlite3DbFree(db, pTable); /* Verify that no lookaside memory was used by schema tables */ @@ -113206,10 +113935,10 @@ SQLITE_PRIVATE void sqlite3UnlinkAndDeleteTable(sqlite3 *db, int iDb, const char ** are not \000 terminated and are not persistent. The returned string ** is \000 terminated and is persistent. */ -SQLITE_PRIVATE char *sqlite3NameFromToken(sqlite3 *db, Token *pName){ +SQLITE_PRIVATE char *sqlite3NameFromToken(sqlite3 *db, const Token *pName){ char *zName; if( pName ){ - zName = sqlite3DbStrNDup(db, (char*)pName->z, pName->n); + zName = sqlite3DbStrNDup(db, (const char*)pName->z, pName->n); sqlite3Dequote(zName); }else{ zName = 0; @@ -113223,7 +113952,7 @@ SQLITE_PRIVATE char *sqlite3NameFromToken(sqlite3 *db, Token *pName){ */ SQLITE_PRIVATE void sqlite3OpenSchemaTable(Parse *p, int iDb){ Vdbe *v = sqlite3GetVdbe(p); - sqlite3TableLock(p, iDb, SCHEMA_ROOT, 1, DFLT_SCHEMA_TABLE); + sqlite3TableLock(p, iDb, SCHEMA_ROOT, 1, LEGACY_SCHEMA_TABLE); sqlite3VdbeAddOp4Int(v, OP_OpenWrite, 0, SCHEMA_ROOT, iDb, 5); if( p->nTab==0 ){ p->nTab = 1; @@ -113686,6 +114415,7 @@ SQLITE_PRIVATE void sqlite3StartTable( /* If an error occurs, we jump here */ begin_table_error: + pParse->checkSchema = 1; sqlite3DbFree(db, zName); return; } @@ -113695,7 +114425,7 @@ begin_table_error: */ #if SQLITE_ENABLE_HIDDEN_COLUMNS SQLITE_PRIVATE void sqlite3ColumnPropertiesFromName(Table *pTab, Column *pCol){ - if( sqlite3_strnicmp(pCol->zName, "__hidden__", 10)==0 ){ + if( sqlite3_strnicmp(pCol->zCnName, "__hidden__", 10)==0 ){ pCol->colFlags |= COLFLAG_HIDDEN; if( pTab ) pTab->tabFlags |= TF_HasHidden; }else if( pTab && pCol!=pTab->aCol && (pCol[-1].colFlags & COLFLAG_HIDDEN) ){ @@ -113786,7 +114516,7 @@ SQLITE_PRIVATE void sqlite3AddReturning(Parse *pParse, ExprList *pList){ ** first to get things going. Then this routine is called for each ** column. 
*/ -SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token *pName, Token *pType){ +SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ Table *p; int i; char *z; @@ -113794,55 +114524,96 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token *pName, Token *pType){ Column *pCol; sqlite3 *db = pParse->db; u8 hName; + Column *aNew; + u8 eType = COLTYPE_CUSTOM; + u8 szEst = 1; + char affinity = SQLITE_AFF_BLOB; if( (p = pParse->pNewTable)==0 ) return; if( p->nCol+1>db->aLimit[SQLITE_LIMIT_COLUMN] ){ sqlite3ErrorMsg(pParse, "too many columns on %s", p->zName); return; } - z = sqlite3DbMallocRaw(db, pName->n + pType->n + 2); + if( !IN_RENAME_OBJECT ) sqlite3DequoteToken(&sName); + + /* Because keywords GENERATE ALWAYS can be converted into indentifiers + ** by the parser, we can sometimes end up with a typename that ends + ** with "generated always". Check for this case and omit the surplus + ** text. */ + if( sType.n>=16 + && sqlite3_strnicmp(sType.z+(sType.n-6),"always",6)==0 + ){ + sType.n -= 6; + while( ALWAYS(sType.n>0) && sqlite3Isspace(sType.z[sType.n-1]) ) sType.n--; + if( sType.n>=9 + && sqlite3_strnicmp(sType.z+(sType.n-9),"generated",9)==0 + ){ + sType.n -= 9; + while( sType.n>0 && sqlite3Isspace(sType.z[sType.n-1]) ) sType.n--; + } + } + + /* Check for standard typenames. For standard typenames we will + ** set the Column.eType field rather than storing the typename after + ** the column name, in order to save space. */ + if( sType.n>=3 ){ + sqlite3DequoteToken(&sType); + for(i=0; i0) ); if( z==0 ) return; - if( IN_RENAME_OBJECT ) sqlite3RenameTokenMap(pParse, (void*)z, pName); - memcpy(z, pName->z, pName->n); - z[pName->n] = 0; + if( IN_RENAME_OBJECT ) sqlite3RenameTokenMap(pParse, (void*)z, &sName); + memcpy(z, sName.z, sName.n); + z[sName.n] = 0; sqlite3Dequote(z); hName = sqlite3StrIHash(z); for(i=0; inCol; i++){ - if( p->aCol[i].hName==hName && sqlite3StrICmp(z, p->aCol[i].zName)==0 ){ + if( p->aCol[i].hName==hName && sqlite3StrICmp(z, p->aCol[i].zCnName)==0 ){ sqlite3ErrorMsg(pParse, "duplicate column name: %s", z); sqlite3DbFree(db, z); return; } } - if( (p->nCol & 0x7)==0 ){ - Column *aNew; - aNew = sqlite3DbRealloc(db,p->aCol,(p->nCol+8)*sizeof(p->aCol[0])); - if( aNew==0 ){ - sqlite3DbFree(db, z); - return; - } - p->aCol = aNew; + aNew = sqlite3DbRealloc(db,p->aCol,((i64)p->nCol+1)*sizeof(p->aCol[0])); + if( aNew==0 ){ + sqlite3DbFree(db, z); + return; } + p->aCol = aNew; pCol = &p->aCol[p->nCol]; memset(pCol, 0, sizeof(p->aCol[0])); - pCol->zName = z; + pCol->zCnName = z; pCol->hName = hName; sqlite3ColumnPropertiesFromName(p, pCol); - if( pType->n==0 ){ + if( sType.n==0 ){ /* If there is no type specified, columns have the default affinity ** 'BLOB' with a default size of 4 bytes. 
*/ - pCol->affinity = SQLITE_AFF_BLOB; - pCol->szEst = 1; + pCol->affinity = affinity; + pCol->eCType = eType; + pCol->szEst = szEst; #ifdef SQLITE_ENABLE_SORTER_REFERENCES - if( 4>=sqlite3GlobalConfig.szSorterRef ){ - pCol->colFlags |= COLFLAG_SORTERREF; + if( affinity==SQLITE_AFF_BLOB ){ + if( 4>=sqlite3GlobalConfig.szSorterRef ){ + pCol->colFlags |= COLFLAG_SORTERREF; + } } #endif }else{ zType = z + sqlite3Strlen30(z) + 1; - memcpy(zType, pType->z, pType->n); - zType[pType->n] = 0; + memcpy(zType, sType.z, sType.n); + zType[sType.n] = 0; sqlite3Dequote(zType); pCol->affinity = sqlite3AffinityType(zType, pCol); pCol->colFlags |= COLFLAG_HASTYPE; @@ -113997,7 +114768,7 @@ SQLITE_PRIVATE void sqlite3AddDefaultValue( pCol = &(p->aCol[p->nCol-1]); if( !sqlite3ExprIsConstantOrFunction(pExpr, isInit) ){ sqlite3ErrorMsg(pParse, "default value of column [%s] is not constant", - pCol->zName); + pCol->zCnName); #ifndef SQLITE_OMIT_GENERATED_COLUMNS }else if( pCol->colFlags & COLFLAG_GENERATED ){ testcase( pCol->colFlags & COLFLAG_VIRTUAL ); @@ -114008,15 +114779,15 @@ SQLITE_PRIVATE void sqlite3AddDefaultValue( /* A copy of pExpr is used instead of the original, as pExpr contains ** tokens that point to volatile memory. */ - Expr x; - sqlite3ExprDelete(db, pCol->pDflt); + Expr x, *pDfltExpr; memset(&x, 0, sizeof(x)); x.op = TK_SPAN; x.u.zToken = sqlite3DbSpanDup(db, zStart, zEnd); x.pLeft = pExpr; x.flags = EP_Skip; - pCol->pDflt = sqlite3ExprDup(db, &x, EXPRDUP_REDUCE); + pDfltExpr = sqlite3ExprDup(db, &x, EXPRDUP_REDUCE); sqlite3DbFree(db, x.u.zToken); + sqlite3ColumnSetExpr(pParse, p, pCol, pDfltExpr); } } if( IN_RENAME_OBJECT ){ @@ -114112,9 +114883,11 @@ SQLITE_PRIVATE void sqlite3AddPrimaryKey( assert( pCExpr!=0 ); sqlite3StringToId(pCExpr); if( pCExpr->op==TK_ID ){ - const char *zCName = pCExpr->u.zToken; + const char *zCName; + assert( !ExprHasProperty(pCExpr, EP_IntValue) ); + zCName = pCExpr->u.zToken; for(iCol=0; iColnCol; iCol++){ - if( sqlite3StrICmp(zCName, pTab->aCol[iCol].zName)==0 ){ + if( sqlite3StrICmp(zCName, pTab->aCol[iCol].zCnName)==0 ){ pCol = &pTab->aCol[iCol]; makeColumnPartOfPrimaryKey(pParse, pCol); break; @@ -114125,7 +114898,7 @@ SQLITE_PRIVATE void sqlite3AddPrimaryKey( } if( nTerm==1 && pCol - && sqlite3StrICmp(sqlite3ColumnType(pCol,""), "INTEGER")==0 + && pCol->eCType==COLTYPE_INTEGER && sortOrder!=SQLITE_SO_DESC ){ if( IN_RENAME_OBJECT && pList ){ @@ -114205,8 +114978,7 @@ SQLITE_PRIVATE void sqlite3AddCollateType(Parse *pParse, Token *pToken){ if( sqlite3LocateCollSeq(pParse, zColl) ){ Index *pIdx; - sqlite3DbFree(db, p->aCol[i].zColl); - p->aCol[i].zColl = zColl; + sqlite3ColumnSetColl(db, &p->aCol[i], zColl); /* If the column is declared as " PRIMARY KEY COLLATE ", ** then an index may have been created on this column before the @@ -114215,12 +114987,11 @@ SQLITE_PRIVATE void sqlite3AddCollateType(Parse *pParse, Token *pToken){ for(pIdx=p->pIndex; pIdx; pIdx=pIdx->pNext){ assert( pIdx->nKeyCol==1 ); if( pIdx->aiColumn[0]==i ){ - pIdx->azColl[0] = p->aCol[i].zColl; + pIdx->azColl[0] = sqlite3ColumnColl(&p->aCol[i]); } } - }else{ - sqlite3DbFree(db, zColl); } + sqlite3DbFree(db, zColl); } /* Change the most recently parsed column to be a GENERATED ALWAYS AS @@ -114240,7 +115011,7 @@ SQLITE_PRIVATE void sqlite3AddGenerated(Parse *pParse, Expr *pExpr, Token *pType sqlite3ErrorMsg(pParse, "virtual tables cannot use computed columns"); goto generated_done; } - if( pCol->pDflt ) goto generated_error; + if( pCol->iDflt>0 ) goto generated_error; if( pType ){ if( 
pType->n==7 && sqlite3StrNICmp("virtual",pType->z,7)==0 ){ /* no-op */ @@ -114258,13 +115029,13 @@ SQLITE_PRIVATE void sqlite3AddGenerated(Parse *pParse, Expr *pExpr, Token *pType if( pCol->colFlags & COLFLAG_PRIMKEY ){ makeColumnPartOfPrimaryKey(pParse, pCol); /* For the error message */ } - pCol->pDflt = pExpr; + sqlite3ColumnSetExpr(pParse, pTab, pCol, pExpr); pExpr = 0; goto generated_done; generated_error: sqlite3ErrorMsg(pParse, "error in generated column \"%s\"", - pCol->zName); + pCol->zCnName); generated_done: sqlite3ExprDelete(pParse->db, pExpr); #else @@ -114366,7 +115137,7 @@ static char *createTableStmt(sqlite3 *db, Table *p){ Column *pCol; n = 0; for(pCol = p->aCol, i=0; inCol; i++, pCol++){ - n += identLength(pCol->zName) + 5; + n += identLength(pCol->zCnName) + 5; } n += identLength(p->zName); if( n<50 ){ @@ -114402,7 +115173,7 @@ static char *createTableStmt(sqlite3 *db, Table *p){ sqlite3_snprintf(n-k, &zStmt[k], zSep); k += sqlite3Strlen30(&zStmt[k]); zSep = zSep2; - identPut(zStmt, &k, pCol->zName); + identPut(zStmt, &k, pCol->zCnName); assert( pCol->affinity-SQLITE_AFF_BLOB >= 0 ); assert( pCol->affinity-SQLITE_AFF_BLOB < ArraySize(azType) ); testcase( pCol->affinity==SQLITE_AFF_BLOB ); @@ -114486,7 +115257,6 @@ static void estimateIndexWidth(Index *pIdx){ */ static int hasColumn(const i16 *aiCol, int nCol, int x){ while( nCol-- > 0 ){ - assert( aiCol[0]>=0 ); if( x==*(aiCol++) ){ return 1; } @@ -114599,7 +115369,9 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ */ if( !db->init.imposterTable ){ for(i=0; inCol; i++){ - if( (pTab->aCol[i].colFlags & COLFLAG_PRIMKEY)!=0 ){ + if( (pTab->aCol[i].colFlags & COLFLAG_PRIMKEY)!=0 + && (pTab->aCol[i].notNull==OE_None) + ){ pTab->aCol[i].notNull = OE_Abort; } } @@ -114621,7 +115393,7 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ if( pTab->iPKey>=0 ){ ExprList *pList; Token ipkToken; - sqlite3TokenInit(&ipkToken, pTab->aCol[pTab->iPKey].zName); + sqlite3TokenInit(&ipkToken, pTab->aCol[pTab->iPKey].zCnName); pList = sqlite3ExprListAppend(pParse, 0, sqlite3ExprAlloc(db, TK_ID, &ipkToken, 0)); if( pList==0 ){ @@ -114751,7 +115523,7 @@ SQLITE_PRIVATE int sqlite3IsShadowTableOf(sqlite3 *db, Table *pTab, const char * nName = sqlite3Strlen30(pTab->zName); if( sqlite3_strnicmp(zName, pTab->zName, nName)!=0 ) return 0; if( zName[nName]!='_' ) return 0; - pMod = (Module*)sqlite3HashFind(&db->aModule, pTab->azModuleArg[0]); + pMod = (Module*)sqlite3HashFind(&db->aModule, pTab->u.vtab.azArg[0]); if( pMod==0 ) return 0; if( pMod->pModule->iVersion<3 ) return 0; if( pMod->pModule->xShadowName==0 ) return 0; @@ -114759,6 +115531,41 @@ SQLITE_PRIVATE int sqlite3IsShadowTableOf(sqlite3 *db, Table *pTab, const char * } #endif /* ifndef SQLITE_OMIT_VIRTUALTABLE */ +#ifndef SQLITE_OMIT_VIRTUALTABLE +/* +** Table pTab is a virtual table. If it the virtual table implementation +** exists and has an xShadowName method, then loop over all other ordinary +** tables within the same schema looking for shadow tables of pTab, and mark +** any shadow tables seen using the TF_Shadow flag. 
+*/ +SQLITE_PRIVATE void sqlite3MarkAllShadowTablesOf(sqlite3 *db, Table *pTab){ + int nName; /* Length of pTab->zName */ + Module *pMod; /* Module for the virtual table */ + HashElem *k; /* For looping through the symbol table */ + + assert( IsVirtual(pTab) ); + pMod = (Module*)sqlite3HashFind(&db->aModule, pTab->u.vtab.azArg[0]); + if( pMod==0 ) return; + if( NEVER(pMod->pModule==0) ) return; + if( pMod->pModule->iVersion<3 ) return; + if( pMod->pModule->xShadowName==0 ) return; + assert( pTab->zName!=0 ); + nName = sqlite3Strlen30(pTab->zName); + for(k=sqliteHashFirst(&pTab->pSchema->tblHash); k; k=sqliteHashNext(k)){ + Table *pOther = sqliteHashData(k); + assert( pOther->zName!=0 ); + if( !IsOrdinaryTable(pOther) ) continue; + if( pOther->tabFlags & TF_Shadow ) continue; + if( sqlite3StrNICmp(pOther->zName, pTab->zName, nName)==0 + && pOther->zName[nName]=='_' + && pMod->pModule->xShadowName(pOther->zName+nName+1) + ){ + pOther->tabFlags |= TF_Shadow; + } + } +} +#endif /* ifndef SQLITE_OMIT_VIRTUALTABLE */ + #ifndef SQLITE_OMIT_VIRTUALTABLE /* ** Return true if zName is a shadow table name in the current database @@ -114832,7 +115639,7 @@ SQLITE_PRIVATE void sqlite3EndTable( Parse *pParse, /* Parse context */ Token *pCons, /* The ',' token after the last column defn. */ Token *pEnd, /* The ')' before options in the CREATE TABLE */ - u8 tabOpts, /* Extra table options. Usually 0. */ + u32 tabOpts, /* Extra table options. Usually 0. */ Select *pSelect /* Select from a "CREATE ... AS SELECT" */ ){ Table *p; /* The new table */ @@ -114860,7 +115667,7 @@ SQLITE_PRIVATE void sqlite3EndTable( ** table itself. So mark it read-only. */ if( db->init.busy ){ - if( pSelect ){ + if( pSelect || (!IsOrdinaryTable(p) && db->init.newTnum) ){ sqlite3ErrorMsg(pParse, ""); return; } @@ -114868,6 +115675,44 @@ SQLITE_PRIVATE void sqlite3EndTable( if( p->tnum==1 ) p->tabFlags |= TF_Readonly; } + /* Special processing for tables that include the STRICT keyword: + ** + ** * Do not allow custom column datatypes. Every column must have + ** a datatype that is one of INT, INTEGER, REAL, TEXT, or BLOB. + ** + ** * If a PRIMARY KEY is defined, other than the INTEGER PRIMARY KEY, + ** then all columns of the PRIMARY KEY must have a NOT NULL + ** constraint. 
+ */ + if( tabOpts & TF_Strict ){ + int ii; + p->tabFlags |= TF_Strict; + for(ii=0; iinCol; ii++){ + Column *pCol = &p->aCol[ii]; + if( pCol->eCType==COLTYPE_CUSTOM ){ + if( pCol->colFlags & COLFLAG_HASTYPE ){ + sqlite3ErrorMsg(pParse, + "unknown datatype for %s.%s: \"%s\"", + p->zName, pCol->zCnName, sqlite3ColumnType(pCol, "") + ); + }else{ + sqlite3ErrorMsg(pParse, "missing datatype for %s.%s", + p->zName, pCol->zCnName); + } + return; + }else if( pCol->eCType==COLTYPE_ANY ){ + pCol->affinity = SQLITE_AFF_BLOB; + } + if( (pCol->colFlags & COLFLAG_PRIMKEY)!=0 + && p->iPKey!=ii + && pCol->notNull == OE_None + ){ + pCol->notNull = OE_Abort; + p->tabFlags |= TF_HasNotNull; + } + } + } + assert( (p->tabFlags & TF_HasPrimaryKey)==0 || p->iPKey>=0 || sqlite3PrimaryKeyIndex(p)!=0 ); assert( (p->tabFlags & TF_HasPrimaryKey)!=0 @@ -114912,7 +115757,7 @@ SQLITE_PRIVATE void sqlite3EndTable( for(ii=0; iinCol; ii++){ u32 colFlags = p->aCol[ii].colFlags; if( (colFlags & COLFLAG_GENERATED)!=0 ){ - Expr *pX = p->aCol[ii].pDflt; + Expr *pX = sqlite3ColumnExpr(p, &p->aCol[ii]); testcase( colFlags & COLFLAG_VIRTUAL ); testcase( colFlags & COLFLAG_STORED ); if( sqlite3ResolveSelfReference(pParse, p, NC_GenCol, pX, 0) ){ @@ -114922,8 +115767,8 @@ SQLITE_PRIVATE void sqlite3EndTable( ** tree that have been allocated from lookaside memory, which is ** illegal in a schema and will lead to errors or heap corruption ** when the database connection closes. */ - sqlite3ExprDelete(db, pX); - p->aCol[ii].pDflt = sqlite3ExprAlloc(db, TK_NULL, 0, 0); + sqlite3ColumnSetExpr(pParse, p, &p->aCol[ii], + sqlite3ExprAlloc(db, TK_NULL, 0, 0)); } }else{ nNG++; @@ -114963,7 +115808,7 @@ SQLITE_PRIVATE void sqlite3EndTable( /* ** Initialize zType for the new view or table. */ - if( p->pSelect==0 ){ + if( IsOrdinaryTable(p) ){ /* A regular table */ zType = "table"; zType2 = "TABLE"; @@ -115049,7 +115894,7 @@ SQLITE_PRIVATE void sqlite3EndTable( ** the information we've collected. */ sqlite3NestedParse(pParse, - "UPDATE %Q." DFLT_SCHEMA_TABLE + "UPDATE %Q." LEGACY_SCHEMA_TABLE " SET type='%s', name=%Q, tbl_name=%Q, rootpage=#%d, sql=%Q" " WHERE rowid=#%d", db->aDb[iDb].zDbSName, @@ -115113,12 +115958,12 @@ SQLITE_PRIVATE void sqlite3EndTable( } #ifndef SQLITE_OMIT_ALTERTABLE - if( !pSelect && !p->pSelect ){ + if( !pSelect && IsOrdinaryTable(p) ){ assert( pCons && pEnd ); if( pCons->z==0 ){ pCons = pEnd; } - p->addColOffset = 13 + (int)(pCons->z - pParse->sNameToken.z); + p->u.tab.addColOffset = 13 + (int)(pCons->z - pParse->sNameToken.z); } #endif } @@ -115175,12 +116020,13 @@ SQLITE_PRIVATE void sqlite3CreateView( */ pSelect->selFlags |= SF_View; if( IN_RENAME_OBJECT ){ - p->pSelect = pSelect; + p->u.view.pSelect = pSelect; pSelect = 0; }else{ - p->pSelect = sqlite3SelectDup(db, pSelect, EXPRDUP_REDUCE); + p->u.view.pSelect = sqlite3SelectDup(db, pSelect, EXPRDUP_REDUCE); } p->pCheck = sqlite3ExprListDup(db, pCNames, EXPRDUP_REDUCE); + p->eTabType = TABTYP_VIEW; if( db->mallocFailed ) goto create_view_fail; /* Locate the end of the CREATE VIEW statement. 
Make sEnd point to @@ -115234,13 +116080,12 @@ SQLITE_PRIVATE int sqlite3ViewGetColumnNames(Parse *pParse, Table *pTable){ assert( pTable ); #ifndef SQLITE_OMIT_VIRTUALTABLE - db->nSchemaLock++; - rc = sqlite3VtabCallConnect(pParse, pTable); - db->nSchemaLock--; - if( rc ){ - return 1; + if( IsVirtual(pTable) ){ + db->nSchemaLock++; + rc = sqlite3VtabCallConnect(pParse, pTable); + db->nSchemaLock--; + return rc; } - if( IsVirtual(pTable) ) return 0; #endif #ifndef SQLITE_OMIT_VIEW @@ -115277,8 +116122,8 @@ SQLITE_PRIVATE int sqlite3ViewGetColumnNames(Parse *pParse, Table *pTable){ ** to be permanent. So the computation is done on a copy of the SELECT ** statement that defines the view. */ - assert( pTable->pSelect ); - pSel = sqlite3SelectDup(db, pTable->pSelect, 0); + assert( IsView(pTable) ); + pSel = sqlite3SelectDup(db, pTable->u.view.pSelect, 0); if( pSel ){ u8 eParseMode = pParse->eParseMode; pParse->eParseMode = PARSE_MODE_NORMAL; @@ -115337,8 +116182,6 @@ SQLITE_PRIVATE int sqlite3ViewGetColumnNames(Parse *pParse, Table *pTable){ pTable->pSchema->schemaFlags |= DB_UnresetViews; if( db->mallocFailed ){ sqlite3DeleteColumnNames(db, pTable); - pTable->aCol = 0; - pTable->nCol = 0; } #endif /* SQLITE_OMIT_VIEW */ return nErr; @@ -115355,10 +116198,8 @@ static void sqliteViewResetAll(sqlite3 *db, int idx){ if( !DbHasProperty(db, idx, DB_UnresetViews) ) return; for(i=sqliteHashFirst(&db->aDb[idx].pSchema->tblHash); i;i=sqliteHashNext(i)){ Table *pTab = sqliteHashData(i); - if( pTab->pSelect ){ + if( IsView(pTab) ){ sqlite3DeleteColumnNames(db, pTab); - pTab->aCol = 0; - pTab->nCol = 0; } } DbClearProperty(db, idx, DB_UnresetViews); @@ -115432,7 +116273,7 @@ static void destroyRootPage(Parse *pParse, int iTable, int iDb){ ** token for additional information. */ sqlite3NestedParse(pParse, - "UPDATE %Q." DFLT_SCHEMA_TABLE + "UPDATE %Q." LEGACY_SCHEMA_TABLE " SET rootpage=%d WHERE #%d AND rootpage=#%d", pParse->db->aDb[iDb].zDbSName, iTable, r1, r1); #endif @@ -115567,7 +116408,7 @@ SQLITE_PRIVATE void sqlite3CodeDropTable(Parse *pParse, Table *pTab, int iDb, in ** database. */ sqlite3NestedParse(pParse, - "DELETE FROM %Q." DFLT_SCHEMA_TABLE + "DELETE FROM %Q." LEGACY_SCHEMA_TABLE " WHERE tbl_name=%Q and type!='trigger'", pDb->zDbSName, pTab->zName); if( !isView && !IsVirtual(pTab) ){ @@ -115595,6 +116436,7 @@ SQLITE_PRIVATE int sqlite3ReadOnlyShadowTables(sqlite3 *db){ if( (db->flags & SQLITE_Defensive)!=0 && db->pVtabCtx==0 && db->nVdbeExec==0 + && !sqlite3VtabInSync(db) ){ return 1; } @@ -115614,6 +116456,9 @@ static int tableMayNotBeDropped(sqlite3 *db, Table *pTab){ if( (pTab->tabFlags & TF_Shadow)!=0 && sqlite3ReadOnlyShadowTables(db) ){ return 1; } + if( pTab->tabFlags & TF_Eponymous ){ + return 1; + } return 0; } @@ -115698,11 +116543,11 @@ SQLITE_PRIVATE void sqlite3DropTable(Parse *pParse, SrcList *pName, int isView, /* Ensure DROP TABLE is not used on a view, and DROP VIEW is not used ** on a table. 
*/ - if( isView && pTab->pSelect==0 ){ + if( isView && !IsView(pTab) ){ sqlite3ErrorMsg(pParse, "use DROP TABLE to delete table %s", pTab->zName); goto exit_drop_table; } - if( !isView && pTab->pSelect ){ + if( !isView && IsView(pTab) ){ sqlite3ErrorMsg(pParse, "use DROP VIEW to delete view %s", pTab->zName); goto exit_drop_table; } @@ -115753,7 +116598,7 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( FKey *pFKey = 0; FKey *pNextTo; Table *p = pParse->pNewTable; - int nByte; + i64 nByte; int i; int nCol; char *z; @@ -115766,7 +116611,7 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( if( pToCol && pToCol->nExpr!=1 ){ sqlite3ErrorMsg(pParse, "foreign key on %s" " should reference only one column of table %T", - p->aCol[iCol].zName, pTo); + p->aCol[iCol].zCnName, pTo); goto fk_end; } nCol = 1; @@ -115789,7 +116634,8 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( goto fk_end; } pFKey->pFrom = p; - pFKey->pNextFrom = p->pFKey; + assert( IsOrdinaryTable(p) ); + pFKey->pNextFrom = p->u.tab.pFKey; z = (char*)&pFKey->aCol[nCol]; pFKey->zTo = z; if( IN_RENAME_OBJECT ){ @@ -115806,7 +116652,7 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( for(i=0; inCol; j++){ - if( sqlite3StrICmp(p->aCol[j].zName, pFromCol->a[i].zEName)==0 ){ + if( sqlite3StrICmp(p->aCol[j].zCnName, pFromCol->a[i].zEName)==0 ){ pFKey->aCol[i].iFrom = j; break; } @@ -115854,7 +116700,8 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( /* Link the foreign key to the table as the last step. */ - p->pFKey = pFKey; + assert( IsOrdinaryTable(p) ); + p->u.tab.pFKey = pFKey; pFKey = 0; fk_end: @@ -115875,7 +116722,9 @@ SQLITE_PRIVATE void sqlite3DeferForeignKey(Parse *pParse, int isDeferred){ #ifndef SQLITE_OMIT_FOREIGN_KEY Table *pTab; FKey *pFKey; - if( (pTab = pParse->pNewTable)==0 || (pFKey = pTab->pFKey)==0 ) return; + if( (pTab = pParse->pNewTable)==0 ) return; + if( NEVER(!IsOrdinaryTable(pTab)) ) return; + if( (pFKey = pTab->u.tab.pFKey)==0 ) return; assert( isDeferred==0 || isDeferred==1 ); /* EV: R-30323-21917 */ pFKey->isDeferred = (u8)isDeferred; #endif @@ -116167,7 +117016,7 @@ SQLITE_PRIVATE void sqlite3CreateIndex( goto exit_create_index; } #ifndef SQLITE_OMIT_VIEW - if( pTab->pSelect ){ + if( IsView(pTab) ){ sqlite3ErrorMsg(pParse, "views may not be indexed"); goto exit_create_index; } @@ -116258,7 +117107,7 @@ SQLITE_PRIVATE void sqlite3CreateIndex( Token prevCol; Column *pCol = &pTab->aCol[pTab->nCol-1]; pCol->colFlags |= COLFLAG_UNIQUE; - sqlite3TokenInit(&prevCol, pCol->zName); + sqlite3TokenInit(&prevCol, pCol->zCnName); pList = sqlite3ExprListAppend(pParse, 0, sqlite3ExprAlloc(db, TK_ID, &prevCol, 0)); if( pList==0 ) goto exit_create_index; @@ -116276,6 +117125,7 @@ SQLITE_PRIVATE void sqlite3CreateIndex( Expr *pExpr = pList->a[i].pExpr; assert( pExpr!=0 ); if( pExpr->op==TK_COLLATE ){ + assert( !ExprHasProperty(pExpr, EP_IntValue) ); nExtra += (1 + sqlite3Strlen30(pExpr->u.zToken)); } } @@ -116371,6 +117221,7 @@ SQLITE_PRIVATE void sqlite3CreateIndex( zColl = 0; if( pListItem->pExpr->op==TK_COLLATE ){ int nColl; + assert( !ExprHasProperty(pListItem->pExpr, EP_IntValue) ); zColl = pListItem->pExpr->u.zToken; nColl = sqlite3Strlen30(zColl) + 1; assert( nExtra>=nColl ); @@ -116379,7 +117230,7 @@ SQLITE_PRIVATE void sqlite3CreateIndex( zExtra += nColl; nExtra -= nColl; }else if( j>=0 ){ - zColl = pTab->aCol[j].zColl; + zColl = sqlite3ColumnColl(&pTab->aCol[j]); } if( !zColl ) zColl = sqlite3StrBINARY; if( !db->init.busy && !sqlite3LocateCollSeq(pParse, zColl) ){ @@ -116577,7 +117428,7 @@ SQLITE_PRIVATE void 
sqlite3CreateIndex( /* Add an entry in sqlite_schema for this index */ sqlite3NestedParse(pParse, - "INSERT INTO %Q." DFLT_SCHEMA_TABLE " VALUES('index',%Q,%Q,#%d,%Q);", + "INSERT INTO %Q." LEGACY_SCHEMA_TABLE " VALUES('index',%Q,%Q,#%d,%Q);", db->aDb[iDb].zDbSName, pIndex->zName, pTab->zName, @@ -116619,7 +117470,7 @@ exit_create_index: ** The list was already ordered when this routine was entered, so at this ** point at most a single index (the newly added index) will be out of ** order. So we have to reorder at most one index. */ - Index **ppFrom = &pTab->pIndex; + Index **ppFrom; Index *pThis; for(ppFrom=&pTab->pIndex; (pThis = *ppFrom)!=0; ppFrom=&pThis->pNext){ Index *pNext; @@ -116763,7 +117614,7 @@ SQLITE_PRIVATE void sqlite3DropIndex(Parse *pParse, SrcList *pName, int ifExists if( v ){ sqlite3BeginWriteOperation(pParse, 1, iDb); sqlite3NestedParse(pParse, - "DELETE FROM %Q." DFLT_SCHEMA_TABLE " WHERE name=%Q AND type='index'", + "DELETE FROM %Q." LEGACY_SCHEMA_TABLE " WHERE name=%Q AND type='index'", db->aDb[iDb].zDbSName, pIndex->zName ); sqlite3ClearStatTables(pParse, iDb, "idx", pIndex->zName); @@ -117159,6 +118010,7 @@ SQLITE_PRIVATE void sqlite3SrcListIndexedBy(Parse *pParse, SrcList *p, Token *pI }else{ pItem->u1.zIndexedBy = sqlite3NameFromToken(pParse->db, pIndexedBy); pItem->fg.isIndexedBy = 1; + assert( pItem->fg.isCte==0 ); /* No collision on union u2 */ } } } @@ -117471,7 +118323,7 @@ SQLITE_PRIVATE void sqlite3UniqueConstraint( for(j=0; jnKeyCol; j++){ char *zCol; assert( pIdx->aiColumn[j]>=0 ); - zCol = pTab->aCol[pIdx->aiColumn[j]].zName; + zCol = pTab->aCol[pIdx->aiColumn[j]].zCnName; if( j ) sqlite3_str_append(&errMsg, ", ", 2); sqlite3_str_appendall(&errMsg, pTab->zName); sqlite3_str_append(&errMsg, ".", 1); @@ -117498,7 +118350,7 @@ SQLITE_PRIVATE void sqlite3RowidConstraint( int rc; if( pTab->iPKey>=0 ){ zMsg = sqlite3MPrintf(pParse->db, "%s.%s", pTab->zName, - pTab->aCol[pTab->iPKey].zName); + pTab->aCol[pTab->iPKey].zCnName); rc = SQLITE_CONSTRAINT_PRIMARYKEY; }else{ zMsg = sqlite3MPrintf(pParse->db, "%s.rowid", pTab->zName); @@ -118139,6 +118991,7 @@ SQLITE_PRIVATE FuncDef *sqlite3FunctionSearch( ){ FuncDef *p; for(p=sqlite3BuiltinFunctions.a[h]; p; p=p->u.pHash){ + assert( p->funcFlags & SQLITE_FUNC_BUILTIN ); if( sqlite3StrICmp(p->zName, zFunc)==0 ){ return p; } @@ -118160,6 +119013,7 @@ SQLITE_PRIVATE void sqlite3InsertBuiltinFuncs( int nName = sqlite3Strlen30(zName); int h = SQLITE_FUNC_HASH(zName[0], nName); assert( zName[0]>='a' && zName[0]<='z' ); + assert( aDef[i].funcFlags & SQLITE_FUNC_BUILTIN ); pOther = sqlite3FunctionSearch(h, zName); if( pOther ){ assert( pOther!=&aDef[i] && pOther->pNext!=&aDef[i] ); @@ -118425,7 +119279,7 @@ SQLITE_PRIVATE int sqlite3IsReadOnly(Parse *pParse, Table *pTab, int viewOk){ return 1; } #ifndef SQLITE_OMIT_VIEW - if( !viewOk && pTab->pSelect ){ + if( !viewOk && IsView(pTab) ){ sqlite3ErrorMsg(pParse,"cannot modify %s because it is a view",pTab->zName); return 1; } @@ -118529,13 +119383,13 @@ SQLITE_PRIVATE Expr *sqlite3LimitWhere( }else{ Index *pPk = sqlite3PrimaryKeyIndex(pTab); if( pPk->nKeyCol==1 ){ - const char *zName = pTab->aCol[pPk->aiColumn[0]].zName; + const char *zName = pTab->aCol[pPk->aiColumn[0]].zCnName; pLhs = sqlite3Expr(db, TK_ID, zName); pEList = sqlite3ExprListAppend(pParse, 0, sqlite3Expr(db, TK_ID, zName)); }else{ int i; for(i=0; inKeyCol; i++){ - Expr *p = sqlite3Expr(db, TK_ID, pTab->aCol[pPk->aiColumn[i]].zName); + Expr *p = sqlite3Expr(db, TK_ID, pTab->aCol[pPk->aiColumn[i]].zCnName); pEList 
= sqlite3ExprListAppend(pParse, pEList, p); } pLhs = sqlite3PExpr(pParse, TK_VECTOR, 0, 0); @@ -118551,6 +119405,7 @@ SQLITE_PRIVATE Expr *sqlite3LimitWhere( pSelectSrc = sqlite3SrcListDup(db, pSrc, 0); pSrc->a[0].pTab = pTab; if( pSrc->a[0].fg.isIndexedBy ){ + assert( pSrc->a[0].fg.isCte==0 ); pSrc->a[0].u2.pIBIndex = 0; pSrc->a[0].fg.isIndexedBy = 0; sqlite3DbFree(db, pSrc->a[0].u1.zIndexedBy); @@ -118642,7 +119497,7 @@ SQLITE_PRIVATE void sqlite3DeleteFrom( */ #ifndef SQLITE_OMIT_TRIGGER pTrigger = sqlite3TriggersExist(pParse, pTab, TK_DELETE, 0, 0); - isView = pTab->pSelect!=0; + isView = IsView(pTab); #else # define pTrigger 0 # define isView 0 @@ -118892,7 +119747,7 @@ SQLITE_PRIVATE void sqlite3DeleteFrom( if( eOnePass!=ONEPASS_OFF ){ assert( nKey==nPk ); /* OP_Found will use an unpacked key */ if( !IsVirtual(pTab) && aToOpen[iDataCur-iTabCur] ){ - assert( pPk!=0 || pTab->pSelect!=0 ); + assert( pPk!=0 || IsView(pTab) ); sqlite3VdbeAddOp4Int(v, OP_NotFound, iDataCur, addrBypass, iKey, nKey); VdbeCoverage(v); } @@ -119126,7 +119981,7 @@ SQLITE_PRIVATE void sqlite3GenerateRowDelete( ** the update-hook is not invoked for rows removed by REPLACE, but the ** pre-update-hook is. */ - if( pTab->pSelect==0 ){ + if( !IsView(pTab) ){ u8 p5 = 0; sqlite3GenerateRowIndexDelete(pParse, pTab, iDataCur, iIdxCur,0,iIdxNoSeek); sqlite3VdbeAddOp2(v, OP_Delete, iDataCur, (count?OPFLAG_NCHANGE:0)); @@ -119886,9 +120741,9 @@ static void last_insert_rowid( /* ** Implementation of the changes() SQL function. ** -** IMP: R-62073-11209 The changes() SQL function is a wrapper -** around the sqlite3_changes() C/C++ function and hence follows the same -** rules for counting changes. +** IMP: R-32760-32347 The changes() SQL function is a wrapper +** around the sqlite3_changes64() C/C++ function and hence follows the +** same rules for counting changes. */ static void changes( sqlite3_context *context, @@ -119897,12 +120752,12 @@ static void changes( ){ sqlite3 *db = sqlite3_context_db_handle(context); UNUSED_PARAMETER2(NotUsed, NotUsed2); - sqlite3_result_int(context, sqlite3_changes(db)); + sqlite3_result_int64(context, sqlite3_changes64(db)); } /* ** Implementation of the total_changes() SQL function. The return value is -** the same as the sqlite3_total_changes() API function. +** the same as the sqlite3_total_changes64() API function. */ static void total_changes( sqlite3_context *context, @@ -119911,9 +120766,9 @@ static void total_changes( ){ sqlite3 *db = sqlite3_context_db_handle(context); UNUSED_PARAMETER2(NotUsed, NotUsed2); - /* IMP: R-52756-41993 This function is a wrapper around the - ** sqlite3_total_changes() C/C++ interface. */ - sqlite3_result_int(context, sqlite3_total_changes(db)); + /* IMP: R-11217-42568 This function is a wrapper around the + ** sqlite3_total_changes64() C/C++ interface. */ + sqlite3_result_int64(context, sqlite3_total_changes64(db)); } /* @@ -121030,97 +121885,167 @@ static void minMaxFinalize(sqlite3_context *context){ /* ** group_concat(EXPR, ?SEPARATOR?) +** +** The SEPARATOR goes before the EXPR string. This is tragic. The +** groupConcatInverse() implementation would have been easier if the +** SEPARATOR were appended after EXPR. And the order is undocumented, +** so we could change it, in theory. But the old behavior has been +** around for so long that we dare not, for fear of breaking something. 
*/ +typedef struct { + StrAccum str; /* The accumulated concatenation */ +#ifndef SQLITE_OMIT_WINDOWFUNC + int nAccum; /* Number of strings presently concatenated */ + int nFirstSepLength; /* Used to detect separator length change */ + /* If pnSepLengths!=0, refs an array of inter-string separator lengths, + ** stored as actually incorporated into presently accumulated result. + ** (Hence, its slots in use number nAccum-1 between method calls.) + ** If pnSepLengths==0, nFirstSepLength is the length used throughout. + */ + int *pnSepLengths; +#endif +} GroupConcatCtx; + static void groupConcatStep( sqlite3_context *context, int argc, sqlite3_value **argv ){ const char *zVal; - StrAccum *pAccum; + GroupConcatCtx *pGCC; const char *zSep; int nVal, nSep; assert( argc==1 || argc==2 ); if( sqlite3_value_type(argv[0])==SQLITE_NULL ) return; - pAccum = (StrAccum*)sqlite3_aggregate_context(context, sizeof(*pAccum)); - - if( pAccum ){ + pGCC = (GroupConcatCtx*)sqlite3_aggregate_context(context, sizeof(*pGCC)); + if( pGCC ){ sqlite3 *db = sqlite3_context_db_handle(context); - int firstTerm = pAccum->mxAlloc==0; - pAccum->mxAlloc = db->aLimit[SQLITE_LIMIT_LENGTH]; - if( !firstTerm ){ - if( argc==2 ){ - zSep = (char*)sqlite3_value_text(argv[1]); - nSep = sqlite3_value_bytes(argv[1]); - }else{ - zSep = ","; - nSep = 1; + int firstTerm = pGCC->str.mxAlloc==0; + pGCC->str.mxAlloc = db->aLimit[SQLITE_LIMIT_LENGTH]; + if( argc==1 ){ + if( !firstTerm ){ + sqlite3_str_appendchar(&pGCC->str, 1, ','); } - if( zSep ) sqlite3_str_append(pAccum, zSep, nSep); +#ifndef SQLITE_OMIT_WINDOWFUNC + else{ + pGCC->nFirstSepLength = 1; + } +#endif + }else if( !firstTerm ){ + zSep = (char*)sqlite3_value_text(argv[1]); + nSep = sqlite3_value_bytes(argv[1]); + if( zSep ){ + sqlite3_str_append(&pGCC->str, zSep, nSep); + } +#ifndef SQLITE_OMIT_WINDOWFUNC + else{ + nSep = 0; + } + if( nSep != pGCC->nFirstSepLength || pGCC->pnSepLengths != 0 ){ + int *pnsl = pGCC->pnSepLengths; + if( pnsl == 0 ){ + /* First separator length variation seen, start tracking them. 
*/ + pnsl = (int*)sqlite3_malloc64((pGCC->nAccum+1) * sizeof(int)); + if( pnsl!=0 ){ + int i = 0, nA = pGCC->nAccum-1; + while( inFirstSepLength; + } + }else{ + pnsl = (int*)sqlite3_realloc64(pnsl, pGCC->nAccum * sizeof(int)); + } + if( pnsl!=0 ){ + if( ALWAYS(pGCC->nAccum>0) ){ + pnsl[pGCC->nAccum-1] = nSep; + } + pGCC->pnSepLengths = pnsl; + }else{ + sqlite3StrAccumSetError(&pGCC->str, SQLITE_NOMEM); + } + } +#endif } +#ifndef SQLITE_OMIT_WINDOWFUNC + else{ + pGCC->nFirstSepLength = sqlite3_value_bytes(argv[1]); + } + pGCC->nAccum += 1; +#endif zVal = (char*)sqlite3_value_text(argv[0]); nVal = sqlite3_value_bytes(argv[0]); - if( zVal ) sqlite3_str_append(pAccum, zVal, nVal); + if( zVal ) sqlite3_str_append(&pGCC->str, zVal, nVal); } } + #ifndef SQLITE_OMIT_WINDOWFUNC static void groupConcatInverse( sqlite3_context *context, int argc, sqlite3_value **argv ){ - int n; - StrAccum *pAccum; + GroupConcatCtx *pGCC; assert( argc==1 || argc==2 ); + (void)argc; /* Suppress unused parameter warning */ if( sqlite3_value_type(argv[0])==SQLITE_NULL ) return; - pAccum = (StrAccum*)sqlite3_aggregate_context(context, sizeof(*pAccum)); - /* pAccum is always non-NULL since groupConcatStep() will have always + pGCC = (GroupConcatCtx*)sqlite3_aggregate_context(context, sizeof(*pGCC)); + /* pGCC is always non-NULL since groupConcatStep() will have always ** run frist to initialize it */ - if( ALWAYS(pAccum) ){ - n = sqlite3_value_bytes(argv[0]); - if( argc==2 ){ - n += sqlite3_value_bytes(argv[1]); + if( ALWAYS(pGCC) ){ + int nVS; + /* Must call sqlite3_value_text() to convert the argument into text prior + ** to invoking sqlite3_value_bytes(), in case the text encoding is UTF16 */ + (void)sqlite3_value_text(argv[0]); + nVS = sqlite3_value_bytes(argv[0]); + pGCC->nAccum -= 1; + if( pGCC->pnSepLengths!=0 ){ + assert(pGCC->nAccum >= 0); + if( pGCC->nAccum>0 ){ + nVS += *pGCC->pnSepLengths; + memmove(pGCC->pnSepLengths, pGCC->pnSepLengths+1, + (pGCC->nAccum-1)*sizeof(int)); + } }else{ - n++; + /* If removing single accumulated string, harmlessly over-do. 
*/ + nVS += pGCC->nFirstSepLength; } - if( n>=(int)pAccum->nChar ){ - pAccum->nChar = 0; + if( nVS>=(int)pGCC->str.nChar ){ + pGCC->str.nChar = 0; }else{ - pAccum->nChar -= n; - memmove(pAccum->zText, &pAccum->zText[n], pAccum->nChar); + pGCC->str.nChar -= nVS; + memmove(pGCC->str.zText, &pGCC->str.zText[nVS], pGCC->str.nChar); + } + if( pGCC->str.nChar==0 ){ + pGCC->str.mxAlloc = 0; + sqlite3_free(pGCC->pnSepLengths); + pGCC->pnSepLengths = 0; } - if( pAccum->nChar==0 ) pAccum->mxAlloc = 0; } } #else # define groupConcatInverse 0 #endif /* SQLITE_OMIT_WINDOWFUNC */ static void groupConcatFinalize(sqlite3_context *context){ - StrAccum *pAccum; - pAccum = sqlite3_aggregate_context(context, 0); - if( pAccum ){ - if( pAccum->accError==SQLITE_TOOBIG ){ - sqlite3_result_error_toobig(context); - }else if( pAccum->accError==SQLITE_NOMEM ){ - sqlite3_result_error_nomem(context); - }else{ - sqlite3_result_text(context, sqlite3StrAccumFinish(pAccum), -1, - sqlite3_free); - } + GroupConcatCtx *pGCC + = (GroupConcatCtx*)sqlite3_aggregate_context(context, 0); + if( pGCC ){ + sqlite3ResultStrAccum(context, &pGCC->str); +#ifndef SQLITE_OMIT_WINDOWFUNC + sqlite3_free(pGCC->pnSepLengths); +#endif } } #ifndef SQLITE_OMIT_WINDOWFUNC static void groupConcatValue(sqlite3_context *context){ - sqlite3_str *pAccum; - pAccum = (sqlite3_str*)sqlite3_aggregate_context(context, 0); - if( pAccum ){ + GroupConcatCtx *pGCC + = (GroupConcatCtx*)sqlite3_aggregate_context(context, 0); + if( pGCC ){ + StrAccum *pAccum = &pGCC->str; if( pAccum->accError==SQLITE_TOOBIG ){ sqlite3_result_error_toobig(context); }else if( pAccum->accError==SQLITE_NOMEM ){ sqlite3_result_error_nomem(context); }else{ const char *zText = sqlite3_str_value(pAccum); - sqlite3_result_text(context, zText, -1, SQLITE_TRANSIENT); + sqlite3_result_text(context, zText, pAccum->nChar, SQLITE_TRANSIENT); } } } @@ -121184,11 +122109,12 @@ SQLITE_PRIVATE int sqlite3IsLikeFunction(sqlite3 *db, Expr *pExpr, int *pIsNocas int nExpr; assert( pExpr!=0 ); assert( pExpr->op==TK_FUNCTION ); + assert( ExprUseXList(pExpr) ); if( !pExpr->x.pList ){ return 0; } - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); nExpr = pExpr->x.pList->nExpr; + assert( !ExprHasProperty(pExpr, EP_IntValue) ); pDef = sqlite3FindFunction(db, pExpr->u.zToken, nExpr, SQLITE_UTF8, 0); #ifdef SQLITE_ENABLE_UNKNOWN_SQL_FUNCTION if( pDef==0 ) return 0; @@ -121212,6 +122138,7 @@ SQLITE_PRIVATE int sqlite3IsLikeFunction(sqlite3 *db, Expr *pExpr, int *pIsNocas Expr *pEscape = pExpr->x.pList->a[2].pExpr; char *zEscape; if( pEscape->op!=TK_STRING ) return 0; + assert( !ExprHasProperty(pEscape, EP_IntValue) ); zEscape = pEscape->u.zToken; if( zEscape[0]==0 || zEscape[1]!=0 ) return 0; if( zEscape[0]==aWc[0] ) return 0; @@ -121438,12 +122365,12 @@ SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void){ */ static FuncDef aBuiltinFunc[] = { /***** Functions only available with SQLITE_TESTCTRL_INTERNAL_FUNCTIONS *****/ +#if !defined(SQLITE_UNTESTABLE) TEST_FUNC(implies_nonnull_row, 2, INLINEFUNC_implies_nonnull_row, 0), TEST_FUNC(expr_compare, 2, INLINEFUNC_expr_compare, 0), TEST_FUNC(expr_implies_expr, 2, INLINEFUNC_expr_implies_expr, 0), -#ifdef SQLITE_DEBUG - TEST_FUNC(affinity, 1, INLINEFUNC_affinity, 0), -#endif + TEST_FUNC(affinity, 1, INLINEFUNC_affinity, 0), +#endif /* !defined(SQLITE_UNTESTABLE) */ /***** Regular functions *****/ #ifdef SQLITE_SOUNDEX FUNCTION(soundex, 1, 0, 0, soundexFunc ), @@ -121475,11 +122402,11 @@ SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void){ FUNCTION(min, -1, 
0, 1, minmaxFunc ), FUNCTION(min, 0, 0, 1, 0 ), WAGGREGATE(min, 1, 0, 1, minmaxStep, minMaxFinalize, minMaxValue, 0, - SQLITE_FUNC_MINMAX ), + SQLITE_FUNC_MINMAX|SQLITE_FUNC_ANYORDER ), FUNCTION(max, -1, 1, 1, minmaxFunc ), FUNCTION(max, 0, 1, 1, 0 ), WAGGREGATE(max, 1, 1, 1, minmaxStep, minMaxFinalize, minMaxValue, 0, - SQLITE_FUNC_MINMAX ), + SQLITE_FUNC_MINMAX|SQLITE_FUNC_ANYORDER ), FUNCTION2(typeof, 1, 0, 0, typeofFunc, SQLITE_FUNC_TYPEOF), FUNCTION2(length, 1, 0, 0, lengthFunc, SQLITE_FUNC_LENGTH), FUNCTION(instr, 2, 0, 0, instrFunc ), @@ -121515,9 +122442,10 @@ SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void){ WAGGREGATE(total, 1,0,0, sumStep,totalFinalize,totalFinalize,sumInverse, 0), WAGGREGATE(avg, 1,0,0, sumStep, avgFinalize, avgFinalize, sumInverse, 0), WAGGREGATE(count, 0,0,0, countStep, - countFinalize, countFinalize, countInverse, SQLITE_FUNC_COUNT ), + countFinalize, countFinalize, countInverse, + SQLITE_FUNC_COUNT|SQLITE_FUNC_ANYORDER ), WAGGREGATE(count, 1,0,0, countStep, - countFinalize, countFinalize, countInverse, 0 ), + countFinalize, countFinalize, countInverse, SQLITE_FUNC_ANYORDER ), WAGGREGATE(group_concat, 1, 0, 0, groupConcatStep, groupConcatFinalize, groupConcatValue, groupConcatInverse, 0), WAGGREGATE(group_concat, 2, 0, 0, groupConcatStep, @@ -121592,6 +122520,7 @@ SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void){ for(p=sqlite3BuiltinFunctions.a[i]; p; p=p->u.pHash){ int n = sqlite3Strlen30(p->zName); int h = p->zName[0] + n; + assert( p->funcFlags & SQLITE_FUNC_BUILTIN ); printf(" %s(%d)", p->zName, h); } printf("\n"); @@ -121819,7 +122748,9 @@ SQLITE_PRIVATE int sqlite3FkLocateIndex( */ if( pParent->iPKey>=0 ){ if( !zKey ) return 0; - if( !sqlite3StrICmp(pParent->aCol[pParent->iPKey].zName, zKey) ) return 0; + if( !sqlite3StrICmp(pParent->aCol[pParent->iPKey].zCnName, zKey) ){ + return 0; + } } }else if( paiCol ){ assert( nCol>1 ); @@ -121861,11 +122792,11 @@ SQLITE_PRIVATE int sqlite3FkLocateIndex( /* If the index uses a collation sequence that is different from ** the default collation sequence for the column, this index is ** unusable. Bail out early in this case. */ - zDfltColl = pParent->aCol[iCol].zColl; + zDfltColl = sqlite3ColumnColl(&pParent->aCol[iCol]); if( !zDfltColl ) zDfltColl = sqlite3StrBINARY; if( sqlite3StrICmp(pIdx->azColl[i], zDfltColl) ) break; - zIdxCol = pParent->aCol[iCol].zName; + zIdxCol = pParent->aCol[iCol].zCnName; for(j=0; jaCol[j].zCol, zIdxCol)==0 ){ if( aiCol ) aiCol[i] = pFKey->aCol[j].iFrom; @@ -122089,7 +123020,7 @@ static Expr *exprTableRegister( pCol = &pTab->aCol[iCol]; pExpr->iTable = regBase + sqlite3TableColumnToStorage(pTab,iCol) + 1; pExpr->affExpr = pCol->affinity; - zColl = pCol->zColl; + zColl = sqlite3ColumnColl(pCol); if( zColl==0 ) zColl = db->pDfltColl->zName; pExpr = sqlite3ExprAddCollateString(pParse, pExpr, zColl); }else{ @@ -122112,6 +123043,7 @@ static Expr *exprTableColumn( ){ Expr *pExpr = sqlite3Expr(db, TK_COLUMN, 0); if( pExpr ){ + assert( ExprUseYTab(pExpr) ); pExpr->y.pTab = pTab; pExpr->iTable = iCursor; pExpr->iColumn = iCol; @@ -122198,7 +123130,7 @@ static void fkScanChildren( pLeft = exprTableRegister(pParse, pTab, regData, iCol); iCol = aiCol ? 
aiCol[i] : pFKey->aCol[0].iFrom; assert( iCol>=0 ); - zCol = pFKey->pFrom->aCol[iCol].zName; + zCol = pFKey->pFrom->aCol[iCol].zCnName; pRight = sqlite3Expr(db, TK_ID, zCol); pEq = sqlite3PExpr(pParse, TK_EQ, pLeft, pRight); pWhere = sqlite3ExprAnd(pParse, pWhere, pEq); @@ -122233,7 +123165,7 @@ static void fkScanChildren( i16 iCol = pIdx->aiColumn[i]; assert( iCol>=0 ); pLeft = exprTableRegister(pParse, pTab, regData, iCol); - pRight = sqlite3Expr(db, TK_ID, pTab->aCol[iCol].zName); + pRight = sqlite3Expr(db, TK_ID, pTab->aCol[iCol].zCnName); pEq = sqlite3PExpr(pParse, TK_IS, pLeft, pRight); pAll = sqlite3ExprAnd(pParse, pAll, pEq); } @@ -122322,12 +123254,12 @@ static void fkTriggerDelete(sqlite3 *dbMem, Trigger *p){ */ SQLITE_PRIVATE void sqlite3FkDropTable(Parse *pParse, SrcList *pName, Table *pTab){ sqlite3 *db = pParse->db; - if( (db->flags&SQLITE_ForeignKeys) && !IsVirtual(pTab) ){ + if( (db->flags&SQLITE_ForeignKeys) && IsOrdinaryTable(pTab) ){ int iSkip = 0; Vdbe *v = sqlite3GetVdbe(pParse); assert( v ); /* VDBE has already been allocated */ - assert( pTab->pSelect==0 ); /* Not a view */ + assert( IsOrdinaryTable(pTab) ); if( sqlite3FkReferences(pTab)==0 ){ /* Search for a deferred foreign key constraint for which this table ** is the child table. If one cannot be found, return without @@ -122335,7 +123267,7 @@ SQLITE_PRIVATE void sqlite3FkDropTable(Parse *pParse, SrcList *pName, Table *pTa ** the entire DELETE if there are no outstanding deferred constraints ** when this statement is run. */ FKey *p; - for(p=pTab->pFKey; p; p=p->pNextFrom){ + for(p=pTab->u.tab.pFKey; p; p=p->pNextFrom){ if( p->isDeferred || (db->flags & SQLITE_DeferFKs) ) break; } if( !p ) return; @@ -122424,7 +123356,7 @@ static int fkParentIsModified( if( aChange[iKey]>=0 || (iKey==pTab->iPKey && bChngRowid) ){ Column *pCol = &pTab->aCol[iKey]; if( zKey ){ - if( 0==sqlite3StrICmp(pCol->zName, zKey) ) return 1; + if( 0==sqlite3StrICmp(pCol->zCnName, zKey) ) return 1; }else if( pCol->colFlags & COLFLAG_PRIMKEY ){ return 1; } @@ -122491,13 +123423,14 @@ SQLITE_PRIVATE void sqlite3FkCheck( /* If foreign-keys are disabled, this function is a no-op. */ if( (db->flags&SQLITE_ForeignKeys)==0 ) return; + if( !IsOrdinaryTable(pTab) ) return; iDb = sqlite3SchemaToIndex(db, pTab->pSchema); zDb = db->aDb[iDb].zDbSName; /* Loop through all the foreign key constraints for which pTab is the ** child table (the table that the foreign key definition is part of). */ - for(pFKey=pTab->pFKey; pFKey; pFKey=pFKey->pNextFrom){ + for(pFKey=pTab->u.tab.pFKey; pFKey; pFKey=pFKey->pNextFrom){ Table *pTo; /* Parent table of foreign key pFKey */ Index *pIdx = 0; /* Index on key columns in pTo */ int *aiFree = 0; @@ -122564,7 +123497,7 @@ SQLITE_PRIVATE void sqlite3FkCheck( ** values read from the parent table are NULL. */ if( db->xAuth ){ int rcauth; - char *zCol = pTo->aCol[pIdx ? pIdx->aiColumn[i] : pTo->iPKey].zName; + char *zCol = pTo->aCol[pIdx ? 
pIdx->aiColumn[i] : pTo->iPKey].zCnName; rcauth = sqlite3AuthReadCol(pParse, pTo->zName, zCol, iDb); bIgnore = (rcauth==SQLITE_IGNORE); } @@ -122679,10 +123612,10 @@ SQLITE_PRIVATE u32 sqlite3FkOldmask( Table *pTab /* Table being modified */ ){ u32 mask = 0; - if( pParse->db->flags&SQLITE_ForeignKeys ){ + if( pParse->db->flags&SQLITE_ForeignKeys && IsOrdinaryTable(pTab) ){ FKey *p; int i; - for(p=pTab->pFKey; p; p=p->pNextFrom){ + for(p=pTab->u.tab.pFKey; p; p=p->pNextFrom){ for(i=0; inCol; i++) mask |= COLUMN_MASK(p->aCol[i].iFrom); } for(p=sqlite3FkReferences(pTab); p; p=p->pNextTo){ @@ -122732,19 +123665,19 @@ SQLITE_PRIVATE int sqlite3FkRequired( ){ int eRet = 1; /* Value to return if bHaveFK is true */ int bHaveFK = 0; /* If FK processing is required */ - if( pParse->db->flags&SQLITE_ForeignKeys ){ + if( pParse->db->flags&SQLITE_ForeignKeys && IsOrdinaryTable(pTab) ){ if( !aChange ){ /* A DELETE operation. Foreign key processing is required if the ** table in question is either the child or parent table for any ** foreign key constraint. */ - bHaveFK = (sqlite3FkReferences(pTab) || pTab->pFKey); + bHaveFK = (sqlite3FkReferences(pTab) || pTab->u.tab.pFKey); }else{ /* This is an UPDATE. Foreign key processing is only required if the ** operation modifies one or more child or parent key columns. */ FKey *p; /* Check if any child key columns are being modified. */ - for(p=pTab->pFKey; p; p=p->pNextFrom){ + for(p=pTab->u.tab.pFKey; p; p=p->pNextFrom){ if( fkChildIsModified(pTab, p, aChange, chngRowid) ){ if( 0==sqlite3_stricmp(pTab->zName, p->zTo) ) eRet = 2; bHaveFK = 1; @@ -122837,8 +123770,8 @@ static Trigger *fkActionTrigger( assert( pIdx!=0 || (pTab->iPKey>=0 && pTab->iPKeynCol) ); assert( pIdx==0 || pIdx->aiColumn[i]>=0 ); sqlite3TokenInit(&tToCol, - pTab->aCol[pIdx ? pIdx->aiColumn[i] : pTab->iPKey].zName); - sqlite3TokenInit(&tFromCol, pFKey->pFrom->aCol[iFromCol].zName); + pTab->aCol[pIdx ? pIdx->aiColumn[i] : pTab->iPKey].zCnName); + sqlite3TokenInit(&tFromCol, pFKey->pFrom->aCol[iFromCol].zCnName); /* Create the expression "OLD.zToCol = zFromCol". It is important ** that the "OLD.zToCol" term is on the LHS of the = operator, so @@ -122883,7 +123816,7 @@ static Trigger *fkActionTrigger( testcase( pCol->colFlags & COLFLAG_STORED ); pDflt = 0; }else{ - pDflt = pCol->pDflt; + pDflt = sqlite3ColumnExpr(pFKey->pFrom, pCol); } if( pDflt ){ pNew = sqlite3ExprDup(db, pDflt, 0); @@ -123020,9 +123953,9 @@ SQLITE_PRIVATE void sqlite3FkDelete(sqlite3 *db, Table *pTab){ FKey *pFKey; /* Iterator variable */ FKey *pNext; /* Copy of pFKey->pNextFrom */ - assert( db==0 || IsVirtual(pTab) - || sqlite3SchemaMutexHeld(db, 0, pTab->pSchema) ); - for(pFKey=pTab->pFKey; pFKey; pFKey=pNext){ + assert( IsOrdinaryTable(pTab) ); + for(pFKey=pTab->u.tab.pFKey; pFKey; pFKey=pNext){ + assert( db==0 || sqlite3SchemaMutexHeld(db, 0, pTab->pSchema) ); /* Remove the FK from the fkeyHash hash table. */ if( !db || db->pnBytesFreed==0 ){ @@ -123169,28 +124102,68 @@ SQLITE_PRIVATE const char *sqlite3IndexAffinityStr(sqlite3 *db, Index *pIdx){ } /* +** Make changes to the evolving bytecode to do affinity transformations +** of values that are about to be gathered into a row for table pTab. +** +** For ordinary (legacy, non-strict) tables: +** ----------------------------------------- +** ** Compute the affinity string for table pTab, if it has not already been ** computed. As an optimization, omit trailing SQLITE_AFF_BLOB affinities. 
** -** If the affinity exists (if it is no entirely SQLITE_AFF_BLOB values) and -** if iReg>0 then code an OP_Affinity opcode that will set the affinities -** for register iReg and following. Or if affinities exists and iReg==0, +** If the affinity string is empty (because it was all SQLITE_AFF_BLOB entries +** which were then optimized out) then this routine becomes a no-op. +** +** Otherwise if iReg>0 then code an OP_Affinity opcode that will set the +** affinities for register iReg and following. Or if iReg==0, ** then just set the P4 operand of the previous opcode (which should be ** an OP_MakeRecord) to the affinity string. ** ** A column affinity string has one character per column: ** -** Character Column affinity -** ------------------------------ -** 'A' BLOB -** 'B' TEXT -** 'C' NUMERIC -** 'D' INTEGER -** 'E' REAL +** Character Column affinity +** --------- --------------- +** 'A' BLOB +** 'B' TEXT +** 'C' NUMERIC +** 'D' INTEGER +** 'E' REAL +** +** For STRICT tables: +** ------------------ +** +** Generate an appropropriate OP_TypeCheck opcode that will verify the +** datatypes against the column definitions in pTab. If iReg==0, that +** means an OP_MakeRecord opcode has already been generated and should be +** the last opcode generated. The new OP_TypeCheck needs to be inserted +** before the OP_MakeRecord. The new OP_TypeCheck should use the same +** register set as the OP_MakeRecord. If iReg>0 then register iReg is +** the first of a series of registers that will form the new record. +** Apply the type checking to that array of registers. */ SQLITE_PRIVATE void sqlite3TableAffinity(Vdbe *v, Table *pTab, int iReg){ int i, j; - char *zColAff = pTab->zColAff; + char *zColAff; + if( pTab->tabFlags & TF_Strict ){ + if( iReg==0 ){ + /* Move the previous opcode (which should be OP_MakeRecord) forward + ** by one slot and insert a new OP_TypeCheck where the current + ** OP_MakeRecord is found */ + VdbeOp *pPrev; + sqlite3VdbeAppendP4(v, pTab, P4_TABLE); + pPrev = sqlite3VdbeGetOp(v, -1); + assert( pPrev!=0 ); + assert( pPrev->opcode==OP_MakeRecord || sqlite3VdbeDb(v)->mallocFailed ); + pPrev->opcode = OP_TypeCheck; + sqlite3VdbeAddOp3(v, OP_MakeRecord, pPrev->p1, pPrev->p2, pPrev->p3); + }else{ + /* Insert an isolated OP_Typecheck */ + sqlite3VdbeAddOp2(v, OP_TypeCheck, iReg, pTab->nNVCol); + sqlite3VdbeAppendP4(v, pTab, P4_TABLE); + } + return; + } + zColAff = pTab->zColAff; if( zColAff==0 ){ sqlite3 *db = sqlite3VdbeDb(v); zColAff = (char *)sqlite3DbMallocRaw(0, pTab->nCol+1); @@ -123216,6 +124189,8 @@ SQLITE_PRIVATE void sqlite3TableAffinity(Vdbe *v, Table *pTab, int iReg){ if( iReg ){ sqlite3VdbeAddOp4(v, OP_Affinity, iReg, i, 0, zColAff, i); }else{ + assert( sqlite3VdbeGetOp(v, -1)->opcode==OP_MakeRecord + || sqlite3VdbeDb(v)->mallocFailed ); sqlite3VdbeChangeP4(v, -1, zColAff, i); } } @@ -123299,24 +124274,30 @@ SQLITE_PRIVATE void sqlite3ComputeGeneratedColumns( ** that appropriate affinity has been applied to the regular columns */ sqlite3TableAffinity(pParse->pVdbe, pTab, iRegStore); - if( (pTab->tabFlags & TF_HasStored)!=0 - && (pOp = sqlite3VdbeGetOp(pParse->pVdbe,-1))->opcode==OP_Affinity - ){ - /* Change the OP_Affinity argument to '@' (NONE) for all stored - ** columns. '@' is the no-op affinity and those columns have not - ** yet been computed. 
*/ - int ii, jj; - char *zP4 = pOp->p4.z; - assert( zP4!=0 ); - assert( pOp->p4type==P4_DYNAMIC ); - for(ii=jj=0; zP4[jj]; ii++){ - if( pTab->aCol[ii].colFlags & COLFLAG_VIRTUAL ){ - continue; + if( (pTab->tabFlags & TF_HasStored)!=0 ){ + pOp = sqlite3VdbeGetOp(pParse->pVdbe,-1); + if( pOp->opcode==OP_Affinity ){ + /* Change the OP_Affinity argument to '@' (NONE) for all stored + ** columns. '@' is the no-op affinity and those columns have not + ** yet been computed. */ + int ii, jj; + char *zP4 = pOp->p4.z; + assert( zP4!=0 ); + assert( pOp->p4type==P4_DYNAMIC ); + for(ii=jj=0; zP4[jj]; ii++){ + if( pTab->aCol[ii].colFlags & COLFLAG_VIRTUAL ){ + continue; + } + if( pTab->aCol[ii].colFlags & COLFLAG_STORED ){ + zP4[jj] = SQLITE_AFF_NONE; + } + jj++; } - if( pTab->aCol[ii].colFlags & COLFLAG_STORED ){ - zP4[jj] = SQLITE_AFF_NONE; - } - jj++; + }else if( pOp->opcode==OP_TypeCheck ){ + /* If an OP_TypeCheck was generated because the table is STRICT, + ** then set the P3 operand to indicate that generated columns should + ** not be checked */ + pOp->p3 = 1; } } @@ -123352,7 +124333,7 @@ SQLITE_PRIVATE void sqlite3ComputeGeneratedColumns( int x; pCol->colFlags |= COLFLAG_BUSY; w.eCode = 0; - sqlite3WalkExpr(&w, pCol->pDflt); + sqlite3WalkExpr(&w, sqlite3ColumnExpr(pTab, pCol)); pCol->colFlags &= ~COLFLAG_BUSY; if( w.eCode & COLFLAG_NOTAVAIL ){ pRedo = pCol; @@ -123361,13 +124342,13 @@ SQLITE_PRIVATE void sqlite3ComputeGeneratedColumns( eProgress = 1; assert( pCol->colFlags & COLFLAG_GENERATED ); x = sqlite3TableColumnToStorage(pTab, i) + iRegStore; - sqlite3ExprCodeGeneratedColumn(pParse, pCol, x); + sqlite3ExprCodeGeneratedColumn(pParse, pTab, pCol, x); pCol->colFlags &= ~COLFLAG_NOTAVAIL; } } }while( pRedo && eProgress ); if( pRedo ){ - sqlite3ErrorMsg(pParse, "generated column loop on \"%s\"", pRedo->zName); + sqlite3ErrorMsg(pParse, "generated column loop on \"%s\"", pRedo->zCnName); } pParse->iSelfTab = 0; } @@ -123762,7 +124743,7 @@ SQLITE_PRIVATE void sqlite3Insert( */ #ifndef SQLITE_OMIT_TRIGGER pTrigger = sqlite3TriggersExist(pParse, pTab, TK_INSERT, 0, &tmask); - isView = pTab->pSelect!=0; + isView = IsView(pTab); #else # define pTrigger 0 # define tmask 0 @@ -123853,7 +124834,7 @@ SQLITE_PRIVATE void sqlite3Insert( } for(i=0; inId; i++){ for(j=0; jnCol; j++){ - if( sqlite3StrICmp(pColumn->a[i].zName, pTab->aCol[j].zName)==0 ){ + if( sqlite3StrICmp(pColumn->a[i].zName, pTab->aCol[j].zCnName)==0 ){ pColumn->a[i].idx = j; if( i!=j ) bIdListInOrder = 0; if( j==pTab->iPKey ){ @@ -123863,7 +124844,7 @@ SQLITE_PRIVATE void sqlite3Insert( if( pTab->aCol[j].colFlags & (COLFLAG_STORED|COLFLAG_VIRTUAL) ){ sqlite3ErrorMsg(pParse, "cannot INSERT into generated column \"%s\"", - pTab->aCol[j].zName); + pTab->aCol[j].zCnName); goto insert_cleanup; } #endif @@ -124048,7 +125029,7 @@ SQLITE_PRIVATE void sqlite3Insert( pTab->zName); goto insert_cleanup; } - if( pTab->pSelect ){ + if( IsView(pTab) ){ sqlite3ErrorMsg(pParse, "cannot UPSERT a view"); goto insert_cleanup; } @@ -124147,7 +125128,9 @@ SQLITE_PRIVATE void sqlite3Insert( }else if( pColumn==0 ){ /* Hidden columns that are not explicitly named in the INSERT ** get there default value */ - sqlite3ExprCodeFactorable(pParse, pTab->aCol[i].pDflt, iRegStore); + sqlite3ExprCodeFactorable(pParse, + sqlite3ColumnExpr(pTab, &pTab->aCol[i]), + iRegStore); continue; } } @@ -124156,13 +125139,17 @@ SQLITE_PRIVATE void sqlite3Insert( if( j>=pColumn->nId ){ /* A column not named in the insert column list gets its ** default value */ - 
sqlite3ExprCodeFactorable(pParse, pTab->aCol[i].pDflt, iRegStore); + sqlite3ExprCodeFactorable(pParse, + sqlite3ColumnExpr(pTab, &pTab->aCol[i]), + iRegStore); continue; } k = j; }else if( nColumn==0 ){ /* This is INSERT INTO ... DEFAULT VALUES. Load the default value. */ - sqlite3ExprCodeFactorable(pParse, pTab->aCol[i].pDflt, iRegStore); + sqlite3ExprCodeFactorable(pParse, + sqlite3ColumnExpr(pTab, &pTab->aCol[i]), + iRegStore); continue; }else{ k = i - nHidden; @@ -124677,7 +125664,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( db = pParse->db; v = pParse->pVdbe; assert( v!=0 ); - assert( pTab->pSelect==0 ); /* This table is not a VIEW */ + assert( !IsView(pTab) ); /* This table is not a VIEW */ nCol = pTab->nCol; /* pPk is the PRIMARY KEY index for WITHOUT ROWID tables and NULL for @@ -124728,7 +125715,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( } if( onError==OE_Replace ){ if( b2ndPass /* REPLACE becomes ABORT on the 2nd pass */ - || pCol->pDflt==0 /* REPLACE is ABORT if no DEFAULT value */ + || pCol->iDflt==0 /* REPLACE is ABORT if no DEFAULT value */ ){ testcase( pCol->colFlags & COLFLAG_VIRTUAL ); testcase( pCol->colFlags & COLFLAG_STORED ); @@ -124750,7 +125737,8 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( VdbeCoverage(v); assert( (pCol->colFlags & COLFLAG_GENERATED)==0 ); nSeenReplace++; - sqlite3ExprCodeCopy(pParse, pCol->pDflt, iReg); + sqlite3ExprCodeCopy(pParse, + sqlite3ColumnExpr(pTab, pCol), iReg); sqlite3VdbeJumpHere(v, addr1); break; } @@ -124760,7 +125748,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( case OE_Rollback: case OE_Fail: { char *zMsg = sqlite3MPrintf(db, "%s.%s", pTab->zName, - pCol->zName); + pCol->zCnName); sqlite3VdbeAddOp3(v, OP_HaltIfNull, SQLITE_CONSTRAINT_NOTNULL, onError, iReg); sqlite3VdbeAppendP4(v, zMsg, P4_DYNAMIC); @@ -125178,7 +126166,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( testcase( sqlite3TableColumnToStorage(pTab, iField)!=iField ); x = sqlite3TableColumnToStorage(pTab, iField) + regNewData + 1; sqlite3VdbeAddOp2(v, OP_SCopy, x, regIdx+i); - VdbeComment((v, "%s", pTab->aCol[iField].zName)); + VdbeComment((v, "%s", pTab->aCol[iField].zCnName)); } } sqlite3VdbeAddOp3(v, OP_MakeRecord, regIdx, pIdx->nColumn, aRegIdx[ix]); @@ -125230,6 +126218,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( ** This is not possible for ENABLE_PREUPDATE_HOOK builds, as the row ** must be explicitly deleted in order to ensure any pre-update hook ** is invoked. 
*/ + assert( IsOrdinaryTable(pTab) ); #ifndef SQLITE_ENABLE_PREUPDATE_HOOK if( (ix==0 && pIdx->pNext==0) /* Condition 3 */ && pPk==pIdx /* Condition 2 */ @@ -125237,7 +126226,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( && ( 0==(db->flags&SQLITE_RecTriggers) || /* Condition 4 */ 0==sqlite3TriggersExist(pParse, pTab, TK_DELETE, 0, 0)) && ( 0==(db->flags&SQLITE_ForeignKeys) || /* Condition 5 */ - (0==pTab->pFKey && 0==sqlite3FkReferences(pTab))) + (0==pTab->u.tab.pFKey && 0==sqlite3FkReferences(pTab))) ){ sqlite3VdbeResolveLabel(v, addrUniqueOk); continue; @@ -125272,7 +126261,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( x = sqlite3TableColumnToIndex(pIdx, pPk->aiColumn[i]); sqlite3VdbeAddOp3(v, OP_Column, iThisCur, x, regR+i); VdbeComment((v, "%s.%s", pTab->zName, - pTab->aCol[pPk->aiColumn[i]].zName)); + pTab->aCol[pPk->aiColumn[i]].zCnName)); } } if( isUpdate ){ @@ -125336,7 +126325,8 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( assert( onError==OE_Replace ); nConflictCk = sqlite3VdbeCurrentAddr(v) - addrConflictCk; - assert( nConflictCk>0 ); + assert( nConflictCk>0 || db->mallocFailed ); + testcase( nConflictCk<=0 ); testcase( nConflictCk>1 ); if( regTrigCnt ){ sqlite3MultiWrite(pParse); @@ -125471,7 +126461,7 @@ SQLITE_PRIVATE void sqlite3SetMakeRecordP5(Vdbe *v, Table *pTab){ if( pTab->pSchema->file_format<2 ) return; for(i=pTab->nCol-1; i>0; i--){ - if( pTab->aCol[i].pDflt!=0 ) break; + if( pTab->aCol[i].iDflt!=0 ) break; if( pTab->aCol[i].colFlags & COLFLAG_PRIMKEY ) break; } sqlite3VdbeChangeP5(v, i+1); @@ -125536,7 +126526,7 @@ SQLITE_PRIVATE void sqlite3CompleteInsertion( v = pParse->pVdbe; assert( v!=0 ); - assert( pTab->pSelect==0 ); /* This table is not a VIEW */ + assert( !IsView(pTab) ); /* This table is not a VIEW */ for(i=0, pIdx=pTab->pIndex; pIdx; pIdx=pIdx->pNext, i++){ /* All REPLACE indexes are at the end of the list */ assert( pIdx->onError!=OE_Replace @@ -125622,8 +126612,9 @@ SQLITE_PRIVATE int sqlite3OpenTableAndIndices( assert( op==OP_OpenWrite || p5==0 ); if( IsVirtual(pTab) ){ /* This routine is a no-op for virtual tables. Leave the output - ** variables *piDataCur and *piIdxCur uninitialized so that valgrind - ** can detect if they are used by mistake in the caller. */ + ** variables *piDataCur and *piIdxCur set to illegal cursor numbers + ** for improved error detection. */ + *piDataCur = *piIdxCur = -999; return 0; } iDb = sqlite3SchemaToIndex(pParse->db, pTab->pSchema); @@ -125838,13 +126829,8 @@ static int xferOptimization( if( HasRowid(pDest)!=HasRowid(pSrc) ){ return 0; /* source and destination must both be WITHOUT ROWID or not */ } -#ifndef SQLITE_OMIT_VIRTUALTABLE - if( IsVirtual(pSrc) ){ - return 0; /* tab2 must not be a virtual table */ - } -#endif - if( pSrc->pSelect ){ - return 0; /* tab2 may not be a view */ + if( !IsOrdinaryTable(pSrc) ){ + return 0; /* tab2 may not be a view or virtual table */ } if( pDest->nCol!=pSrc->nCol ){ return 0; /* Number of columns must be the same in tab1 and tab2 */ @@ -125852,6 +126838,9 @@ static int xferOptimization( if( pDest->iPKey!=pSrc->iPKey ){ return 0; /* Both tables must have the same INTEGER PRIMARY KEY */ } + if( (pDest->tabFlags & TF_Strict)!=0 && (pSrc->tabFlags & TF_Strict)==0 ){ + return 0; /* Cannot feed from a non-strict into a strict table */ + } for(i=0; inCol; i++){ Column *pDestCol = &pDest->aCol[i]; Column *pSrcCol = &pSrc->aCol[i]; @@ -125888,7 +126877,9 @@ static int xferOptimization( ** This requirement could be relaxed for VIRTUAL columns, I suppose. 
*/ if( (pDestCol->colFlags & COLFLAG_GENERATED)!=0 ){ - if( sqlite3ExprCompare(0, pSrcCol->pDflt, pDestCol->pDflt, -1)!=0 ){ + if( sqlite3ExprCompare(0, + sqlite3ColumnExpr(pSrc, pSrcCol), + sqlite3ColumnExpr(pDest, pDestCol), -1)!=0 ){ testcase( pDestCol->colFlags & COLFLAG_VIRTUAL ); testcase( pDestCol->colFlags & COLFLAG_STORED ); return 0; /* Different generator expressions */ @@ -125898,7 +126889,8 @@ static int xferOptimization( if( pDestCol->affinity!=pSrcCol->affinity ){ return 0; /* Affinity must be the same on all columns */ } - if( sqlite3_stricmp(pDestCol->zColl, pSrcCol->zColl)!=0 ){ + if( sqlite3_stricmp(sqlite3ColumnColl(pDestCol), + sqlite3ColumnColl(pSrcCol))!=0 ){ return 0; /* Collating sequence must be the same on all columns */ } if( pDestCol->notNull && !pSrcCol->notNull ){ @@ -125906,11 +126898,15 @@ static int xferOptimization( } /* Default values for second and subsequent columns need to match. */ if( (pDestCol->colFlags & COLFLAG_GENERATED)==0 && i>0 ){ - assert( pDestCol->pDflt==0 || pDestCol->pDflt->op==TK_SPAN ); - assert( pSrcCol->pDflt==0 || pSrcCol->pDflt->op==TK_SPAN ); - if( (pDestCol->pDflt==0)!=(pSrcCol->pDflt==0) - || (pDestCol->pDflt && strcmp(pDestCol->pDflt->u.zToken, - pSrcCol->pDflt->u.zToken)!=0) + Expr *pDestExpr = sqlite3ColumnExpr(pDest, pDestCol); + Expr *pSrcExpr = sqlite3ColumnExpr(pSrc, pSrcCol); + assert( pDestExpr==0 || pDestExpr->op==TK_SPAN ); + assert( pDestExpr==0 || !ExprHasProperty(pDestExpr, EP_IntValue) ); + assert( pSrcExpr==0 || pSrcExpr->op==TK_SPAN ); + assert( pSrcExpr==0 || !ExprHasProperty(pSrcExpr, EP_IntValue) ); + if( (pDestExpr==0)!=(pSrcExpr==0) + || (pDestExpr!=0 && strcmp(pDestExpr->u.zToken, + pSrcExpr->u.zToken)!=0) ){ return 0; /* Default values must be the same for all columns */ } @@ -125947,7 +126943,8 @@ static int xferOptimization( ** the extra complication to make this rule less restrictive is probably ** not worth the effort. 
Ticket [6284df89debdfa61db8073e062908af0c9b6118e] */ - if( (db->flags & SQLITE_ForeignKeys)!=0 && pDest->pFKey!=0 ){ + assert( IsOrdinaryTable(pDest) ); + if( (db->flags & SQLITE_ForeignKeys)!=0 && pDest->u.tab.pFKey!=0 ){ return 0; } #endif @@ -126625,6 +127622,13 @@ struct sqlite3_api_routines { sqlite3_file *(*database_file_object)(const char*); /* Version 3.34.0 and later */ int (*txn_state)(sqlite3*,const char*); + /* Version 3.36.1 and later */ + sqlite3_int64 (*changes64)(sqlite3*); + sqlite3_int64 (*total_changes64)(sqlite3*); + /* Version 3.37.0 and later */ + int (*autovacuum_pages)(sqlite3*, + unsigned int(*)(void*,const char*,unsigned int,unsigned int,unsigned int), + void*, void(*)(void*)); }; /* @@ -126931,6 +127935,11 @@ typedef int (*sqlite3_loadext_entry)( #define sqlite3_database_file_object sqlite3_api->database_file_object /* Version 3.34.0 and later */ #define sqlite3_txn_state sqlite3_api->txn_state +/* Version 3.36.1 and later */ +#define sqlite3_changes64 sqlite3_api->changes64 +#define sqlite3_total_changes64 sqlite3_api->total_changes64 +/* Version 3.37.0 and later */ +#define sqlite3_autovacuum_pages sqlite3_api->autovacuum_pages #endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */ #if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) @@ -127415,6 +128424,11 @@ static const sqlite3_api_routines sqlite3Apis = { sqlite3_database_file_object, /* Version 3.34.0 and later */ sqlite3_txn_state, + /* Version 3.36.1 and later */ + sqlite3_changes64, + sqlite3_total_changes64, + /* Version 3.37.0 and later */ + sqlite3_autovacuum_pages, }; /* True if x is the directory separator character @@ -127877,13 +128891,14 @@ SQLITE_PRIVATE void sqlite3AutoLoadExtensions(sqlite3 *db){ #define PragTyp_SOFT_HEAP_LIMIT 35 #define PragTyp_SYNCHRONOUS 36 #define PragTyp_TABLE_INFO 37 -#define PragTyp_TEMP_STORE 38 -#define PragTyp_TEMP_STORE_DIRECTORY 39 -#define PragTyp_THREADS 40 -#define PragTyp_WAL_AUTOCHECKPOINT 41 -#define PragTyp_WAL_CHECKPOINT 42 -#define PragTyp_LOCK_STATUS 43 -#define PragTyp_STATS 44 +#define PragTyp_TABLE_LIST 38 +#define PragTyp_TEMP_STORE 39 +#define PragTyp_TEMP_STORE_DIRECTORY 40 +#define PragTyp_THREADS 41 +#define PragTyp_WAL_AUTOCHECKPOINT 42 +#define PragTyp_WAL_CHECKPOINT 43 +#define PragTyp_LOCK_STATUS 44 +#define PragTyp_STATS 45 /* Property flags associated with various pragma. 
*/ #define PragFlg_NeedSchema 0x01 /* Force schema load before running */ @@ -127916,45 +128931,51 @@ static const char *const pragCName[] = { /* 13 */ "pk", /* 14 */ "hidden", /* table_info reuses 8 */ - /* 15 */ "seqno", /* Used by: index_xinfo */ - /* 16 */ "cid", - /* 17 */ "name", - /* 18 */ "desc", - /* 19 */ "coll", - /* 20 */ "key", - /* 21 */ "name", /* Used by: function_list */ - /* 22 */ "builtin", - /* 23 */ "type", - /* 24 */ "enc", - /* 25 */ "narg", - /* 26 */ "flags", - /* 27 */ "tbl", /* Used by: stats */ - /* 28 */ "idx", - /* 29 */ "wdth", - /* 30 */ "hght", - /* 31 */ "flgs", - /* 32 */ "seq", /* Used by: index_list */ - /* 33 */ "name", - /* 34 */ "unique", - /* 35 */ "origin", - /* 36 */ "partial", - /* 37 */ "table", /* Used by: foreign_key_check */ - /* 38 */ "rowid", - /* 39 */ "parent", - /* 40 */ "fkid", - /* index_info reuses 15 */ - /* 41 */ "seq", /* Used by: database_list */ - /* 42 */ "name", - /* 43 */ "file", - /* 44 */ "busy", /* Used by: wal_checkpoint */ - /* 45 */ "log", - /* 46 */ "checkpointed", - /* collation_list reuses 32 */ - /* 47 */ "database", /* Used by: lock_status */ - /* 48 */ "status", - /* 49 */ "cache_size", /* Used by: default_cache_size */ + /* 15 */ "schema", /* Used by: table_list */ + /* 16 */ "name", + /* 17 */ "type", + /* 18 */ "ncol", + /* 19 */ "wr", + /* 20 */ "strict", + /* 21 */ "seqno", /* Used by: index_xinfo */ + /* 22 */ "cid", + /* 23 */ "name", + /* 24 */ "desc", + /* 25 */ "coll", + /* 26 */ "key", + /* 27 */ "name", /* Used by: function_list */ + /* 28 */ "builtin", + /* 29 */ "type", + /* 30 */ "enc", + /* 31 */ "narg", + /* 32 */ "flags", + /* 33 */ "tbl", /* Used by: stats */ + /* 34 */ "idx", + /* 35 */ "wdth", + /* 36 */ "hght", + /* 37 */ "flgs", + /* 38 */ "seq", /* Used by: index_list */ + /* 39 */ "name", + /* 40 */ "unique", + /* 41 */ "origin", + /* 42 */ "partial", + /* 43 */ "table", /* Used by: foreign_key_check */ + /* 44 */ "rowid", + /* 45 */ "parent", + /* 46 */ "fkid", + /* index_info reuses 21 */ + /* 47 */ "seq", /* Used by: database_list */ + /* 48 */ "name", + /* 49 */ "file", + /* 50 */ "busy", /* Used by: wal_checkpoint */ + /* 51 */ "log", + /* 52 */ "checkpointed", + /* collation_list reuses 38 */ + /* 53 */ "database", /* Used by: lock_status */ + /* 54 */ "status", + /* 55 */ "cache_size", /* Used by: default_cache_size */ /* module_list pragma_list reuses 9 */ - /* 50 */ "timeout", /* Used by: busy_timeout */ + /* 56 */ "timeout", /* Used by: busy_timeout */ }; /* Definitions of all built-in pragmas */ @@ -128005,7 +129026,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "busy_timeout", /* ePragTyp: */ PragTyp_BUSY_TIMEOUT, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 50, 1, + /* ColNames: */ 56, 1, /* iArg: */ 0 }, #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) {/* zName: */ "cache_size", @@ -128044,7 +129065,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "collation_list", /* ePragTyp: */ PragTyp_COLLATION_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 32, 2, + /* ColNames: */ 38, 2, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_COMPILEOPTION_DIAGS) @@ -128079,14 +129100,14 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "database_list", /* ePragTyp: */ PragTyp_DATABASE_LIST, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0, - /* ColNames: */ 41, 3, + /* ColNames: */ 47, 3, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) && !defined(SQLITE_OMIT_DEPRECATED) {/* zName: */ "default_cache_size", /* ePragTyp: */ 
PragTyp_DEFAULT_CACHE_SIZE, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_SchemaReq|PragFlg_NoColumns1, - /* ColNames: */ 49, 1, + /* ColNames: */ 55, 1, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_FLAG_PRAGMAS) @@ -128116,7 +129137,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "foreign_key_check", /* ePragTyp: */ PragTyp_FOREIGN_KEY_CHECK, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 37, 4, + /* ColNames: */ 43, 4, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_FOREIGN_KEY) @@ -128159,7 +129180,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "function_list", /* ePragTyp: */ PragTyp_FUNCTION_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 21, 6, + /* ColNames: */ 27, 6, /* iArg: */ 0 }, #endif #endif @@ -128188,23 +129209,23 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "index_info", /* ePragTyp: */ PragTyp_INDEX_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 15, 3, + /* ColNames: */ 21, 3, /* iArg: */ 0 }, {/* zName: */ "index_list", /* ePragTyp: */ PragTyp_INDEX_LIST, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 32, 5, + /* ColNames: */ 38, 5, /* iArg: */ 0 }, {/* zName: */ "index_xinfo", /* ePragTyp: */ PragTyp_INDEX_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 15, 6, + /* ColNames: */ 21, 6, /* iArg: */ 1 }, #endif #if !defined(SQLITE_OMIT_INTEGRITY_CHECK) {/* zName: */ "integrity_check", /* ePragTyp: */ PragTyp_INTEGRITY_CHECK, - /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_Result1, + /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_Result1|PragFlg_SchemaOpt, /* ColNames: */ 0, 0, /* iArg: */ 0 }, #endif @@ -128238,7 +129259,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "lock_status", /* ePragTyp: */ PragTyp_LOCK_STATUS, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 47, 2, + /* ColNames: */ 53, 2, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) @@ -128312,7 +129333,7 @@ static const PragmaName aPragmaName[] = { #if !defined(SQLITE_OMIT_INTEGRITY_CHECK) {/* zName: */ "quick_check", /* ePragTyp: */ PragTyp_INTEGRITY_CHECK, - /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_Result1, + /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_Result1|PragFlg_SchemaOpt, /* ColNames: */ 0, 0, /* iArg: */ 0 }, #endif @@ -128377,7 +129398,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "stats", /* ePragTyp: */ PragTyp_STATS, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_SchemaReq, - /* ColNames: */ 27, 5, + /* ColNames: */ 33, 5, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) @@ -128393,6 +129414,11 @@ static const PragmaName aPragmaName[] = { /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, /* ColNames: */ 8, 6, /* iArg: */ 0 }, + {/* zName: */ "table_list", + /* ePragTyp: */ PragTyp_TABLE_LIST, + /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1, + /* ColNames: */ 15, 6, + /* iArg: */ 0 }, {/* zName: */ "table_xinfo", /* ePragTyp: */ PragTyp_TABLE_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, @@ -128468,7 +129494,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "wal_checkpoint", /* ePragTyp: */ PragTyp_WAL_CHECKPOINT, /* ePragFlg: */ PragFlg_NeedSchema, - /* ColNames: */ 44, 3, + /* ColNames: */ 50, 3, /* iArg: */ 0 }, #endif #if 
!defined(SQLITE_OMIT_FLAG_PRAGMAS) @@ -128479,7 +129505,7 @@ static const PragmaName aPragmaName[] = { /* iArg: */ SQLITE_WriteSchema|SQLITE_NoSchemaError }, #endif }; -/* Number of pragmas: 67 on by default, 77 total. */ +/* Number of pragmas: 68 on by default, 78 total. */ /************** End of pragma.h **********************************************/ /************** Continuing where we left off in pragma.c *********************/ @@ -128921,7 +129947,11 @@ SQLITE_PRIVATE void sqlite3Pragma( /* Locate the pragma in the lookup table */ pPragma = pragmaLocate(zLeft); - if( pPragma==0 ) goto pragma_out; + if( pPragma==0 ){ + /* IMP: R-43042-22504 No error messages are generated if an + ** unknown pragma is issued. */ + goto pragma_out; + } /* Make sure the database schema is loaded if the pragma requires that */ if( (pPragma->mPragFlg & PragFlg_NeedSchema)!=0 ){ @@ -129571,6 +130601,14 @@ SQLITE_PRIVATE void sqlite3Pragma( }else{ db->flags &= ~mask; if( mask==SQLITE_DeferFKs ) db->nDeferredImmCons = 0; + if( (mask & SQLITE_WriteSchema)!=0 + && sqlite3_stricmp(zRight, "reset")==0 + ){ + /* IMP: R-60817-01178 If the argument is "RESET" then schema + ** writing is disabled (as with "PRAGMA writable_schema=OFF") and, + ** in addition, the schema is reloaded. */ + sqlite3ResetAllSchemasOfConnection(db); + } } /* Many of the flag-pragmas modify the code generated by the SQL @@ -129611,6 +130649,7 @@ SQLITE_PRIVATE void sqlite3Pragma( sqlite3ViewGetColumnNames(pParse, pTab); for(i=0, pCol=pTab->aCol; inCol; i++, pCol++){ int isHidden = 0; + const Expr *pColExpr; if( pCol->colFlags & COLFLAG_NOINSERT ){ if( pPragma->iArg==0 ){ nHidden++; @@ -129631,13 +130670,16 @@ SQLITE_PRIVATE void sqlite3Pragma( }else{ for(k=1; k<=pTab->nCol && pPk->aiColumn[k-1]!=i; k++){} } - assert( pCol->pDflt==0 || pCol->pDflt->op==TK_SPAN || isHidden>=2 ); + pColExpr = sqlite3ColumnExpr(pTab,pCol); + assert( pColExpr==0 || pColExpr->op==TK_SPAN || isHidden>=2 ); + assert( pColExpr==0 || !ExprHasProperty(pColExpr, EP_IntValue) + || isHidden>=2 ); sqlite3VdbeMultiLoad(v, 1, pPragma->iArg ? "issisii" : "issisi", i-nHidden, - pCol->zName, + pCol->zCnName, sqlite3ColumnType(pCol,""), pCol->notNull ? 1 : 0, - pCol->pDflt && isHidden<2 ? pCol->pDflt->u.zToken : 0, + (isHidden>=2 || pColExpr==0) ? 0 : pColExpr->u.zToken, k, isHidden); } @@ -129645,6 +130687,81 @@ SQLITE_PRIVATE void sqlite3Pragma( } break; + /* + ** PRAGMA table_list + ** + ** Return a single row for each table, virtual table, or view in the + ** entire schema. + ** + ** schema: Name of attached database hold this table + ** name: Name of the table itself + ** type: "table", "view", "virtual", "shadow" + ** ncol: Number of columns + ** wr: True for a WITHOUT ROWID table + ** strict: True for a STRICT table + */ + case PragTyp_TABLE_LIST: { + int ii; + pParse->nMem = 6; + sqlite3CodeVerifyNamedSchema(pParse, zDb); + for(ii=0; iinDb; ii++){ + HashElem *k; + Hash *pHash; + int initNCol; + if( zDb && sqlite3_stricmp(zDb, db->aDb[ii].zDbSName)!=0 ) continue; + + /* Ensure that the Table.nCol field is initialized for all views + ** and virtual tables. Each time we initialize a Table.nCol value + ** for a table, that can potentially disrupt the hash table, so restart + ** the initialization scan. 
+ */ + pHash = &db->aDb[ii].pSchema->tblHash; + initNCol = sqliteHashCount(pHash); + while( initNCol-- ){ + for(k=sqliteHashFirst(pHash); 1; k=sqliteHashNext(k) ){ + Table *pTab; + if( k==0 ){ initNCol = 0; break; } + pTab = sqliteHashData(k); + if( pTab->nCol==0 ){ + char *zSql = sqlite3MPrintf(db, "SELECT*FROM\"%w\"", pTab->zName); + if( zSql ){ + sqlite3_stmt *pDummy = 0; + (void)sqlite3_prepare(db, zSql, -1, &pDummy, 0); + (void)sqlite3_finalize(pDummy); + sqlite3DbFree(db, zSql); + } + pHash = &db->aDb[ii].pSchema->tblHash; + break; + } + } + } + + for(k=sqliteHashFirst(pHash); k; k=sqliteHashNext(k) ){ + Table *pTab = sqliteHashData(k); + const char *zType; + if( zRight && sqlite3_stricmp(zRight, pTab->zName)!=0 ) continue; + if( IsView(pTab) ){ + zType = "view"; + }else if( IsVirtual(pTab) ){ + zType = "virtual"; + }else if( pTab->tabFlags & TF_Shadow ){ + zType = "shadow"; + }else{ + zType = "table"; + } + sqlite3VdbeMultiLoad(v, 1, "sssiii", + db->aDb[ii].zDbSName, + sqlite3PreferredTableName(pTab->zName), + zType, + pTab->nCol, + (pTab->tabFlags & TF_WithoutRowid)!=0, + (pTab->tabFlags & TF_Strict)!=0 + ); + } + } + } + break; + #ifdef SQLITE_DEBUG case PragTyp_STATS: { Index *pIdx; @@ -129654,7 +130771,7 @@ SQLITE_PRIVATE void sqlite3Pragma( for(i=sqliteHashFirst(&pDb->pSchema->tblHash); i; i=sqliteHashNext(i)){ Table *pTab = sqliteHashData(i); sqlite3VdbeMultiLoad(v, 1, "ssiii", - pTab->zName, + sqlite3PreferredTableName(pTab->zName), 0, pTab->szTabRow, pTab->nRowLogEst, @@ -129704,7 +130821,7 @@ SQLITE_PRIVATE void sqlite3Pragma( for(i=0; iaiColumn[i]; sqlite3VdbeMultiLoad(v, 1, "iisX", i, cnum, - cnum<0 ? 0 : pTab->aCol[cnum].zName); + cnum<0 ? 0 : pTab->aCol[cnum].zCnName); if( pPragma->iArg ){ sqlite3VdbeMultiLoad(v, 4, "isiX", pIdx->aSortOrder[i], @@ -129773,11 +130890,13 @@ SQLITE_PRIVATE void sqlite3Pragma( pParse->nMem = 6; for(i=0; iu.pHash ){ + assert( p->funcFlags & SQLITE_FUNC_BUILTIN ); pragmaFunclistLine(v, p, 1, showInternFunc); } } for(j=sqliteHashFirst(&db->aFunc); j; j=sqliteHashNext(j)){ p = (FuncDef*)sqliteHashData(j); + assert( (p->funcFlags & SQLITE_FUNC_BUILTIN)==0 ); pragmaFunclistLine(v, p, 0, showInternFunc); } } @@ -129811,8 +130930,8 @@ SQLITE_PRIVATE void sqlite3Pragma( FKey *pFK; Table *pTab; pTab = sqlite3FindTable(db, zRight, zDb); - if( pTab ){ - pFK = pTab->pFKey; + if( pTab && IsOrdinaryTable(pTab) ){ + pFK = pTab->u.tab.pFKey; if( pFK ){ int iTabDb = sqlite3SchemaToIndex(db, pTab->pSchema); int i = 0; @@ -129825,7 +130944,7 @@ SQLITE_PRIVATE void sqlite3Pragma( i, j, pFK->zTo, - pTab->aCol[pFK->aCol[j].iFrom].zName, + pTab->aCol[pFK->aCol[j].iFrom].zCnName, pFK->aCol[j].zCol, actionName(pFK->aAction[1]), /* ON UPDATE */ actionName(pFK->aAction[0]), /* ON DELETE */ @@ -129871,7 +130990,7 @@ SQLITE_PRIVATE void sqlite3Pragma( pTab = (Table*)sqliteHashData(k); k = sqliteHashNext(k); } - if( pTab==0 || pTab->pFKey==0 ) continue; + if( pTab==0 || !IsOrdinaryTable(pTab) || pTab->u.tab.pFKey==0 ) continue; iDb = sqlite3SchemaToIndex(db, pTab->pSchema); zDb = db->aDb[iDb].zDbSName; sqlite3CodeVerifySchema(pParse, iDb); @@ -129879,7 +130998,8 @@ SQLITE_PRIVATE void sqlite3Pragma( if( pTab->nCol+regRow>pParse->nMem ) pParse->nMem = pTab->nCol + regRow; sqlite3OpenTable(pParse, 0, iDb, pTab, OP_OpenRead); sqlite3VdbeLoadString(v, regResult, pTab->zName); - for(i=1, pFK=pTab->pFKey; pFK; i++, pFK=pFK->pNextFrom){ + assert( IsOrdinaryTable(pTab) ); + for(i=1, pFK=pTab->u.tab.pFKey; pFK; i++, pFK=pFK->pNextFrom){ pParent = sqlite3FindTable(db, pFK->zTo, 
zDb); if( pParent==0 ) continue; pIdx = 0; @@ -129901,7 +131021,8 @@ SQLITE_PRIVATE void sqlite3Pragma( if( pFK ) break; if( pParse->nTabnTab = i; addrTop = sqlite3VdbeAddOp1(v, OP_Rewind, 0); VdbeCoverage(v); - for(i=1, pFK=pTab->pFKey; pFK; i++, pFK=pFK->pNextFrom){ + assert( IsOrdinaryTable(pTab) ); + for(i=1, pFK=pTab->u.tab.pFKey; pFK; i++, pFK=pFK->pNextFrom){ pParent = sqlite3FindTable(db, pFK->zTo, zDb); pIdx = 0; aiCols = 0; @@ -129915,6 +131036,7 @@ SQLITE_PRIVATE void sqlite3Pragma( ** regRow..regRow+n. If any of the child key values are NULL, this ** row cannot cause an FK violation. Jump directly to addrOk in ** this case. */ + if( regRow+pFK->nCol>pParse->nMem ) pParse->nMem = regRow+pFK->nCol; for(j=0; jnCol; j++){ int iCol = aiCols ? aiCols[j] : pFK->aCol[j].iFrom; sqlite3ExprCodeGetColumnOfTable(v, pTab, 0, iCol, regRow+j); @@ -130101,8 +131223,9 @@ SQLITE_PRIVATE void sqlite3Pragma( int loopTop; int iDataCur, iIdxCur; int r1 = -1; + int bStrict; - if( pTab->tnum<1 ) continue; /* Skip VIEWs or VIRTUAL TABLEs */ + if( !IsOrdinaryTable(pTab) ) continue; if( pObjTab && pObjTab!=pTab ) continue; pPk = HasRowid(pTab) ? 0 : sqlite3PrimaryKeyIndex(pTab); sqlite3OpenTableAndIndices(pParse, pTab, OP_OpenRead, 0, @@ -130122,23 +131245,48 @@ SQLITE_PRIVATE void sqlite3Pragma( /* Sanity check on record header decoding */ sqlite3VdbeAddOp3(v, OP_Column, iDataCur, pTab->nNVCol-1,3); sqlite3VdbeChangeP5(v, OPFLAG_TYPEOFARG); + VdbeComment((v, "(right-most column)")); } - /* Verify that all NOT NULL columns really are NOT NULL */ + /* Verify that all NOT NULL columns really are NOT NULL. At the + ** same time verify the type of the content of STRICT tables */ + bStrict = (pTab->tabFlags & TF_Strict)!=0; for(j=0; jnCol; j++){ char *zErr; - int jmp2; + Column *pCol = pTab->aCol + j; + int doError, jmp2; if( j==pTab->iPKey ) continue; - if( pTab->aCol[j].notNull==0 ) continue; + if( pCol->notNull==0 && !bStrict ) continue; + doError = bStrict ? 
sqlite3VdbeMakeLabel(pParse) : 0; sqlite3ExprCodeGetColumnOfTable(v, pTab, iDataCur, j, 3); if( sqlite3VdbeGetOp(v,-1)->opcode==OP_Column ){ sqlite3VdbeChangeP5(v, OPFLAG_TYPEOFARG); } - jmp2 = sqlite3VdbeAddOp1(v, OP_NotNull, 3); VdbeCoverage(v); - zErr = sqlite3MPrintf(db, "NULL value in %s.%s", pTab->zName, - pTab->aCol[j].zName); - sqlite3VdbeAddOp4(v, OP_String8, 0, 3, 0, zErr, P4_DYNAMIC); - integrityCheckResultRow(v); - sqlite3VdbeJumpHere(v, jmp2); + if( pCol->notNull ){ + jmp2 = sqlite3VdbeAddOp1(v, OP_NotNull, 3); VdbeCoverage(v); + zErr = sqlite3MPrintf(db, "NULL value in %s.%s", pTab->zName, + pCol->zCnName); + sqlite3VdbeAddOp4(v, OP_String8, 0, 3, 0, zErr, P4_DYNAMIC); + if( bStrict && pCol->eCType!=COLTYPE_ANY ){ + sqlite3VdbeGoto(v, doError); + }else{ + integrityCheckResultRow(v); + } + sqlite3VdbeJumpHere(v, jmp2); + } + if( (pTab->tabFlags & TF_Strict)!=0 + && pCol->eCType!=COLTYPE_ANY + ){ + jmp2 = sqlite3VdbeAddOp3(v, OP_IsNullOrType, 3, 0, + sqlite3StdTypeMap[pCol->eCType-1]); + VdbeCoverage(v); + zErr = sqlite3MPrintf(db, "non-%s value in %s.%s", + sqlite3StdType[pCol->eCType-1], + pTab->zName, pTab->aCol[j].zCnName); + sqlite3VdbeAddOp4(v, OP_String8, 0, 3, 0, zErr, P4_DYNAMIC); + sqlite3VdbeResolveLabel(v, doError); + integrityCheckResultRow(v); + sqlite3VdbeJumpHere(v, jmp2); + } } /* Verify CHECK constraints */ if( pTab->pCheck && (db->flags & SQLITE_IgnoreChecks)==0 ){ @@ -130672,12 +131820,12 @@ SQLITE_PRIVATE void sqlite3Pragma( case PragTyp_ANALYSIS_LIMIT: { sqlite3_int64 N; if( zRight - && sqlite3DecOrHexToI64(zRight, &N)==SQLITE_OK + && sqlite3DecOrHexToI64(zRight, &N)==SQLITE_OK /* IMP: R-40975-20399 */ && N>=0 ){ db->nAnalysisLimit = (int)(N&0x7fffffff); } - returnSingleInt(v, db->nAnalysisLimit); + returnSingleInt(v, db->nAnalysisLimit); /* IMP: R-57594-65522 */ break; } @@ -131079,10 +132227,15 @@ static void corruptSchema( pData->rc = SQLITE_NOMEM_BKPT; }else if( pData->pzErrMsg[0]!=0 ){ /* A error message has already been generated. Do not overwrite it */ - }else if( pData->mInitFlags & (INITFLAG_AlterRename|INITFLAG_AlterDrop) ){ + }else if( pData->mInitFlags & (INITFLAG_AlterMask) ){ + static const char *azAlterType[] = { + "rename", + "drop column", + "add column" + }; *pData->pzErrMsg = sqlite3MPrintf(db, "error in %s %s after %s: %s", azObj[0], azObj[1], - (pData->mInitFlags & INITFLAG_AlterRename) ? 
"rename" : "drop column", + azAlterType[(pData->mInitFlags&INITFLAG_AlterMask)-1], zExtra ); pData->rc = SQLITE_ERROR; @@ -131184,7 +132337,7 @@ SQLITE_PRIVATE int sqlite3InitCallback(void *pInit, int argc, char **argv, char } } db->init.orphanTrigger = 0; - db->init.azInit = argv; + db->init.azInit = (const char**)argv; pStmt = 0; TESTONLY(rcp = ) sqlite3Prepare(db, argv[4], -1, 0, 0, &pStmt, 0); rc = db->errCode; @@ -131203,6 +132356,7 @@ SQLITE_PRIVATE int sqlite3InitCallback(void *pInit, int argc, char **argv, char } } } + db->init.azInit = sqlite3StdType; /* Any array of string ptrs will do */ sqlite3_finalize(pStmt); }else if( argv[1]==0 || (argv[4]!=0 && argv[4][0]!=0) ){ corruptSchema(pData, argv, 0); @@ -132343,7 +133497,7 @@ SQLITE_PRIVATE int sqlite3ColumnIndex(Table *pTab, const char *zCol){ u8 h = sqlite3StrIHash(zCol); Column *pCol; for(pCol=pTab->aCol, i=0; inCol; pCol++, i++){ - if( pCol->hName==h && sqlite3StrICmp(pCol->zName, zCol)==0 ) return i; + if( pCol->hName==h && sqlite3StrICmp(pCol->zCnName, zCol)==0 ) return i; } return -1; } @@ -132419,6 +133573,9 @@ static void addWhereTerm( pE2 = sqlite3CreateColumnExpr(db, pSrc, iRight, iColRight); pEq = sqlite3PExpr(pParse, TK_EQ, pE1, pE2); + assert( pE2!=0 || pEq==0 ); /* Due to db->mallocFailed test + ** in sqlite3DbMallocRawNN() called from + ** sqlite3PExpr(). */ if( pEq && isOuterJoin ){ ExprSetProperty(pEq, EP_FromJoin); assert( !ExprHasProperty(pEq, EP_TokenOnly|EP_Reduced) ); @@ -132460,10 +133617,13 @@ SQLITE_PRIVATE void sqlite3SetJoinExpr(Expr *p, int iTable){ assert( !ExprHasProperty(p, EP_TokenOnly|EP_Reduced) ); ExprSetVVAProperty(p, EP_NoReduce); p->iRightJoinTable = iTable; - if( p->op==TK_FUNCTION && p->x.pList ){ - int i; - for(i=0; ix.pList->nExpr; i++){ - sqlite3SetJoinExpr(p->x.pList->a[i].pExpr, iTable); + if( p->op==TK_FUNCTION ){ + assert( ExprUseXList(p) ); + if( p->x.pList ){ + int i; + for(i=0; ix.pList->nExpr; i++){ + sqlite3SetJoinExpr(p->x.pList->a[i].pExpr, iTable); + } } } sqlite3SetJoinExpr(p->pLeft, iTable); @@ -132486,10 +133646,13 @@ static void unsetJoinExpr(Expr *p, int iTable){ if( p->op==TK_COLUMN && p->iTable==iTable ){ ExprClearProperty(p, EP_CanBeNull); } - if( p->op==TK_FUNCTION && p->x.pList ){ - int i; - for(i=0; ix.pList->nExpr; i++){ - unsetJoinExpr(p->x.pList->a[i].pExpr, iTable); + if( p->op==TK_FUNCTION ){ + assert( ExprUseXList(p) ); + if( p->x.pList ){ + int i; + for(i=0; ix.pList->nExpr; i++){ + unsetJoinExpr(p->x.pList->a[i].pExpr, iTable); + } } } unsetJoinExpr(p->pLeft, iTable); @@ -132542,7 +133705,7 @@ static int sqliteProcessJoin(Parse *pParse, Select *p){ int iLeftCol; /* Matching column in the left table */ if( IsHiddenColumn(&pRightTab->aCol[j]) ) continue; - zName = pRightTab->aCol[j].zName; + zName = pRightTab->aCol[j].zCnName; if( tableAndColumnIndex(pSrc, i+1, zName, &iLeft, &iLeftCol, 1) ){ addWhereTerm(pParse, pSrc, iLeft, iLeftCol, i+1, j, isOuter, &p->pWhere); @@ -132945,7 +134108,9 @@ static void fixDistinctOpenEph( int iVal, /* Value returned by codeDistinct() */ int iOpenEphAddr /* Address of OP_OpenEphemeral instruction for iTab */ ){ - if( eTnctType==WHERE_DISTINCT_UNIQUE || eTnctType==WHERE_DISTINCT_ORDERED ){ + if( pParse->nErr==0 + && (eTnctType==WHERE_DISTINCT_UNIQUE || eTnctType==WHERE_DISTINCT_ORDERED) + ){ Vdbe *v = pParse->pVdbe; sqlite3VdbeChangeToNoop(v, iOpenEphAddr); if( sqlite3VdbeGetOp(v, iOpenEphAddr+1)->opcode==OP_Explain ){ @@ -133002,9 +134167,13 @@ static void selectExprDefer( struct ExprList_item *pItem = &pEList->a[i]; if( 
pItem->u.x.iOrderByCol==0 ){ Expr *pExpr = pItem->pExpr; - Table *pTab = pExpr->y.pTab; - if( pExpr->op==TK_COLUMN && pExpr->iColumn>=0 && pTab && !IsVirtual(pTab) - && (pTab->aCol[pExpr->iColumn].colFlags & COLFLAG_SORTERREF) + Table *pTab; + if( pExpr->op==TK_COLUMN + && pExpr->iColumn>=0 + && ALWAYS( ExprUseYTab(pExpr) ) + && (pTab = pExpr->y.pTab)!=0 + && IsOrdinaryTable(pTab) + && (pTab->aCol[pExpr->iColumn].colFlags & COLFLAG_SORTERREF)!=0 ){ int j; for(j=0; jiTable = pExpr->iTable; + assert( ExprUseYTab(pNew) ); pNew->y.pTab = pExpr->y.pTab; pNew->iColumn = pPk ? pPk->aiColumn[k] : -1; pExtra = sqlite3ExprListAppend(pParse, pExtra, pNew); @@ -133873,7 +135043,7 @@ static const char *columnTypeImpl( break; } - assert( pTab && pExpr->y.pTab==pTab ); + assert( pTab && ExprUseYTab(pExpr) && pExpr->y.pTab==pTab ); if( pS ){ /* The "table" is actually a sub-select or a view in the FROM clause ** of the SELECT statement. Return the declaration type and origin @@ -133907,7 +135077,7 @@ static const char *columnTypeImpl( zType = "INTEGER"; zOrigCol = "rowid"; }else{ - zOrigCol = pTab->aCol[iCol].zName; + zOrigCol = pTab->aCol[iCol].zCnName; zType = sqlite3ColumnType(&pTab->aCol[iCol],0); } zOrigTab = pTab->zName; @@ -133933,9 +135103,11 @@ static const char *columnTypeImpl( ** statement. */ NameContext sNC; - Select *pS = pExpr->x.pSelect; - Expr *p = pS->pEList->a[0].pExpr; - assert( ExprHasProperty(pExpr, EP_xIsSelect) ); + Select *pS; + Expr *p; + assert( ExprUseXSelect(pExpr) ); + pS = pExpr->x.pSelect; + p = pS->pEList->a[0].pExpr; sNC.pSrcList = pS->pSrc; sNC.pNext = pNC; sNC.pParse = pNC->pParse; @@ -134064,7 +135236,8 @@ SQLITE_PRIVATE void sqlite3GenerateColumnNames( assert( p!=0 ); assert( p->op!=TK_AGG_COLUMN ); /* Agg processing has not run yet */ - assert( p->op!=TK_COLUMN || p->y.pTab!=0 ); /* Covering idx not yet coded */ + assert( p->op!=TK_COLUMN + || (ExprUseYTab(p) && p->y.pTab!=0) ); /* Covering idx not yet coded */ if( pEList->a[i].zEName && pEList->a[i].eEName==ENAME_NAME ){ /* An AS clause always takes first priority */ char *zName = pEList->a[i].zEName; @@ -134079,7 +135252,7 @@ SQLITE_PRIVATE void sqlite3GenerateColumnNames( if( iCol<0 ){ zCol = "rowid"; }else{ - zCol = pTab->aCol[iCol].zName; + zCol = pTab->aCol[iCol].zCnName; } if( fullName ){ char *zName = 0; @@ -134160,11 +135333,14 @@ SQLITE_PRIVATE int sqlite3ColumnsFromExprList( pColExpr = pColExpr->pRight; assert( pColExpr!=0 ); } - if( pColExpr->op==TK_COLUMN && (pTab = pColExpr->y.pTab)!=0 ){ + if( pColExpr->op==TK_COLUMN + && ALWAYS( ExprUseYTab(pColExpr) ) + && (pTab = pColExpr->y.pTab)!=0 + ){ /* For columns use the column name name */ int iCol = pColExpr->iColumn; if( iCol<0 ) iCol = pTab->iPKey; - zName = iCol>=0 ? pTab->aCol[iCol].zName : "rowid"; + zName = iCol>=0 ? 
pTab->aCol[iCol].zCnName : "rowid"; }else if( pColExpr->op==TK_ID ){ assert( !ExprHasProperty(pColExpr, EP_IntValue) ); zName = pColExpr->u.zToken; @@ -134192,7 +135368,7 @@ SQLITE_PRIVATE int sqlite3ColumnsFromExprList( zName = sqlite3MPrintf(db, "%.*z:%u", nName, zName, ++cnt); if( cnt>3 ) sqlite3_randomness(sizeof(cnt), &cnt); } - pCol->zName = zName; + pCol->zCnName = zName; pCol->hName = sqlite3StrIHash(zName); sqlite3ColumnPropertiesFromName(0, pCol); if( zName && sqlite3HashInsert(&ht, zName, pCol)==pCol ){ @@ -134202,7 +135378,7 @@ SQLITE_PRIVATE int sqlite3ColumnsFromExprList( sqlite3HashClear(&ht); if( db->mallocFailed ){ for(j=0; jpEList->a; for(i=0, pCol=pTab->aCol; inCol; i++, pCol++){ const char *zType; - int n, m; + i64 n, m; pTab->tabFlags |= (pCol->colFlags & COLFLAG_NOINSERT); p = a[i].pExpr; zType = columnType(&sNC, p, 0, 0, 0); @@ -134254,17 +135430,21 @@ SQLITE_PRIVATE void sqlite3SelectAddColumnTypeAndCollation( pCol->affinity = sqlite3ExprAffinity(p); if( zType ){ m = sqlite3Strlen30(zType); - n = sqlite3Strlen30(pCol->zName); - pCol->zName = sqlite3DbReallocOrFree(db, pCol->zName, n+m+2); - if( pCol->zName ){ - memcpy(&pCol->zName[n+1], zType, m+1); + n = sqlite3Strlen30(pCol->zCnName); + pCol->zCnName = sqlite3DbReallocOrFree(db, pCol->zCnName, n+m+2); + if( pCol->zCnName ){ + memcpy(&pCol->zCnName[n+1], zType, m+1); pCol->colFlags |= COLFLAG_HASTYPE; + }else{ + testcase( pCol->colFlags & COLFLAG_HASTYPE ); + pCol->colFlags &= ~(COLFLAG_HASTYPE|COLFLAG_HASCOLL); } } if( pCol->affinity<=SQLITE_AFF_NONE ) pCol->affinity = aff; pColl = sqlite3ExprCollSeq(pParse, p); - if( pColl && pCol->zColl==0 ){ - pCol->zColl = sqlite3DbStrDup(db, pColl->zName); + if( pColl ){ + assert( pTab->pIndex==0 ); + sqlite3ColumnSetColl(db, pCol, pColl->zName); } } pTab->szTabRow = 1; /* Any non-zero value works */ @@ -134428,7 +135608,7 @@ static CollSeq *multiSelectCollSeq(Parse *pParse, Select *p, int iCol){ */ static KeyInfo *multiSelectOrderByKeyInfo(Parse *pParse, Select *p, int nExtra){ ExprList *pOrderBy = p->pOrderBy; - int nOrderBy = p->pOrderBy->nExpr; + int nOrderBy = ALWAYS(pOrderBy!=0) ? pOrderBy->nExpr : 0; sqlite3 *db = pParse->db; KeyInfo *pRet = sqlite3KeyInfoAlloc(db, nOrderBy+nExtra, 1); if( pRet ){ @@ -134500,7 +135680,7 @@ static void generateWithRecursiveQuery( SrcList *pSrc = p->pSrc; /* The FROM clause of the recursive query */ int nCol = p->pEList->nExpr; /* Number of columns in the recursive table */ Vdbe *v = pParse->pVdbe; /* The prepared statement under construction */ - Select *pSetup = p->pPrior; /* The setup query */ + Select *pSetup; /* The setup query */ Select *pFirstRec; /* Left-most recursive term */ int addrTop; /* Top of the loop */ int addrCont, addrBreak; /* CONTINUE and BREAK addresses */ @@ -134584,7 +135764,6 @@ static void generateWithRecursiveQuery( ** iDistinct table. pFirstRec is left pointing to the left-most ** recursive term of the CTE. 
*/ - pFirstRec = p; for(pFirstRec=p; ALWAYS(pFirstRec!=0); pFirstRec=pFirstRec->pPrior){ if( pFirstRec->selFlags & SF_Aggregate ){ sqlite3ErrorMsg(pParse, "recursive aggregate queries not supported"); @@ -135050,6 +136229,7 @@ static int multiSelect( int nCol; /* Number of columns in result set */ assert( p->pNext==0 ); + assert( p->pEList!=0 ); nCol = p->pEList->nExpr; pKeyInfo = sqlite3KeyInfoAlloc(db, nCol, 1); if( !pKeyInfo ){ @@ -135084,7 +136264,11 @@ static int multiSelect( multi_select_end: pDest->iSdst = dest.iSdst; pDest->nSdst = dest.nSdst; - sqlite3SelectDelete(db, pDelete); + if( pDelete ){ + sqlite3ParserAddCleanup(pParse, + (void(*)(sqlite3*,void*))sqlite3SelectDelete, + pDelete); + } return rc; } #endif /* SQLITE_OMIT_COMPOUND_SELECT */ @@ -135397,6 +136581,7 @@ static int multiSelectOrderBy( for(i=1; db->mallocFailed==0 && i<=p->pEList->nExpr; i++){ struct ExprList_item *pItem; for(j=0, pItem=pOrderBy->a; ju.x.iOrderByCol>0 ); if( pItem->u.x.iOrderByCol==i ) break; } @@ -135423,6 +136608,7 @@ static int multiSelectOrderBy( struct ExprList_item *pItem; aPermute[0] = nOrderBy; for(i=1, pItem=pOrderBy->a; i<=nOrderBy; i++, pItem++){ + assert( pItem!=0 ); assert( pItem->u.x.iOrderByCol>0 ); assert( pItem->u.x.iOrderByCol<=p->pEList->nExpr ); aPermute[i] = pItem->u.x.iOrderByCol - 1; @@ -135735,7 +136921,7 @@ static Expr *substExpr( } pExpr->pLeft = substExpr(pSubst, pExpr->pLeft); pExpr->pRight = substExpr(pSubst, pExpr->pRight); - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ substSelect(pSubst, pExpr->x.pSelect, 1); }else{ substExprList(pSubst, pExpr->x.pList); @@ -135826,10 +137012,10 @@ static void recomputeColumnsUsed( ** new cursor number assigned, set an entry in the aCsrMap[] array ** to map the old cursor number to the new: ** -** aCsrMap[iOld] = iNew; +** aCsrMap[iOld+1] = iNew; ** ** The array is guaranteed by the caller to be large enough for all -** existing cursor numbers in pSrc. +** existing cursor numbers in pSrc. aCsrMap[0] is the array size. ** ** If pSrc contains any sub-selects, call this routine recursively ** on the FROM clause of each such sub-select, with iExcept set to -1. @@ -135845,10 +137031,11 @@ static void srclistRenumberCursors( for(i=0, pItem=pSrc->a; inSrc; i++, pItem++){ if( i!=iExcept ){ Select *p; - if( !pItem->fg.isRecursive || aCsrMap[pItem->iCursor]==0 ){ - aCsrMap[pItem->iCursor] = pParse->nTab++; + assert( pItem->iCursor < aCsrMap[0] ); + if( !pItem->fg.isRecursive || aCsrMap[pItem->iCursor+1]==0 ){ + aCsrMap[pItem->iCursor+1] = pParse->nTab++; } - pItem->iCursor = aCsrMap[pItem->iCursor]; + pItem->iCursor = aCsrMap[pItem->iCursor+1]; for(p=pItem->pSelect; p; p=p->pPrior){ srclistRenumberCursors(pParse, aCsrMap, p->pSrc, -1); } @@ -135856,18 +137043,28 @@ static void srclistRenumberCursors( } } +/* +** *piCursor is a cursor number. Change it if it needs to be mapped. +*/ +static void renumberCursorDoMapping(Walker *pWalker, int *piCursor){ + int *aCsrMap = pWalker->u.aiCol; + int iCsr = *piCursor; + if( iCsr < aCsrMap[0] && aCsrMap[iCsr+1]>0 ){ + *piCursor = aCsrMap[iCsr+1]; + } +} + /* ** Expression walker callback used by renumberCursors() to update ** Expr objects to match newly assigned cursor numbers. 
*/ static int renumberCursorsCb(Walker *pWalker, Expr *pExpr){ - int *aCsrMap = pWalker->u.aiCol; int op = pExpr->op; - if( (op==TK_COLUMN || op==TK_IF_NULL_ROW) && aCsrMap[pExpr->iTable] ){ - pExpr->iTable = aCsrMap[pExpr->iTable]; + if( op==TK_COLUMN || op==TK_IF_NULL_ROW ){ + renumberCursorDoMapping(pWalker, &pExpr->iTable); } - if( ExprHasProperty(pExpr, EP_FromJoin) && aCsrMap[pExpr->iRightJoinTable] ){ - pExpr->iRightJoinTable = aCsrMap[pExpr->iRightJoinTable]; + if( ExprHasProperty(pExpr, EP_FromJoin) ){ + renumberCursorDoMapping(pWalker, &pExpr->iRightJoinTable); } return WRC_Continue; } @@ -136211,7 +137408,8 @@ static int flattenSubquery( if( pSrc->nSrc>1 ){ if( pParse->nSelect>500 ) return 0; - aCsrMap = sqlite3DbMallocZero(db, pParse->nTab*sizeof(int)); + aCsrMap = sqlite3DbMallocZero(db, ((i64)pParse->nTab+1)*sizeof(int)); + if( aCsrMap ) aCsrMap[0] = pParse->nTab; } } @@ -136934,7 +138132,7 @@ static int pushDownWhereTerms( */ static u8 minMaxQuery(sqlite3 *db, Expr *pFunc, ExprList **ppMinMax){ int eRet = WHERE_ORDERBY_NORMAL; /* Return value */ - ExprList *pEList = pFunc->x.pList; /* Arguments to agg function */ + ExprList *pEList; /* Arguments to agg function */ const char *zFunc; /* Name of aggregate function pFunc */ ExprList *pOrderBy; u8 sortFlags = 0; @@ -136942,6 +138140,8 @@ static u8 minMaxQuery(sqlite3 *db, Expr *pFunc, ExprList **ppMinMax){ assert( *ppMinMax==0 ); assert( pFunc->op==TK_AGG_FUNCTION ); assert( !IsWindowFunc(pFunc) ); + assert( ExprUseXList(pFunc) ); + pEList = pFunc->x.pList; if( pEList==0 || pEList->nExpr!=1 || ExprHasProperty(pFunc, EP_WinFunc) @@ -136949,6 +138149,7 @@ static u8 minMaxQuery(sqlite3 *db, Expr *pFunc, ExprList **ppMinMax){ ){ return eRet; } + assert( !ExprHasProperty(pFunc, EP_IntValue) ); zFunc = pFunc->u.zToken; if( sqlite3StrICmp(zFunc, "min")==0 ){ eRet = WHERE_ORDERBY_MIN; @@ -136976,7 +138177,13 @@ static u8 minMaxQuery(sqlite3 *db, Expr *pFunc, ExprList **ppMinMax){ ** ** where table is a database table, not a sub-select or view. If the query ** does match this pattern, then a pointer to the Table object representing -** is returned. Otherwise, 0 is returned. +** is returned. Otherwise, NULL is returned. +** +** This routine checks to see if it is safe to use the count optimization. +** A correct answer is still obtained (though perhaps more slowly) if +** this routine returns NULL when it could have returned a table pointer. +** But returning the pointer when NULL should have been returned can +** result in incorrect answers and/or crashes. So, when in doubt, return NULL. 
*/ static Table *isSimpleCount(Select *p, AggInfo *pAggInfo){ Table *pTab; @@ -136984,19 +138191,26 @@ static Table *isSimpleCount(Select *p, AggInfo *pAggInfo){ assert( !p->pGroupBy ); - if( p->pWhere || p->pEList->nExpr!=1 - || p->pSrc->nSrc!=1 || p->pSrc->a[0].pSelect + if( p->pWhere + || p->pEList->nExpr!=1 + || p->pSrc->nSrc!=1 + || p->pSrc->a[0].pSelect + || pAggInfo->nFunc!=1 ){ return 0; } pTab = p->pSrc->a[0].pTab; + assert( pTab!=0 ); + assert( !IsView(pTab) ); + if( !IsOrdinaryTable(pTab) ) return 0; pExpr = p->pEList->a[0].pExpr; - assert( pTab && !pTab->pSelect && pExpr ); - - if( IsVirtual(pTab) ) return 0; + assert( pExpr!=0 ); if( pExpr->op!=TK_AGG_FUNCTION ) return 0; - if( NEVER(pAggInfo->nFunc==0) ) return 0; + if( pExpr->pAggInfo!=pAggInfo ) return 0; if( (pAggInfo->aFunc[0].pFunc->funcFlags&SQLITE_FUNC_COUNT)==0 ) return 0; + assert( pAggInfo->aFunc[0].pFExpr==pExpr ); + testcase( ExprHasProperty(pExpr, EP_Distinct) ); + testcase( ExprHasProperty(pExpr, EP_WinFunc) ); if( ExprHasProperty(pExpr, EP_Distinct|EP_WinFunc) ) return 0; return pTab; @@ -137025,6 +138239,7 @@ SQLITE_PRIVATE int sqlite3IndexedByLookup(Parse *pParse, SrcItem *pFrom){ pParse->checkSchema = 1; return SQLITE_ERROR; } + assert( pFrom->fg.isCte==0 ); pFrom->u2.pIBIndex = pIdx; return SQLITE_OK; } @@ -137282,6 +138497,10 @@ static int resolveFromTermToCte( if( db->mallocFailed ) return 2; pFrom->pSelect->selFlags |= SF_CopyCte; assert( pFrom->pSelect ); + if( pFrom->fg.isIndexedBy ){ + sqlite3ErrorMsg(pParse, "no such index: \"%s\"", pFrom->u1.zIndexedBy); + return 2; + } pFrom->fg.isCte = 1; pFrom->u2.pCteUse = pCteUse; pCteUse->nUse++; @@ -137536,30 +138755,31 @@ static int selectExpander(Walker *pWalker, Select *p){ return WRC_Abort; } #if !defined(SQLITE_OMIT_VIEW) || !defined(SQLITE_OMIT_VIRTUALTABLE) - if( IsVirtual(pTab) || pTab->pSelect ){ + if( !IsOrdinaryTable(pTab) ){ i16 nCol; u8 eCodeOrig = pWalker->eCode; if( sqlite3ViewGetColumnNames(pParse, pTab) ) return WRC_Abort; assert( pFrom->pSelect==0 ); - if( pTab->pSelect - && (db->flags & SQLITE_EnableView)==0 - && pTab->pSchema!=db->aDb[1].pSchema - ){ - sqlite3ErrorMsg(pParse, "access to view \"%s\" prohibited", - pTab->zName); + if( IsView(pTab) ){ + if( (db->flags & SQLITE_EnableView)==0 + && pTab->pSchema!=db->aDb[1].pSchema + ){ + sqlite3ErrorMsg(pParse, "access to view \"%s\" prohibited", + pTab->zName); + } + pFrom->pSelect = sqlite3SelectDup(db, pTab->u.view.pSelect, 0); } #ifndef SQLITE_OMIT_VIRTUALTABLE - assert( SQLITE_VTABRISK_Normal==1 && SQLITE_VTABRISK_High==2 ); - if( IsVirtual(pTab) + else if( ALWAYS(IsVirtual(pTab)) && pFrom->fg.fromDDL - && ALWAYS(pTab->pVTable!=0) - && pTab->pVTable->eVtabRisk > ((db->flags & SQLITE_TrustedSchema)!=0) + && ALWAYS(pTab->u.vtab.p!=0) + && pTab->u.vtab.p->eVtabRisk > ((db->flags & SQLITE_TrustedSchema)!=0) ){ sqlite3ErrorMsg(pParse, "unsafe use of virtual table \"%s\"", pTab->zName); } + assert( SQLITE_VTABRISK_Normal==1 && SQLITE_VTABRISK_High==2 ); #endif - pFrom->pSelect = sqlite3SelectDup(db, pTab->pSelect, 0); nCol = pTab->nCol; pTab->nCol = -1; pWalker->eCode = 1; /* Turn on Select.selId renumbering */ @@ -137659,7 +138879,7 @@ static int selectExpander(Walker *pWalker, Select *p){ zSchemaName = iDb>=0 ? 
db->aDb[iDb].zDbSName : "*"; } for(j=0; jnCol; j++){ - char *zName = pTab->aCol[j].zName; + char *zName = pTab->aCol[j].zCnName; char *zColname; /* The computed column name */ char *zToFree; /* Malloced string that needs to be freed */ Token sColname; /* Computed column name as a token */ @@ -137916,7 +139136,7 @@ static void resetAccumulator(Parse *pParse, AggInfo *pAggInfo){ for(pFunc=pAggInfo->aFunc, i=0; inFunc; i++, pFunc++){ if( pFunc->iDistinct>=0 ){ Expr *pE = pFunc->pFExpr; - assert( !ExprHasProperty(pE, EP_xIsSelect) ); + assert( ExprUseXList(pE) ); if( pE->x.pList==0 || pE->x.pList->nExpr!=1 ){ sqlite3ErrorMsg(pParse, "DISTINCT aggregates must have exactly one " "argument"); @@ -137941,8 +139161,9 @@ static void finalizeAggFunctions(Parse *pParse, AggInfo *pAggInfo){ int i; struct AggInfo_func *pF; for(i=0, pF=pAggInfo->aFunc; inFunc; i++, pF++){ - ExprList *pList = pF->pFExpr->x.pList; - assert( !ExprHasProperty(pF->pFExpr, EP_xIsSelect) ); + ExprList *pList; + assert( ExprUseXList(pF->pFExpr) ); + pList = pF->pFExpr->x.pList; sqlite3VdbeAddOp2(v, OP_AggFinal, pF->iMem, pList ? pList->nExpr : 0); sqlite3VdbeAppendP4(v, pF->pFunc, P4_FUNCDEF); } @@ -137976,9 +139197,10 @@ static void updateAccumulator( int nArg; int addrNext = 0; int regAgg; - ExprList *pList = pF->pFExpr->x.pList; - assert( !ExprHasProperty(pF->pFExpr, EP_xIsSelect) ); + ExprList *pList; + assert( ExprUseXList(pF->pFExpr) ); assert( !IsWindowFunc(pF->pFExpr) ); + pList = pF->pFExpr->x.pList; if( ExprHasProperty(pF->pFExpr, EP_WinFunc) ){ Expr *pFilter = pF->pFExpr->y.pWin->pFilter; if( pAggInfo->nAccumulator @@ -138091,8 +139313,16 @@ static void explainSimpleCount( static int havingToWhereExprCb(Walker *pWalker, Expr *pExpr){ if( pExpr->op!=TK_AND ){ Select *pS = pWalker->u.pSelect; + /* This routine is called before the HAVING clause of the current + ** SELECT is analyzed for aggregates. So if pExpr->pAggInfo is set + ** here, it indicates that the expression is a correlated reference to a + ** column from an outer aggregate query, or an aggregate function that + ** belongs to an outer query. Do not move the expression to the WHERE + ** clause in this obscure case, as doing so may corrupt the outer Select + ** statements AggInfo structure. */ if( sqlite3ExprIsConstantOrGroupBy(pWalker->pParse, pExpr, pS->pGroupBy) && ExprAlwaysFalse(pExpr)==0 + && pExpr->pAggInfo==0 ){ sqlite3 *db = pWalker->pParse->db; Expr *pNew = sqlite3Expr(db, TK_INTEGER, "1"); @@ -138216,7 +139446,9 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ if( p->pGroupBy ) return 0; pExpr = p->pEList->a[0].pExpr; if( pExpr->op!=TK_AGG_FUNCTION ) return 0; /* Result is an aggregate */ + assert( ExprUseUToken(pExpr) ); if( sqlite3_stricmp(pExpr->u.zToken,"count") ) return 0; /* Is count() */ + assert( ExprUseXList(pExpr) ); if( pExpr->x.pList!=0 ) return 0; /* Must be count(*) */ if( p->pSrc->nSrc!=1 ) return 0; /* One table in FROM */ pSub = p->pSrc->a[0].pSelect; @@ -138358,11 +139590,16 @@ SQLITE_PRIVATE int sqlite3Select( } #endif - /* If the SF_UpdateFrom flag is set, then this function is being called + /* If the SF_UFSrcCheck flag is set, then this function is being called ** as part of populating the temp table for an UPDATE...FROM statement. ** In this case, it is an error if the target object (pSrc->a[0]) name - ** or alias is duplicated within FROM clause (pSrc->a[1..n]). */ - if( p->selFlags & SF_UpdateFrom ){ + ** or alias is duplicated within FROM clause (pSrc->a[1..n]). + ** + ** Postgres disallows this case too. 
The reason is that some other + ** systems handle this case differently, and not all the same way, + ** which is just confusing. To avoid this, we follow PG's lead and + ** disallow it altogether. */ + if( p->selFlags & SF_UFSrcCheck ){ SrcItem *p0 = &p->pSrc->a[0]; for(i=1; ipSrc->nSrc; i++){ SrcItem *p1 = &p->pSrc->a[i]; @@ -138374,6 +139611,12 @@ SQLITE_PRIVATE int sqlite3Select( goto select_end; } } + + /* Clear the SF_UFSrcCheck flag. The check has already been performed, + ** and leaving this flag set can cause errors if a compound sub-query + ** in p->pSrc is flattened into this query and this function called + ** again as part of compound SELECT processing. */ + p->selFlags &= ~SF_UFSrcCheck; } if( pDest->eDest==SRT_Output ){ @@ -138445,6 +139688,39 @@ SQLITE_PRIVATE int sqlite3Select( if( (pSub->selFlags & SF_Aggregate)!=0 ) continue; assert( pSub->pGroupBy==0 ); + /* If a FROM-clause subquery has an ORDER BY clause that is not + ** really doing anything, then delete it now so that it does not + ** interfere with query flattening. See the discussion at + ** https://sqlite.org/forum/forumpost/2d76f2bcf65d256a + ** + ** Beware of these cases where the ORDER BY clause may not be safely + ** omitted: + ** + ** (1) There is also a LIMIT clause + ** (2) The subquery was added to help with window-function + ** processing + ** (3) The subquery is in the FROM clause of an UPDATE + ** (4) The outer query uses an aggregate function other than + ** the built-in count(), min(), or max(). + ** (5) The ORDER BY isn't going to accomplish anything because + ** one of: + ** (a) The outer query has a different ORDER BY clause + ** (b) The subquery is part of a join + ** See forum post 062d576715d277c8 + */ + if( pSub->pOrderBy!=0 + && (p->pOrderBy!=0 || pTabList->nSrc>1) /* Condition (5) */ + && pSub->pLimit==0 /* Condition (1) */ + && (pSub->selFlags & SF_OrderByReqd)==0 /* Condition (2) */ + && (p->selFlags & SF_OrderByReqd)==0 /* Condition (3) and (4) */ + && OptimizationEnabled(db, SQLITE_OmitOrderBy) + ){ + SELECTTRACE(0x100,pParse,p, + ("omit superfluous ORDER BY on %r FROM-clause subquery\n",i+1)); + sqlite3ExprListDelete(db, pSub->pOrderBy); + pSub->pOrderBy = 0; + } + /* If the outer query contains a "complex" result set (that is, ** if the result set of the outer query uses functions or subqueries) ** and if the subquery contains an ORDER BY clause and if @@ -138587,7 +139863,8 @@ SQLITE_PRIVATE int sqlite3Select( ** inside the subquery. This can help the subquery to run more efficiently. 
*/ if( OptimizationEnabled(db, SQLITE_PushDown) - && (pItem->fg.isCte==0 || pItem->u2.pCteUse->eM10d!=M10d_Yes) + && (pItem->fg.isCte==0 + || (pItem->u2.pCteUse->eM10d!=M10d_Yes && pItem->u2.pCteUse->nUse<2)) && pushDownWhereTerms(pParse, pSub, p->pWhere, pItem->iCursor, (pItem->fg.jointype & JT_OUTER)!=0) ){ @@ -138648,6 +139925,7 @@ SQLITE_PRIVATE int sqlite3Select( sqlite3VdbeAddOp2(v, OP_Gosub, pCteUse->regRtn, pCteUse->addrM9e); if( pItem->iCursor!=pCteUse->iCur ){ sqlite3VdbeAddOp2(v, OP_OpenDup, pItem->iCursor, pCteUse->iCur); + VdbeComment((v, "%!S", pItem)); } pSub->nSelectRow = pCteUse->nRowEst; }else if( (pPrior = isSelfJoinView(pTabList, pItem))!=0 ){ @@ -138985,7 +140263,7 @@ SQLITE_PRIVATE int sqlite3Select( } for(i=0; inFunc; i++){ Expr *pExpr = pAggInfo->aFunc[i].pFExpr; - assert( !ExprHasProperty(pExpr, EP_xIsSelect) ); + assert( ExprUseXList(pExpr) ); sNC.ncFlags |= NC_InAggFunc; sqlite3ExprAnalyzeAggList(&sNC, pExpr->x.pList); #ifndef SQLITE_OMIT_WINDOWFUNC @@ -139040,7 +140318,9 @@ SQLITE_PRIVATE int sqlite3Select( if( pAggInfo->nFunc==1 && pAggInfo->aFunc[0].iDistinct>=0 - && pAggInfo->aFunc[0].pFExpr->x.pList + && ALWAYS(pAggInfo->aFunc[0].pFExpr!=0) + && ALWAYS(ExprUseXList(pAggInfo->aFunc[0].pFExpr)) + && pAggInfo->aFunc[0].pFExpr->x.pList!=0 ){ Expr *pExpr = pAggInfo->aFunc[0].pFExpr->x.pList->a[0].pExpr; pExpr = sqlite3ExprDup(db, pExpr, 0); @@ -139361,6 +140641,7 @@ SQLITE_PRIVATE int sqlite3Select( sqlite3VdbeAddOp2(v, OP_Integer, 0, regAcc); } }else if( pAggInfo->nFunc==1 && pAggInfo->aFunc[0].iDistinct>=0 ){ + assert( ExprUseXList(pAggInfo->aFunc[0].pFExpr) ); pDistinct = pAggInfo->aFunc[0].pFExpr->x.pList; distFlag = pDistinct ? (WHERE_WANT_DISTINCT|WHERE_AGG_DISTINCT) : 0; } @@ -139892,12 +141173,12 @@ SQLITE_PRIVATE void sqlite3BeginTrigger( /* INSTEAD of triggers are only for views and views only support INSTEAD ** of triggers. */ - if( pTab->pSelect && tr_tm!=TK_INSTEAD ){ + if( IsView(pTab) && tr_tm!=TK_INSTEAD ){ sqlite3ErrorMsg(pParse, "cannot create %s trigger on view: %S", (tr_tm == TK_BEFORE)?"BEFORE":"AFTER", pTableName->a); goto trigger_orphan_error; } - if( !pTab->pSelect && tr_tm==TK_INSTEAD ){ + if( !IsView(pTab) && tr_tm==TK_INSTEAD ){ sqlite3ErrorMsg(pParse, "cannot create INSTEAD OF" " trigger on table: %S", pTableName->a); goto trigger_orphan_error; @@ -140034,7 +141315,7 @@ SQLITE_PRIVATE void sqlite3FinishTrigger( z = sqlite3DbStrNDup(db, (char*)pAll->z, pAll->n); testcase( z==0 ); sqlite3NestedParse(pParse, - "INSERT INTO %Q." DFLT_SCHEMA_TABLE + "INSERT INTO %Q." LEGACY_SCHEMA_TABLE " VALUES('trigger',%Q,%Q,0,'CREATE TRIGGER %q')", db->aDb[iDb].zDbSName, zName, pTrig->table, z); @@ -140348,7 +141629,7 @@ SQLITE_PRIVATE void sqlite3DropTriggerPtr(Parse *pParse, Trigger *pTrigger){ */ if( (v = sqlite3GetVdbe(pParse))!=0 ){ sqlite3NestedParse(pParse, - "DELETE FROM %Q." DFLT_SCHEMA_TABLE " WHERE name=%Q AND type='trigger'", + "DELETE FROM %Q." 
LEGACY_SCHEMA_TABLE " WHERE name=%Q AND type='trigger'", db->aDb[iDb].zDbSName, pTrigger->zName ); sqlite3ChangeCookie(pParse, iDb); @@ -140550,11 +141831,11 @@ static ExprList *sqlite3ExpandReturning( for(jj=0; jjnCol; jj++){ Expr *pNewExpr; if( IsHiddenColumn(pTab->aCol+jj) ) continue; - pNewExpr = sqlite3Expr(db, TK_ID, pTab->aCol[jj].zName); + pNewExpr = sqlite3Expr(db, TK_ID, pTab->aCol[jj].zCnName); pNew = sqlite3ExprListAppend(pParse, pNew, pNewExpr); if( !db->mallocFailed ){ struct ExprList_item *pItem = &pNew->a[pNew->nExpr-1]; - pItem->zEName = sqlite3DbStrDup(db, pTab->aCol[jj].zName); + pItem->zEName = sqlite3DbStrDup(db, pTab->aCol[jj].zCnName); pItem->eEName = ENAME_NAME; } } @@ -140599,6 +141880,7 @@ static void codeReturningTrigger( sSelect.pSrc = &sFrom; sFrom.nSrc = 1; sFrom.a[0].pTab = pTab; + sFrom.a[0].iCursor = -1; sqlite3SelectPrep(pParse, &sSelect, 0); if( db->mallocFailed==0 && pParse->nErr==0 ){ sqlite3GenerateColumnNames(pParse, &sSelect); @@ -141154,13 +142436,14 @@ static void updateVirtualTable( */ SQLITE_PRIVATE void sqlite3ColumnDefault(Vdbe *v, Table *pTab, int i, int iReg){ assert( pTab!=0 ); - if( !pTab->pSelect ){ + if( !IsView(pTab) ){ sqlite3_value *pValue = 0; u8 enc = ENC(sqlite3VdbeDb(v)); Column *pCol = &pTab->aCol[i]; - VdbeComment((v, "%s.%s", pTab->zName, pCol->zName)); + VdbeComment((v, "%s.%s", pTab->zName, pCol->zCnName)); assert( inCol ); - sqlite3ValueFromExpr(sqlite3VdbeDb(v), pCol->pDflt, enc, + sqlite3ValueFromExpr(sqlite3VdbeDb(v), + sqlite3ColumnExpr(pTab,pCol), enc, pCol->affinity, &pValue); if( pValue ){ sqlite3VdbeAppendP4(v, pValue, P4_MEM); @@ -141330,7 +142613,7 @@ static void updateFromSelect( pList = sqlite3ExprListAppend(pParse, pList, pNew); } eDest = IsVirtual(pTab) ? SRT_Table : SRT_Upfrom; - }else if( pTab->pSelect ){ + }else if( IsView(pTab) ){ for(i=0; inCol; i++){ pList = sqlite3ExprListAppend(pParse, pList, exprRowColumn(pParse, i)); } @@ -141353,8 +142636,9 @@ static void updateFromSelect( } } pSelect = sqlite3SelectNew(pParse, pList, - pSrc, pWhere2, pGrp, 0, pOrderBy2, SF_UpdateFrom|SF_IncludeHidden, pLimit2 + pSrc, pWhere2, pGrp, 0, pOrderBy2, SF_UFSrcCheck|SF_IncludeHidden, pLimit2 ); + if( pSelect ) pSelect->selFlags |= SF_OrderByReqd; sqlite3SelectDestInit(&dest, eDest, iEph); dest.iSDParm2 = (pPk ? pPk->nKeyCol : -1); sqlite3Select(pParse, pSelect, &dest); @@ -141454,7 +142738,7 @@ SQLITE_PRIVATE void sqlite3Update( */ #ifndef SQLITE_OMIT_TRIGGER pTrigger = sqlite3TriggersExist(pParse, pTab, TK_UPDATE, pChanges, &tmask); - isView = pTab->pSelect!=0; + isView = IsView(pTab); assert( pTrigger || tmask==0 ); #else # define pTrigger 0 @@ -141543,13 +142827,16 @@ SQLITE_PRIVATE void sqlite3Update( */ chngRowid = chngPk = 0; for(i=0; inExpr; i++){ + u8 hCol = sqlite3StrIHash(pChanges->a[i].zEName); /* If this is an UPDATE with a FROM clause, do not resolve expressions ** here. The call to sqlite3Select() below will do that. 
*/ if( nChangeFrom==0 && sqlite3ResolveExprNames(&sNC, pChanges->a[i].pExpr) ){ goto update_cleanup; } for(j=0; jnCol; j++){ - if( sqlite3StrICmp(pTab->aCol[j].zName, pChanges->a[i].zEName)==0 ){ + if( pTab->aCol[j].hName==hCol + && sqlite3StrICmp(pTab->aCol[j].zCnName, pChanges->a[i].zEName)==0 + ){ if( j==pTab->iPKey ){ chngRowid = 1; pRowidExpr = pChanges->a[i].pExpr; @@ -141563,7 +142850,7 @@ SQLITE_PRIVATE void sqlite3Update( testcase( pTab->aCol[j].colFlags & COLFLAG_STORED ); sqlite3ErrorMsg(pParse, "cannot UPDATE generated column \"%s\"", - pTab->aCol[j].zName); + pTab->aCol[j].zCnName); goto update_cleanup; } #endif @@ -141587,7 +142874,7 @@ SQLITE_PRIVATE void sqlite3Update( { int rc; rc = sqlite3AuthCheck(pParse, SQLITE_UPDATE, pTab->zName, - j<0 ? "ROWID" : pTab->aCol[j].zName, + j<0 ? "ROWID" : pTab->aCol[j].zCnName, db->aDb[iDb].zDbSName); if( rc==SQLITE_DENY ){ goto update_cleanup; @@ -141619,8 +142906,10 @@ SQLITE_PRIVATE void sqlite3Update( for(i=0; inCol; i++){ if( aXRef[i]>=0 ) continue; if( (pTab->aCol[i].colFlags & COLFLAG_GENERATED)==0 ) continue; - if( sqlite3ExprReferencesUpdatedColumn(pTab->aCol[i].pDflt, - aXRef, chngRowid) ){ + if( sqlite3ExprReferencesUpdatedColumn( + sqlite3ColumnExpr(pTab, &pTab->aCol[i]), + aXRef, chngRowid) + ){ aXRef[i] = 99999; bProgress = 1; } @@ -142712,7 +144001,7 @@ SQLITE_PRIVATE void sqlite3UpsertDoUpdate( k = sqlite3TableColumnToIndex(pIdx, pPk->aiColumn[i]); sqlite3VdbeAddOp3(v, OP_Column, iCur, k, iPk+i); VdbeComment((v, "%s.%s", pIdx->zName, - pTab->aCol[pPk->aiColumn[i]].zName)); + pTab->aCol[pPk->aiColumn[i]].zCnName)); } sqlite3VdbeVerifyAbortable(v, OE_Abort); i = sqlite3VdbeAddOp4Int(v, OP_Found, iDataCur, 0, iPk, nPk); @@ -142894,8 +144183,8 @@ SQLITE_PRIVATE SQLITE_NOINLINE int sqlite3RunVacuum( Btree *pTemp; /* The temporary database we vacuum into */ u32 saved_mDbFlags; /* Saved value of db->mDbFlags */ u64 saved_flags; /* Saved value of db->flags */ - int saved_nChange; /* Saved value of db->nChange */ - int saved_nTotalChange; /* Saved value of db->nTotalChange */ + i64 saved_nChange; /* Saved value of db->nChange */ + i64 saved_nTotalChange; /* Saved value of db->nTotalChange */ u32 saved_openFlags; /* Saved value of db->openFlags */ u8 saved_mTrace; /* Saved trace settings */ Db *pDb = 0; /* Database to detach at end of vacuum */ @@ -142993,7 +144282,9 @@ SQLITE_PRIVATE SQLITE_NOINLINE int sqlite3RunVacuum( /* Do not attempt to change the page size for a WAL database */ if( sqlite3PagerGetJournalMode(sqlite3BtreePager(pMain)) - ==PAGER_JOURNALMODE_WAL ){ + ==PAGER_JOURNALMODE_WAL + && pOut==0 + ){ db->nextPagesize = 0; } @@ -143342,7 +144633,7 @@ SQLITE_PRIVATE void sqlite3VtabLock(VTable *pVTab){ SQLITE_PRIVATE VTable *sqlite3GetVTable(sqlite3 *db, Table *pTab){ VTable *pVtab; assert( IsVirtual(pTab) ); - for(pVtab=pTab->pVTable; pVtab && pVtab->db!=db; pVtab=pVtab->pNext); + for(pVtab=pTab->u.vtab.p; pVtab && pVtab->db!=db; pVtab=pVtab->pNext); return pVtab; } @@ -143355,7 +144646,8 @@ SQLITE_PRIVATE void sqlite3VtabUnlock(VTable *pVTab){ assert( db ); assert( pVTab->nRef>0 ); - assert( db->magic==SQLITE_MAGIC_OPEN || db->magic==SQLITE_MAGIC_ZOMBIE ); + assert( db->eOpenState==SQLITE_STATE_OPEN + || db->eOpenState==SQLITE_STATE_ZOMBIE ); pVTab->nRef--; if( pVTab->nRef==0 ){ @@ -143370,21 +144662,24 @@ SQLITE_PRIVATE void sqlite3VtabUnlock(VTable *pVTab){ /* ** Table p is a virtual table. 
This function moves all elements in the -** p->pVTable list to the sqlite3.pDisconnect lists of their associated +** p->u.vtab.p list to the sqlite3.pDisconnect lists of their associated ** database connections to be disconnected at the next opportunity. ** Except, if argument db is not NULL, then the entry associated with -** connection db is left in the p->pVTable list. +** connection db is left in the p->u.vtab.p list. */ static VTable *vtabDisconnectAll(sqlite3 *db, Table *p){ VTable *pRet = 0; - VTable *pVTable = p->pVTable; - p->pVTable = 0; + VTable *pVTable; + + assert( IsVirtual(p) ); + pVTable = p->u.vtab.p; + p->u.vtab.p = 0; /* Assert that the mutex (if any) associated with the BtShared database ** that contains table p is held by the caller. See header comments ** above function sqlite3VtabUnlockList() for an explanation of why ** this makes it safe to access the sqlite3.pDisconnect list of any - ** database connection that may have an entry in the p->pVTable list. + ** database connection that may have an entry in the p->u.vtab.p list. */ assert( db==0 || sqlite3SchemaMutexHeld(db, 0, p->pSchema) ); @@ -143394,7 +144689,7 @@ static VTable *vtabDisconnectAll(sqlite3 *db, Table *p){ assert( db2 ); if( db2==db ){ pRet = pVTable; - p->pVTable = pRet; + p->u.vtab.p = pRet; pRet->pNext = 0; }else{ pVTable->pNext = db2->pDisconnect; @@ -143422,7 +144717,7 @@ SQLITE_PRIVATE void sqlite3VtabDisconnect(sqlite3 *db, Table *p){ assert( sqlite3BtreeHoldsAllMutexes(db) ); assert( sqlite3_mutex_held(db->mutex) ); - for(ppVTab=&p->pVTable; *ppVTab; ppVTab=&(*ppVTab)->pNext){ + for(ppVTab=&p->u.vtab.p; *ppVTab; ppVTab=&(*ppVTab)->pNext){ if( (*ppVTab)->db==db ){ VTable *pVTab = *ppVTab; *ppVTab = pVTab->pNext; @@ -143485,37 +144780,41 @@ SQLITE_PRIVATE void sqlite3VtabUnlockList(sqlite3 *db){ ** database connection. */ SQLITE_PRIVATE void sqlite3VtabClear(sqlite3 *db, Table *p){ + assert( IsVirtual(p) ); if( !db || db->pnBytesFreed==0 ) vtabDisconnectAll(0, p); - if( p->azModuleArg ){ + if( p->u.vtab.azArg ){ int i; - for(i=0; inModuleArg; i++){ - if( i!=1 ) sqlite3DbFree(db, p->azModuleArg[i]); + for(i=0; iu.vtab.nArg; i++){ + if( i!=1 ) sqlite3DbFree(db, p->u.vtab.azArg[i]); } - sqlite3DbFree(db, p->azModuleArg); + sqlite3DbFree(db, p->u.vtab.azArg); } } /* -** Add a new module argument to pTable->azModuleArg[]. +** Add a new module argument to pTable->u.vtab.azArg[]. ** The string is not copied - the pointer is stored. The ** string will be freed automatically when the table is ** deleted. 
*/ static void addModuleArgument(Parse *pParse, Table *pTable, char *zArg){ - sqlite3_int64 nBytes = sizeof(char *)*(2+pTable->nModuleArg); + sqlite3_int64 nBytes; char **azModuleArg; sqlite3 *db = pParse->db; - if( pTable->nModuleArg+3>=db->aLimit[SQLITE_LIMIT_COLUMN] ){ + + assert( IsVirtual(pTable) ); + nBytes = sizeof(char *)*(2+pTable->u.vtab.nArg); + if( pTable->u.vtab.nArg+3>=db->aLimit[SQLITE_LIMIT_COLUMN] ){ sqlite3ErrorMsg(pParse, "too many columns on %s", pTable->zName); } - azModuleArg = sqlite3DbRealloc(db, pTable->azModuleArg, nBytes); + azModuleArg = sqlite3DbRealloc(db, pTable->u.vtab.azArg, nBytes); if( azModuleArg==0 ){ sqlite3DbFree(db, zArg); }else{ - int i = pTable->nModuleArg++; + int i = pTable->u.vtab.nArg++; azModuleArg[i] = zArg; azModuleArg[i+1] = 0; - pTable->azModuleArg = azModuleArg; + pTable->u.vtab.azArg = azModuleArg; } } @@ -143538,10 +144837,11 @@ SQLITE_PRIVATE void sqlite3VtabBeginParse( pTable = pParse->pNewTable; if( pTable==0 ) return; assert( 0==pTable->pIndex ); + pTable->eTabType = TABTYP_VTAB; db = pParse->db; - assert( pTable->nModuleArg==0 ); + assert( pTable->u.vtab.nArg==0 ); addModuleArgument(pParse, pTable, sqlite3NameFromToken(db, pModuleName)); addModuleArgument(pParse, pTable, 0); addModuleArgument(pParse, pTable, sqlite3DbStrDup(db, pTable->zName)); @@ -143558,11 +144858,11 @@ SQLITE_PRIVATE void sqlite3VtabBeginParse( ** sqlite_schema table, has already been made by sqlite3StartTable(). ** The second call, to obtain permission to create the table, is made now. */ - if( pTable->azModuleArg ){ + if( pTable->u.vtab.azArg ){ int iDb = sqlite3SchemaToIndex(db, pTable->pSchema); assert( iDb>=0 ); /* The database the table is being created in */ sqlite3AuthCheck(pParse, SQLITE_CREATE_VTABLE, pTable->zName, - pTable->azModuleArg[0], pParse->db->aDb[iDb].zDbSName); + pTable->u.vtab.azArg[0], pParse->db->aDb[iDb].zDbSName); } #endif } @@ -143590,9 +144890,10 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ sqlite3 *db = pParse->db; /* The database connection */ if( pTab==0 ) return; + assert( IsVirtual(pTab) ); addArgumentToVtab(pParse); pParse->sArg.z = 0; - if( pTab->nModuleArg<1 ) return; + if( pTab->u.vtab.nArg<1 ) return; /* If the CREATE VIRTUAL TABLE statement is being entered for the ** first time (in other words if the virtual table is actually being @@ -143625,7 +144926,7 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ */ iDb = sqlite3SchemaToIndex(db, pTab->pSchema); sqlite3NestedParse(pParse, - "UPDATE %Q." DFLT_SCHEMA_TABLE " " + "UPDATE %Q." LEGACY_SCHEMA_TABLE " " "SET type='table', name=%Q, tbl_name=%Q, rootpage=0, sql=%Q " "WHERE rowid=#%d", db->aDb[iDb].zDbSName, @@ -143645,18 +144946,14 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ iReg = ++pParse->nMem; sqlite3VdbeLoadString(v, iReg, pTab->zName); sqlite3VdbeAddOp2(v, OP_VCreate, iDb, iReg); - } - - /* If we are rereading the sqlite_schema table create the in-memory - ** record of the table. The xConnect() method is not called until - ** the first time the virtual table is used in an SQL statement. This - ** allows a schema that contains virtual tables to be loaded before - ** the required virtual table implementations are registered. */ - else { + }else{ + /* If we are rereading the sqlite_schema table create the in-memory + ** record of the table. 
*/ Table *pOld; Schema *pSchema = pTab->pSchema; const char *zName = pTab->zName; - assert( sqlite3SchemaMutexHeld(db, 0, pSchema) ); + assert( zName!=0 ); + sqlite3MarkAllShadowTablesOf(db, pTab); pOld = sqlite3HashInsert(&pSchema->tblHash, zName, pTab); if( pOld ){ sqlite3OomFault(db); @@ -143707,13 +145004,16 @@ static int vtabCallConstructor( VtabCtx sCtx; VTable *pVTable; int rc; - const char *const*azArg = (const char *const*)pTab->azModuleArg; - int nArg = pTab->nModuleArg; + const char *const*azArg; + int nArg = pTab->u.vtab.nArg; char *zErr = 0; char *zModuleName; int iDb; VtabCtx *pCtx; + assert( IsVirtual(pTab) ); + azArg = (const char *const*)pTab->u.vtab.azArg; + /* Check that the virtual-table is not already being initialized */ for(pCtx=db->pVtabCtx; pCtx; pCtx=pCtx->pPrior){ if( pCtx->pTab==pTab ){ @@ -143740,7 +145040,7 @@ static int vtabCallConstructor( pVTable->eVtabRisk = SQLITE_VTABRISK_Normal; iDb = sqlite3SchemaToIndex(db, pTab->pSchema); - pTab->azModuleArg[1] = db->aDb[iDb].zDbSName; + pTab->u.vtab.azArg[1] = db->aDb[iDb].zDbSName; /* Invoke the virtual table constructor */ assert( &db->pVtabCtx ); @@ -143779,12 +145079,12 @@ static int vtabCallConstructor( int iCol; u16 oooHidden = 0; /* If everything went according to plan, link the new VTable structure - ** into the linked list headed by pTab->pVTable. Then loop through the + ** into the linked list headed by pTab->u.vtab.p. Then loop through the ** columns of the table to see if any of them contain the token "hidden". ** If so, set the Column COLFLAG_HIDDEN flag and remove the token from ** the type string. */ - pVTable->pNext = pTab->pVTable; - pTab->pVTable = pVTable; + pVTable->pNext = pTab->u.vtab.p; + pTab->u.vtab.p = pVTable; for(iCol=0; iColnCol; iCol++){ char *zType = sqlite3ColumnType(&pTab->aCol[iCol], ""); @@ -143837,16 +145137,17 @@ SQLITE_PRIVATE int sqlite3VtabCallConnect(Parse *pParse, Table *pTab){ int rc; assert( pTab ); - if( !IsVirtual(pTab) || sqlite3GetVTable(db, pTab) ){ + assert( IsVirtual(pTab) ); + if( sqlite3GetVTable(db, pTab) ){ return SQLITE_OK; } /* Locate the required virtual table module */ - zMod = pTab->azModuleArg[0]; + zMod = pTab->u.vtab.azArg[0]; pMod = (Module*)sqlite3HashFind(&db->aModule, zMod); if( !pMod ){ - const char *zModule = pTab->azModuleArg[0]; + const char *zModule = pTab->u.vtab.azArg[0]; sqlite3ErrorMsg(pParse, "no such module: %s", zModule); rc = SQLITE_ERROR; }else{ @@ -143909,10 +145210,10 @@ SQLITE_PRIVATE int sqlite3VtabCallCreate(sqlite3 *db, int iDb, const char *zTab, const char *zMod; pTab = sqlite3FindTable(db, zTab, db->aDb[iDb].zDbSName); - assert( pTab && IsVirtual(pTab) && !pTab->pVTable ); + assert( pTab && IsVirtual(pTab) && !pTab->u.vtab.p ); /* Locate the required virtual table module */ - zMod = pTab->azModuleArg[0]; + zMod = pTab->u.vtab.azArg[0]; pMod = (Module*)sqlite3HashFind(&db->aModule, zMod); /* If the module has been registered and includes a Create method, @@ -143949,6 +145250,7 @@ SQLITE_API int sqlite3_declare_vtab(sqlite3 *db, const char *zCreateTable){ Table *pTab; char *zErr = 0; Parse sParse; + int initBusy; #ifdef SQLITE_ENABLE_API_ARMOR if( !sqlite3SafetyCheckOk(db) || zCreateTable==0 ){ @@ -143968,17 +145270,23 @@ SQLITE_API int sqlite3_declare_vtab(sqlite3 *db, const char *zCreateTable){ memset(&sParse, 0, sizeof(sParse)); sParse.eParseMode = PARSE_MODE_DECLARE_VTAB; sParse.db = db; + /* We should never be able to reach this point while loading the + ** schema. 
Nevertheless, defend against that (turn off db->init.busy) + ** in case a bug arises. */ + assert( db->init.busy==0 ); + initBusy = db->init.busy; + db->init.busy = 0; sParse.nQueryLoop = 1; if( SQLITE_OK==sqlite3RunParser(&sParse, zCreateTable, &zErr) && sParse.pNewTable && !db->mallocFailed - && !sParse.pNewTable->pSelect - && !IsVirtual(sParse.pNewTable) + && IsOrdinaryTable(sParse.pNewTable) ){ if( !pTab->aCol ){ Table *pNew = sParse.pNewTable; Index *pIdx; pTab->aCol = pNew->aCol; + sqlite3ExprListDelete(db, pNew->u.tab.pDfltList); pTab->nNVCol = pTab->nCol = pNew->nCol; pTab->tabFlags |= pNew->tabFlags & (TF_WithoutRowid|TF_NoVisibleRowid); pNew->nCol = 0; @@ -144014,6 +145322,7 @@ SQLITE_API int sqlite3_declare_vtab(sqlite3 *db, const char *zCreateTable){ } sqlite3DeleteTable(db, sParse.pNewTable); sqlite3ParserReset(&sParse); + db->init.busy = initBusy; assert( (rc&0xff)==rc ); rc = sqlite3ApiExit(db, rc); @@ -144033,10 +145342,13 @@ SQLITE_PRIVATE int sqlite3VtabCallDestroy(sqlite3 *db, int iDb, const char *zTab Table *pTab; pTab = sqlite3FindTable(db, zTab, db->aDb[iDb].zDbSName); - if( pTab!=0 && ALWAYS(pTab->pVTable!=0) ){ + if( ALWAYS(pTab!=0) + && ALWAYS(IsVirtual(pTab)) + && ALWAYS(pTab->u.vtab.p!=0) + ){ VTable *p; int (*xDestroy)(sqlite3_vtab *); - for(p=pTab->pVTable; p; p=p->pNext){ + for(p=pTab->u.vtab.p; p; p=p->pNext){ assert( p->pVtab ); if( p->pVtab->nRef>0 ){ return SQLITE_LOCKED; @@ -144050,9 +145362,9 @@ SQLITE_PRIVATE int sqlite3VtabCallDestroy(sqlite3 *db, int iDb, const char *zTab rc = xDestroy(p->pVtab); /* Remove the sqlite3_vtab* from the aVTrans[] array, if applicable */ if( rc==SQLITE_OK ){ - assert( pTab->pVTable==p && p->pNext==0 ); + assert( pTab->u.vtab.p==p && p->pNext==0 ); p->pVtab = 0; - pTab->pVTable = 0; + pTab->u.vtab.p = 0; sqlite3VtabUnlock(p); } sqlite3DeleteTable(db, pTab); @@ -144266,6 +145578,7 @@ SQLITE_PRIVATE FuncDef *sqlite3VtabOverloadFunction( /* Check to see the left operand is a column in a virtual table */ if( NEVER(pExpr==0) ) return pDef; if( pExpr->op!=TK_COLUMN ) return pDef; + assert( ExprUseYTab(pExpr) ); pTab = pExpr->y.pTab; if( pTab==0 ) return pDef; if( !IsVirtual(pTab) ) return pDef; @@ -144340,8 +145653,9 @@ SQLITE_PRIVATE void sqlite3VtabMakeWritable(Parse *pParse, Table *pTab){ /* ** Check to see if virtual table module pMod can be have an eponymous ** virtual table instance. If it can, create one if one does not already -** exist. Return non-zero if the eponymous virtual table instance exists -** when this routine returns, and return zero if it does not exist. +** exist. Return non-zero if either the eponymous virtual table instance +** exists when this routine returns or if an attempt to create it failed +** and an error message was left in pParse. 
** ** An eponymous virtual table instance is one that is named after its ** module, and more importantly, does not require a CREATE VIRTUAL TABLE @@ -144368,8 +145682,9 @@ SQLITE_PRIVATE int sqlite3VtabEponymousTableInit(Parse *pParse, Module *pMod){ } pMod->pEpoTab = pTab; pTab->nTabRef = 1; + pTab->eTabType = TABTYP_VTAB; pTab->pSchema = db->aDb[0].pSchema; - assert( pTab->nModuleArg==0 ); + assert( pTab->u.vtab.nArg==0 ); pTab->iPKey = -1; pTab->tabFlags |= TF_Eponymous; addModuleArgument(pParse, pTab, sqlite3DbStrDup(db, pTab->zName)); @@ -144380,7 +145695,6 @@ SQLITE_PRIVATE int sqlite3VtabEponymousTableInit(Parse *pParse, Module *pMod){ sqlite3ErrorMsg(pParse, "%s", zErr); sqlite3DbFree(db, zErr); sqlite3VtabEponymousTableClear(db, pMod); - return 0; } return 1; } @@ -144573,7 +145887,7 @@ struct WhereLevel { u8 eEndLoopOp; /* IN Loop terminator. OP_Next or OP_Prev */ } *aInLoop; /* Information about each nested IN operator */ } in; /* Used when pWLoop->wsFlags&WHERE_IN_ABLE */ - Index *pCovidx; /* Possible covering index for WHERE_MULTI_OR */ + Index *pCoveringIdx; /* Possible covering index for WHERE_MULTI_OR */ } u; struct WhereLoop *pWLoop; /* The selected WhereLoop object */ Bitmask notReady; /* FROM entries not usable at this level */ @@ -145114,7 +146428,7 @@ static const char *explainIndexColumnName(Index *pIdx, int i){ i = pIdx->aiColumn[i]; if( i==XN_EXPR ) return ""; if( i==XN_ROWID ) return "rowid"; - return pIdx->pTable->aCol[i].zName; + return pIdx->pTable->aCol[i].zCnName; } /* @@ -145501,16 +146815,23 @@ static Expr *removeUnindexableInClauseTerms( Expr *pNew; pNew = sqlite3ExprDup(db, pX, 0); if( db->mallocFailed==0 ){ - ExprList *pOrigRhs = pNew->x.pSelect->pEList; /* Original unmodified RHS */ - ExprList *pOrigLhs = pNew->pLeft->x.pList; /* Original unmodified LHS */ + ExprList *pOrigRhs; /* Original unmodified RHS */ + ExprList *pOrigLhs; /* Original unmodified LHS */ ExprList *pRhs = 0; /* New RHS after modifications */ ExprList *pLhs = 0; /* New LHS after mods */ int i; /* Loop counter */ Select *pSelect; /* Pointer to the SELECT on the RHS */ + assert( ExprUseXSelect(pNew) ); + pOrigRhs = pNew->x.pSelect->pEList; + assert( pNew->pLeft!=0 ); + assert( ExprUseXList(pNew->pLeft) ); + pOrigLhs = pNew->pLeft->x.pList; for(i=iEq; inLTerm; i++){ if( pLoop->aLTerm[i]->pExpr==pX ){ - int iField = pLoop->aLTerm[i]->u.x.iField - 1; + int iField; + assert( (pLoop->aLTerm[i]->eOperator & (WO_OR|WO_AND))==0 ); + iField = pLoop->aLTerm[i]->u.x.iField - 1; if( pOrigRhs->a[iField].pExpr==0 ) continue; /* Duplicate PK column */ pRhs = sqlite3ExprListAppend(pParse, pRhs, pOrigRhs->a[iField].pExpr); pOrigRhs->a[iField].pExpr = 0; @@ -145625,7 +146946,7 @@ static int codeEqualityTerm( } iTab = 0; - if( (pX->flags & EP_xIsSelect)==0 || pX->x.pSelect->pEList->nExpr==1 ){ + if( !ExprUseXSelect(pX) || pX->x.pSelect->pEList->nExpr==1 ){ eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, 0, &iTab); }else{ sqlite3 *db = pParse->db; @@ -145647,8 +146968,8 @@ static int codeEqualityTerm( sqlite3VdbeAddOp2(v, bRev ? 
OP_Last : OP_Rewind, iTab, 0); VdbeCoverageIf(v, bRev); VdbeCoverageIf(v, !bRev); - assert( (pLoop->wsFlags & WHERE_MULTI_OR)==0 ); + assert( (pLoop->wsFlags & WHERE_MULTI_OR)==0 ); pLoop->wsFlags |= WHERE_IN_ABLE; if( pLevel->u.in.nIn==0 ){ pLevel->addrNxt = sqlite3VdbeMakeLabel(pParse); @@ -146190,7 +147511,7 @@ static void codeExprOrVector(Parse *pParse, Expr *p, int iReg, int nReg){ assert( nReg>0 ); if( p && sqlite3ExprIsVector(p) ){ #ifndef SQLITE_OMIT_SUBQUERY - if( (p->flags & EP_xIsSelect) ){ + if( ExprUseXSelect(p) ){ Vdbe *v = pParse->pVdbe; int iSelect; assert( p->op==TK_SELECT ); @@ -146200,7 +147521,9 @@ static void codeExprOrVector(Parse *pParse, Expr *p, int iReg, int nReg){ #endif { int i; - ExprList *pList = p->x.pList; + const ExprList *pList; + assert( ExprUseXList(p) ); + pList = p->x.pList; assert( nReg<=pList->nExpr ); for(i=0; ia[i].pExpr, iReg+i); @@ -146253,10 +147576,10 @@ static int whereIndexExprTransNode(Walker *p, Expr *pExpr){ pExpr->op = TK_COLUMN; pExpr->iTable = pX->iIdxCur; pExpr->iColumn = pX->iIdxCol; - pExpr->y.pTab = 0; testcase( ExprHasProperty(pExpr, EP_Skip) ); testcase( ExprHasProperty(pExpr, EP_Unlikely) ); - ExprClearProperty(pExpr, EP_Skip|EP_Unlikely); + ExprClearProperty(pExpr, EP_Skip|EP_Unlikely|EP_WinFunc|EP_Subrtn); + pExpr->y.pTab = 0; return WRC_Prune; }else{ return WRC_Continue; @@ -146271,7 +147594,7 @@ static int whereIndexExprTransColumn(Walker *p, Expr *pExpr){ if( pExpr->op==TK_COLUMN ){ IdxExprTrans *pX = p->u.pIdxTrans; if( pExpr->iTable==pX->iTabCur && pExpr->iColumn==pX->iTabCol ){ - assert( pExpr->y.pTab!=0 ); + assert( ExprUseYTab(pExpr) && pExpr->y.pTab!=0 ); preserveExpr(pX, pExpr); pExpr->affExpr = sqlite3TableColumnAffinity(pExpr->y.pTab,pExpr->iColumn); pExpr->iTable = pX->iIdxCur; @@ -146319,15 +147642,16 @@ static void whereIndexExprTrans( for(iIdxCol=0; iIdxColnColumn; iIdxCol++){ i16 iRef = pIdx->aiColumn[iIdxCol]; if( iRef==XN_EXPR ){ - assert( aColExpr->a[iIdxCol].pExpr!=0 ); + assert( aColExpr!=0 && aColExpr->a[iIdxCol].pExpr!=0 ); x.pIdxExpr = aColExpr->a[iIdxCol].pExpr; if( sqlite3ExprIsConstant(x.pIdxExpr) ) continue; w.xExprCallback = whereIndexExprTransNode; #ifndef SQLITE_OMIT_GENERATED_COLUMNS }else if( iRef>=0 && (pTab->aCol[iRef].colFlags & COLFLAG_VIRTUAL)!=0 - && (pTab->aCol[iRef].zColl==0 - || sqlite3StrICmp(pTab->aCol[iRef].zColl, sqlite3StrBINARY)==0) + && ((pTab->aCol[iRef].colFlags & COLFLAG_HASCOLL)==0 + || sqlite3StrICmp(sqlite3ColumnColl(&pTab->aCol[iRef]), + sqlite3StrBINARY)==0) ){ /* Check to see if there are direct references to generated columns ** that are contained in the index. Pulling the generated column @@ -146507,7 +147831,12 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( pLevel->p1 = iCur; pLevel->op = pWInfo->eOnePass ? 
OP_Noop : OP_VNext; pLevel->p2 = sqlite3VdbeCurrentAddr(v); - iIn = pLevel->u.in.nIn; + assert( (pLoop->wsFlags & WHERE_MULTI_OR)==0 ); + if( pLoop->wsFlags & WHERE_IN_ABLE ){ + iIn = pLevel->u.in.nIn; + }else{ + iIn = 0; + } for(j=nConstraint-1; j>=0; j--){ pTerm = pLoop->aLTerm[j]; if( (pTerm->eOperator & WO_IN)!=0 ) iIn--; @@ -146584,9 +147913,6 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( sqlite3VdbeAddOp3(v, OP_SeekRowid, iCur, addrNxt, iRowidReg); VdbeCoverage(v); pLevel->op = OP_Noop; - if( (pTerm->prereqAll & pLevel->notReady)==0 ){ - pTerm->wtFlags |= TERM_CODED; - } }else if( (pLoop->wsFlags & WHERE_IPK)!=0 && (pLoop->wsFlags & WHERE_COLUMN_RANGE)!=0 ){ @@ -146957,8 +148283,19 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( ** range (if any). */ nConstraint = nEq; + assert( pLevel->p2==0 ); if( pRangeEnd ){ Expr *pRight = pRangeEnd->pExpr->pRight; + if( addrSeekScan ){ + /* For a seek-scan that has a range on the lowest term of the index, + ** we have to make the top of the loop be code that sets the end + ** condition of the range. Otherwise, the OP_SeekScan might jump + ** over that initialization, leaving the range-end value set to the + ** range-start value, resulting in a wrong answer. + ** See ticket 5981a8c041a3c2f3 (2021-11-02). + */ + pLevel->p2 = sqlite3VdbeCurrentAddr(v); + } codeExprOrVector(pParse, pRight, regBase+nEq, nTop); whereLikeOptimizationStringFixup(v, pLevel, pRangeEnd); if( (pRangeEnd->wtFlags & TERM_VNULL)==0 @@ -146992,7 +148329,7 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( sqlite3DbFree(db, zEndAff); /* Top of the loop body */ - pLevel->p2 = sqlite3VdbeCurrentAddr(v); + if( pLevel->p2==0 ) pLevel->p2 = sqlite3VdbeCurrentAddr(v); /* Check if the index cursor is past the end of the range. 
*/ if( nConstraint ){ @@ -147399,7 +148736,10 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( } } ExplainQueryPlanPop(pParse); - pLevel->u.pCovidx = pCov; + assert( pLevel->pWLoop==pLoop ); + assert( (pLoop->wsFlags & WHERE_MULTI_OR)!=0 ); + assert( (pLoop->wsFlags & WHERE_IN_ABLE)==0 ); + pLevel->u.pCoveringIdx = pCov; if( pCov ) pLevel->iIdxCur = iCovCur; if( pAndExpr ){ pAndExpr->pLeft = 0; @@ -147543,12 +148883,13 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( #endif assert( !ExprHasProperty(pE, EP_FromJoin) ); assert( (pTerm->prereqRight & pLevel->notReady)!=0 ); + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); pAlt = sqlite3WhereFindTerm(pWC, iCur, pTerm->u.x.leftColumn, notReady, WO_EQ|WO_IN|WO_IS, 0); if( pAlt==0 ) continue; if( pAlt->wtFlags & (TERM_CODED) ) continue; if( (pAlt->eOperator & WO_IN) - && (pAlt->pExpr->flags & EP_xIsSelect) + && ExprUseXSelect(pAlt->pExpr) && (pAlt->pExpr->x.pSelect->pEList->nExpr>1) ){ continue; @@ -147797,6 +149138,7 @@ static int isLikeOrGlob( #ifdef SQLITE_EBCDIC if( *pnoCase ) return 0; #endif + assert( ExprUseXList(pExpr) ); pList = pExpr->x.pList; pLeft = pList->a[1].pExpr; @@ -147812,7 +149154,8 @@ static int isLikeOrGlob( sqlite3VdbeSetVarmask(pParse->pVdbe, iCol); assert( pRight->op==TK_VARIABLE || pRight->op==TK_REGISTER ); }else if( op==TK_STRING ){ - z = (u8*)pRight->u.zToken; + assert( !ExprHasProperty(pRight, EP_IntValue) ); + z = (u8*)pRight->u.zToken; } if( z ){ @@ -147841,7 +149184,9 @@ static int isLikeOrGlob( pPrefix = sqlite3Expr(db, TK_STRING, (char*)z); if( pPrefix ){ int iFrom, iTo; - char *zNew = pPrefix->u.zToken; + char *zNew; + assert( !ExprHasProperty(pPrefix, EP_IntValue) ); + zNew = pPrefix->u.zToken; zNew[cnt] = 0; for(iFrom=iTo=0; iFromop!=TK_COLUMN || sqlite3ExprAffinity(pLeft)!=SQLITE_AFF_TEXT - || IsVirtual(pLeft->y.pTab) /* Value might be numeric */ + || (ALWAYS( ExprUseYTab(pLeft) ) + && pLeft->y.pTab + && IsVirtual(pLeft->y.pTab)) /* Might be numeric */ ){ int isNum; double rDummy; @@ -147893,6 +149240,7 @@ static int isLikeOrGlob( if( op==TK_VARIABLE ){ Vdbe *v = pParse->pVdbe; sqlite3VdbeSetVarmask(v, pRight->iColumn); + assert( !ExprHasProperty(pRight, EP_IntValue) ); if( *pisComplete && pRight->u.zToken[1] ){ /* If the rhs of the LIKE expression is a variable, and the current ** value of the variable means there is no need to invoke the LIKE @@ -147966,6 +149314,7 @@ static int isAuxiliaryVtabOperator( Expr *pCol; /* Column reference */ int i; + assert( ExprUseXList(pExpr) ); pList = pExpr->x.pList; if( pList==0 || pList->nExpr!=2 ){ return 0; @@ -147979,9 +149328,11 @@ static int isAuxiliaryVtabOperator( ** MATCH(expression,vtab_column) */ pCol = pList->a[1].pExpr; + assert( pCol->op!=TK_COLUMN || ExprUseYTab(pCol) ); testcase( pCol->op==TK_COLUMN && pCol->y.pTab==0 ); if( ExprIsVtab(pCol) ){ for(i=0; iu.zToken, aOp[i].zOp)==0 ){ *peOp2 = aOp[i].eOp2; *ppRight = pList->a[0].pExpr; @@ -148002,6 +149353,7 @@ static int isAuxiliaryVtabOperator( ** with function names in an arbitrary case. 
*/ pCol = pList->a[0].pExpr; + assert( pCol->op!=TK_COLUMN || ExprUseYTab(pCol) ); testcase( pCol->op==TK_COLUMN && pCol->y.pTab==0 ); if( ExprIsVtab(pCol) ){ sqlite3_vtab *pVtab; @@ -148011,7 +149363,8 @@ static int isAuxiliaryVtabOperator( pVtab = sqlite3GetVTable(db, pCol->y.pTab)->pVtab; assert( pVtab!=0 ); assert( pVtab->pModule!=0 ); - pMod = (sqlite3_module *)pVtab->pModule; + assert( !ExprHasProperty(pExpr, EP_IntValue) ); + pMod = (sqlite3_module *)pVtab->pModule; if( pMod->xFindFunction!=0 ){ i = pMod->xFindFunction(pVtab,2, pExpr->u.zToken, &xNotUsed, &pNotUsed); if( i>=SQLITE_INDEX_CONSTRAINT_FUNCTION ){ @@ -148026,10 +149379,12 @@ static int isAuxiliaryVtabOperator( int res = 0; Expr *pLeft = pExpr->pLeft; Expr *pRight = pExpr->pRight; + assert( pLeft->op!=TK_COLUMN || ExprUseYTab(pLeft) ); testcase( pLeft->op==TK_COLUMN && pLeft->y.pTab==0 ); if( ExprIsVtab(pLeft) ){ res++; } + assert( pRight==0 || pRight->op!=TK_COLUMN || ExprUseYTab(pRight) ); testcase( pRight && pRight->op==TK_COLUMN && pRight->y.pTab==0 ); if( pRight && ExprIsVtab(pRight) ){ res++; @@ -148282,6 +149637,7 @@ static void exprAnalyzeOrTerm( pOrTerm->u.pAndInfo = pAndInfo; pOrTerm->wtFlags |= TERM_ANDINFO; pOrTerm->eOperator = WO_AND; + pOrTerm->leftCursor = -1; pAndWC = &pAndInfo->wc; memset(pAndWC->aStatic, 0, sizeof(pAndWC->aStatic)); sqlite3WhereClauseInit(pAndWC, pWC->pWInfo); @@ -148324,11 +149680,10 @@ static void exprAnalyzeOrTerm( ** empty. */ pOrInfo->indexable = indexable; + pTerm->eOperator = WO_OR; + pTerm->leftCursor = -1; if( indexable ){ - pTerm->eOperator = WO_OR; pWC->hasOr = 1; - }else{ - pTerm->eOperator = WO_OR; } /* For a two-way OR, attempt to implementation case 2. @@ -148401,6 +149756,7 @@ static void exprAnalyzeOrTerm( assert( pOrTerm->wtFlags & (TERM_COPIED|TERM_VIRTUAL) ); continue; } + assert( (pOrTerm->eOperator & (WO_OR|WO_AND))==0 ); iColumn = pOrTerm->u.x.leftColumn; iCursor = pOrTerm->leftCursor; pLeft = pOrTerm->pExpr->pLeft; @@ -148421,6 +149777,7 @@ static void exprAnalyzeOrTerm( okToChngToIN = 1; for(; i>=0 && okToChngToIN; i--, pOrTerm++){ assert( pOrTerm->eOperator & WO_EQ ); + assert( (pOrTerm->eOperator & (WO_OR|WO_AND))==0 ); if( pOrTerm->leftCursor!=iCursor ){ pOrTerm->wtFlags &= ~TERM_OR_OK; }else if( pOrTerm->u.x.leftColumn!=iColumn || (iColumn==XN_EXPR @@ -148457,6 +149814,7 @@ static void exprAnalyzeOrTerm( for(i=pOrWc->nTerm-1, pOrTerm=pOrWc->a; i>=0; i--, pOrTerm++){ if( (pOrTerm->wtFlags & TERM_OR_OK)==0 ) continue; assert( pOrTerm->eOperator & WO_EQ ); + assert( (pOrTerm->eOperator & (WO_OR|WO_AND))==0 ); assert( pOrTerm->leftCursor==iCursor ); assert( pOrTerm->u.x.leftColumn==iColumn ); pDup = sqlite3ExprDup(db, pOrTerm->pExpr->pRight, 0); @@ -148469,7 +149827,7 @@ static void exprAnalyzeOrTerm( if( pNew ){ int idxNew; transferJoinMarkings(pNew, pExpr); - assert( !ExprHasProperty(pNew, EP_xIsSelect) ); + assert( ExprUseXList(pNew) ); pNew->x.pList = pList; idxNew = whereClauseInsert(pWC, pNew, TERM_VIRTUAL|TERM_DYNAMIC); testcase( idxNew==0 ); @@ -148597,6 +149955,7 @@ static int exprMightBeIndexed( assert( TK_ISop==TK_VECTOR && (op>=TK_GT && ALWAYS(op<=TK_GE)) ){ + assert( ExprUseXList(pExpr) ); pExpr = pExpr->x.pList->a[0].pExpr; } @@ -148663,7 +150022,7 @@ static void exprAnalyze( if( op==TK_IN ){ assert( pExpr->pRight==0 ); if( sqlite3ExprCheckIN(pParse, pExpr) ) return; - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ pTerm->prereqRight = exprSelectUsage(pMaskSet, pExpr->x.pSelect); }else{ pTerm->prereqRight = 
sqlite3WhereExprListUsage(pMaskSet, pExpr->x.pList); @@ -148699,11 +150058,13 @@ static void exprAnalyze( if( pTerm->u.x.iField>0 ){ assert( op==TK_IN ); assert( pLeft->op==TK_VECTOR ); + assert( ExprUseXList(pLeft) ); pLeft = pLeft->x.pList->a[pTerm->u.x.iField-1].pExpr; } if( exprMightBeIndexed(pSrc, prereqLeft, aiCurCol, pLeft, op) ){ pTerm->leftCursor = aiCurCol[0]; + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); pTerm->u.x.leftColumn = aiCurCol[1]; pTerm->eOperator = operatorMask(op) & opMask; } @@ -148741,12 +150102,18 @@ static void exprAnalyze( } pNew->wtFlags |= exprCommute(pParse, pDup); pNew->leftCursor = aiCurCol[0]; + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); pNew->u.x.leftColumn = aiCurCol[1]; testcase( (prereqLeft | extraRight) != prereqLeft ); pNew->prereqRight = prereqLeft | extraRight; pNew->prereqAll = prereqAll; pNew->eOperator = (operatorMask(pDup->op) + eExtraOp) & opMask; - }else if( op==TK_ISNULL && 0==sqlite3ExprCanBeNull(pLeft) ){ + }else + if( op==TK_ISNULL + && !ExprHasProperty(pExpr,EP_FromJoin) + && 0==sqlite3ExprCanBeNull(pLeft) + ){ + assert( !ExprHasProperty(pExpr, EP_IntValue) ); pExpr->op = TK_TRUEFALSE; pExpr->u.zToken = "false"; ExprSetProperty(pExpr, EP_IsFalse); @@ -148772,9 +150139,11 @@ static void exprAnalyze( ** BETWEEN term is skipped. */ else if( pExpr->op==TK_BETWEEN && pWC->op==TK_AND ){ - ExprList *pList = pExpr->x.pList; + ExprList *pList; int i; static const u8 ops[] = {TK_GE, TK_LE}; + assert( ExprUseXList(pExpr) ); + pList = pExpr->x.pList; assert( pList!=0 ); assert( pList->nExpr==2 ); for(i=0; i<2; i++){ @@ -148867,8 +150236,12 @@ static void exprAnalyze( const char *zCollSeqName; /* Name of collating sequence */ const u16 wtFlags = TERM_LIKEOPT | TERM_VIRTUAL | TERM_DYNAMIC; + assert( ExprUseXList(pExpr) ); pLeft = pExpr->x.pList->a[1].pExpr; pStr2 = sqlite3ExprDup(db, pStr1, 0); + assert( pStr1==0 || !ExprHasProperty(pStr1, EP_IntValue) ); + assert( pStr2==0 || !ExprHasProperty(pStr2, EP_IntValue) ); + /* Convert the lower bound to upper-case and the upper bound to ** lower-case (upper-case is less than lower-case in ASCII) so that @@ -148943,8 +150316,8 @@ static void exprAnalyze( for(i=0; ipLeft, i); - Expr *pRight = sqlite3ExprForVectorField(pParse, pExpr->pRight, i); + Expr *pLeft = sqlite3ExprForVectorField(pParse, pExpr->pLeft, i, nLeft); + Expr *pRight = sqlite3ExprForVectorField(pParse, pExpr->pRight, i, nLeft); pNew = sqlite3PExpr(pParse, pExpr->op, pLeft, pRight); transferJoinMarkings(pNew, pExpr); @@ -148968,6 +150341,7 @@ static void exprAnalyze( else if( pExpr->op==TK_IN && pTerm->u.x.iField==0 && pExpr->pLeft->op==TK_VECTOR + && ALWAYS( ExprUseXSelect(pExpr) ) && pExpr->x.pSelect->pPrior==0 #ifndef SQLITE_OMIT_WINDOWFUNC && pExpr->x.pSelect->pWin==0 @@ -149131,14 +150505,15 @@ SQLITE_PRIVATE Bitmask sqlite3WhereExprUsageNN(WhereMaskSet *pMaskSet, Expr *p){ if( p->pRight ){ mask |= sqlite3WhereExprUsageNN(pMaskSet, p->pRight); assert( p->x.pList==0 ); - }else if( ExprHasProperty(p, EP_xIsSelect) ){ + }else if( ExprUseXSelect(p) ){ if( ExprHasProperty(p, EP_VarSelect) ) pMaskSet->bVarSelect = 1; mask |= exprSelectUsage(pMaskSet, p->x.pSelect); }else if( p->x.pList ){ mask |= sqlite3WhereExprListUsage(pMaskSet, p->x.pList); } #ifndef SQLITE_OMIT_WINDOWFUNC - if( (p->op==TK_FUNCTION || p->op==TK_AGG_FUNCTION) && p->y.pWin ){ + if( (p->op==TK_FUNCTION || p->op==TK_AGG_FUNCTION) && ExprUseYWin(p) ){ + assert( p->y.pWin!=0 ); mask |= sqlite3WhereExprListUsage(pMaskSet, p->y.pWin->pPartition); mask |= 
sqlite3WhereExprListUsage(pMaskSet, p->y.pWin->pOrderBy); mask |= sqlite3WhereExprUsage(pMaskSet, p->y.pWin->pFilter); @@ -149213,6 +150588,7 @@ SQLITE_PRIVATE void sqlite3WhereTabFuncArgs( if( pColRef==0 ) return; pColRef->iTable = pItem->iCursor; pColRef->iColumn = k++; + assert( ExprUseYTab(pColRef) ); pColRef->y.pTab = pTab; pRhs = sqlite3PExpr(pParse, TK_UPLUS, sqlite3ExprDup(pParse->db, pArgs->a[j].pExpr, 0), 0); @@ -149514,8 +150890,10 @@ static WhereTerm *whereScanNext(WhereScan *pScan){ iColumn = pScan->aiColumn[pScan->iEquiv-1]; iCur = pScan->aiCur[pScan->iEquiv-1]; assert( pWC!=0 ); + assert( iCur>=0 ); do{ for(pTerm=pWC->a+k; knTerm; k++, pTerm++){ + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 || pTerm->leftCursor<0 ); if( pTerm->leftCursor==iCur && pTerm->u.x.leftColumn==iColumn && (iColumn!=XN_EXPR @@ -149557,7 +150935,8 @@ static WhereTerm *whereScanNext(WhereScan *pScan){ } } if( (pTerm->eOperator & (WO_EQ|WO_IS))!=0 - && (pX = pTerm->pExpr->pRight)->op==TK_COLUMN + && (pX = pTerm->pExpr->pRight, ALWAYS(pX!=0)) + && pX->op==TK_COLUMN && pX->iTable==pScan->aiCur[0] && pX->iColumn==pScan->aiColumn[0] ){ @@ -149954,6 +151333,7 @@ static int termCanDriveIndex( return 0; } if( (pTerm->prereqRight & notReady)!=0 ) return 0; + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); if( pTerm->u.x.leftColumn<0 ) return 0; aff = pSrc->pTab->aCol[pTerm->u.x.leftColumn].affinity; if( !sqlite3IndexAffinityOk(pTerm->pExpr, aff) ) return 0; @@ -150026,14 +151406,17 @@ static void constructAutomaticIndex( sqlite3ExprDup(pParse->db, pExpr, 0)); } if( termCanDriveIndex(pTerm, pSrc, notReady) ){ - int iCol = pTerm->u.x.leftColumn; - Bitmask cMask = iCol>=BMS ? MASKBIT(BMS-1) : MASKBIT(iCol); + int iCol; + Bitmask cMask; + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); + iCol = pTerm->u.x.leftColumn; + cMask = iCol>=BMS ? MASKBIT(BMS-1) : MASKBIT(iCol); testcase( iCol==BMS ); testcase( iCol==BMS-1 ); if( !sentWarning ){ sqlite3_log(SQLITE_WARNING_AUTOINDEX, "automatic index on %s(%s)", pTable->zName, - pTable->aCol[iCol].zName); + pTable->aCol[iCol].zCnName); sentWarning = 1; } if( (idxCols & cMask)==0 ){ @@ -150079,8 +151462,11 @@ static void constructAutomaticIndex( idxCols = 0; for(pTerm=pWC->a; pTermu.x.leftColumn; - Bitmask cMask = iCol>=BMS ? MASKBIT(BMS-1) : MASKBIT(iCol); + int iCol; + Bitmask cMask; + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); + iCol = pTerm->u.x.leftColumn; + cMask = iCol>=BMS ? 
MASKBIT(BMS-1) : MASKBIT(iCol); testcase( iCol==BMS-1 ); testcase( iCol==BMS ); if( (idxCols & cMask)==0 ){ @@ -150207,6 +151593,7 @@ static sqlite3_index_info *allocateIndexInfo( testcase( pTerm->eOperator & WO_ALL ); if( (pTerm->eOperator & ~(WO_EQUIV))==0 ) continue; if( pTerm->wtFlags & TERM_VNULL ) continue; + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); assert( pTerm->u.x.leftColumn>=(-1) ); nTerm++; } @@ -150267,6 +151654,7 @@ static sqlite3_index_info *allocateIndexInfo( ){ continue; } + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); assert( pTerm->u.x.leftColumn>=(-1) ); pIdxCons[j].iColumn = pTerm->u.x.leftColumn; pIdxCons[j].iTermOffset = i; @@ -151030,6 +152418,7 @@ SQLITE_PRIVATE void sqlite3WhereTermPrint(WhereTerm *pTerm, int iTerm){ if( ExprHasProperty(pTerm->pExpr, EP_FromJoin) ) zType[2] = 'L'; if( pTerm->wtFlags & TERM_CODED ) zType[3] = 'C'; if( pTerm->eOperator & WO_SINGLE ){ + assert( (pTerm->eOperator & (WO_OR|WO_AND))==0 ); sqlite3_snprintf(sizeof(zLeft),zLeft,"left={%d:%d}", pTerm->leftCursor, pTerm->u.x.leftColumn); }else if( (pTerm->eOperator & WO_OR)!=0 && pTerm->u.pOrInfo!=0 ){ @@ -151047,7 +152436,7 @@ SQLITE_PRIVATE void sqlite3WhereTermPrint(WhereTerm *pTerm, int iTerm){ sqlite3DebugPrintf(" prob=%-3d prereq=%llx,%llx", pTerm->truthProb, (u64)pTerm->prereqAll, (u64)pTerm->prereqRight); } - if( pTerm->u.x.iField ){ + if( (pTerm->eOperator & (WO_OR|WO_AND))==0 && pTerm->u.x.iField ){ sqlite3DebugPrintf(" iField=%d", pTerm->u.x.iField); } if( pTerm->iParent>=0 ){ @@ -151211,7 +152600,8 @@ static void whereInfoFree(sqlite3 *db, WhereInfo *pWInfo){ assert( pWInfo!=0 ); for(i=0; inLevel; i++){ WhereLevel *pLevel = &pWInfo->a[i]; - if( pLevel->pWLoop && (pLevel->pWLoop->wsFlags & WHERE_IN_ABLE) ){ + if( pLevel->pWLoop && (pLevel->pWLoop->wsFlags & WHERE_IN_ABLE)!=0 ){ + assert( (pLevel->pWLoop->wsFlags & WHERE_MULTI_OR)==0 ); sqlite3DbFree(db, pLevel->u.in.aInLoop); } } @@ -151239,7 +152629,8 @@ static void whereUndoExprMods(WhereInfo *pWInfo){ /* ** Return TRUE if all of the following are true: ** -** (1) X has the same or lower cost that Y +** (1) X has the same or lower cost, or returns the same or fewer rows, +** than Y. ** (2) X uses fewer WHERE clause terms than Y ** (3) Every WHERE clause term used by X is also used by Y ** (4) X skips at least as many columns as Y @@ -151262,11 +152653,8 @@ static int whereLoopCheaperProperSubset( if( pX->nLTerm-pX->nSkip >= pY->nLTerm-pY->nSkip ){ return 0; /* X is not a subset of Y */ } + if( pX->rRun>pY->rRun && pX->nOut>pY->nOut ) return 0; if( pY->nSkip > pX->nSkip ) return 0; - if( pX->rRun >= pY->rRun ){ - if( pX->rRun > pY->rRun ) return 0; /* X costs more than Y */ - if( pX->nOut > pY->nOut ) return 0; /* X costs more than Y */ - } for(i=pX->nLTerm-1; i>=0; i--){ if( pX->aLTerm[i]==0 ) continue; for(j=pY->nLTerm-1; j>=0; j--){ @@ -151282,8 +152670,8 @@ static int whereLoopCheaperProperSubset( } /* -** Try to adjust the cost of WhereLoop pTemplate upwards or downwards so -** that: +** Try to adjust the cost and number of output rows of WhereLoop pTemplate +** upwards or downwards so that: ** ** (1) pTemplate costs less than any other WhereLoops that are a proper ** subset of pTemplate @@ -151304,16 +152692,20 @@ static void whereLoopAdjustCost(const WhereLoop *p, WhereLoop *pTemplate){ /* Adjust pTemplate cost downward so that it is cheaper than its ** subset p. 
*/ WHERETRACE(0x80,("subset cost adjustment %d,%d to %d,%d\n", - pTemplate->rRun, pTemplate->nOut, p->rRun, p->nOut-1)); - pTemplate->rRun = p->rRun; - pTemplate->nOut = p->nOut - 1; + pTemplate->rRun, pTemplate->nOut, + MIN(p->rRun, pTemplate->rRun), + MIN(p->nOut - 1, pTemplate->nOut))); + pTemplate->rRun = MIN(p->rRun, pTemplate->rRun); + pTemplate->nOut = MIN(p->nOut - 1, pTemplate->nOut); }else if( whereLoopCheaperProperSubset(pTemplate, p) ){ /* Adjust pTemplate cost upward so that it is costlier than p since ** pTemplate is a proper subset of p */ WHERETRACE(0x80,("subset cost adjustment %d,%d to %d,%d\n", - pTemplate->rRun, pTemplate->nOut, p->rRun, p->nOut+1)); - pTemplate->rRun = p->rRun; - pTemplate->nOut = p->nOut + 1; + pTemplate->rRun, pTemplate->nOut, + MAX(p->rRun, pTemplate->rRun), + MAX(p->nOut + 1, pTemplate->nOut))); + pTemplate->rRun = MAX(p->rRun, pTemplate->rRun); + pTemplate->nOut = MAX(p->nOut + 1, pTemplate->nOut); } } } @@ -151644,9 +153036,12 @@ static int whereRangeVectorLen( char aff; /* Comparison affinity */ char idxaff = 0; /* Indexed columns affinity */ CollSeq *pColl; /* Comparison collation sequence */ - Expr *pLhs = pTerm->pExpr->pLeft->x.pList->a[i].pExpr; - Expr *pRhs = pTerm->pExpr->pRight; - if( pRhs->flags & EP_xIsSelect ){ + Expr *pLhs, *pRhs; + + assert( ExprUseXList(pTerm->pExpr->pLeft) ); + pLhs = pTerm->pExpr->pLeft->x.pList->a[i].pExpr; + pRhs = pTerm->pExpr->pRight; + if( ExprUseXSelect(pRhs) ){ pRhs = pRhs->x.pSelect->pEList->a[i].pExpr; }else{ pRhs = pRhs->x.pList->a[i].pExpr; @@ -151807,7 +153202,7 @@ static int whereLoopAddBtreeIndex( if( eOp & WO_IN ){ Expr *pExpr = pTerm->pExpr; - if( ExprHasProperty(pExpr, EP_xIsSelect) ){ + if( ExprUseXSelect(pExpr) ){ /* "x IN (SELECT ...)": TUNING: the SELECT returns 25 rows */ int i; nIn = 46; assert( 46==sqlite3LogEst(25) ); @@ -151948,7 +153343,7 @@ static int whereLoopAddBtreeIndex( if( nInMul==0 && pProbe->nSample && ALWAYS(pNew->u.btree.nEq<=pProbe->nSampleCol) - && ((eOp & WO_IN)==0 || !ExprHasProperty(pTerm->pExpr, EP_xIsSelect)) + && ((eOp & WO_IN)==0 || ExprUseXList(pTerm->pExpr)) && OptimizationEnabled(db, SQLITE_Stat4) ){ Expr *pExpr = pTerm->pExpr; @@ -152212,7 +153607,6 @@ static int whereLoopAddBtree( int iSortIdx = 1; /* Index number */ int b; /* A boolean value */ LogEst rSize; /* number of rows in the table */ - LogEst rLogSize; /* Logarithm of the number of rows in the table */ WhereClause *pWC; /* The parsed WHERE clause */ Table *pTab; /* Table being queried */ @@ -152225,6 +153619,7 @@ static int whereLoopAddBtree( assert( !IsVirtual(pSrc->pTab) ); if( pSrc->fg.isIndexedBy ){ + assert( pSrc->fg.isCte==0 ); /* An INDEXED BY clause specifies a particular index to use */ pProbe = pSrc->u2.pIBIndex; }else if( !HasRowid(pTab) ){ @@ -152255,7 +153650,6 @@ static int whereLoopAddBtree( pProbe = &sPk; } rSize = pTab->nRowLogEst; - rLogSize = estLog(rSize); #ifndef SQLITE_OMIT_AUTOMATIC_INDEX /* Automatic indexes */ @@ -152269,8 +153663,10 @@ static int whereLoopAddBtree( && !pSrc->fg.isRecursive /* Not a recursive common table expression. 
*/ ){ /* Generate auto-index WhereLoops */ + LogEst rLogSize; /* Logarithm of the number of rows in the table */ WhereTerm *pTerm; WhereTerm *pWCEnd = pWC->a + pWC->nTerm; + rLogSize = estLog(rSize); for(pTerm=pWC->a; rc==SQLITE_OK && pTermprereqRight & pNew->maskSelf ) continue; if( termCanDriveIndex(pTerm, pSrc, 0) ){ @@ -152288,7 +153684,7 @@ static int whereLoopAddBtree( ** those objects, since there is no opportunity to add schema ** indexes on subqueries and views. */ pNew->rSetup = rLogSize + rSize; - if( pTab->pSelect==0 && (pTab->tabFlags & TF_Ephemeral)==0 ){ + if( !IsView(pTab) && (pTab->tabFlags & TF_Ephemeral)==0 ){ pNew->rSetup += 28; }else{ pNew->rSetup -= 10; @@ -153310,7 +154706,7 @@ static i8 wherePathSatisfiesOrderBy( if( obSat==obDone ) return (i8)nOrderBy; if( !isOrderDistinct ){ for(i=nOrderBy-1; i>0; i--){ - Bitmask m = MASKBIT(i) - 1; + Bitmask m = ALWAYS(ipWInfo; if( pWInfo->wctrlFlags & WHERE_OR_SUBCLAUSE ) return 0; @@ -153833,7 +155230,8 @@ static int whereShortCut(WhereLoopBuilder *pBuilder){ pLoop = pBuilder->pNew; pLoop->wsFlags = 0; pLoop->nSkip = 0; - pTerm = sqlite3WhereFindTerm(pWC, iCur, -1, 0, WO_EQ|WO_IS, 0); + pTerm = whereScanInit(&scan, pWC, iCur, -1, WO_EQ|WO_IS, 0); + while( pTerm && pTerm->prereqRight ) pTerm = whereScanNext(&scan); if( pTerm ){ testcase( pTerm->eOperator & WO_IS ); pLoop->wsFlags = WHERE_COLUMN_EQ|WHERE_IPK|WHERE_ONEROW; @@ -153852,7 +155250,8 @@ static int whereShortCut(WhereLoopBuilder *pBuilder){ ) continue; opMask = pIdx->uniqNotNull ? (WO_EQ|WO_IS) : WO_EQ; for(j=0; jnKeyCol; j++){ - pTerm = sqlite3WhereFindTerm(pWC, iCur, j, 0, opMask, pIdx); + pTerm = whereScanInit(&scan, pWC, iCur, j, opMask, pIdx); + while( pTerm && pTerm->prereqRight ) pTerm = whereScanNext(&scan); if( pTerm==0 ) break; testcase( pTerm->eOperator & WO_IS ); pLoop->aLTerm[j] = pTerm; @@ -153881,8 +155280,14 @@ static int whereShortCut(WhereLoopBuilder *pBuilder){ if( pWInfo->wctrlFlags & WHERE_WANT_DISTINCT ){ pWInfo->eDistinct = WHERE_DISTINCT_UNIQUE; } + if( scan.iEquiv>1 ) pLoop->wsFlags |= WHERE_TRANSCONS; #ifdef SQLITE_DEBUG pLoop->cId = '0'; +#endif +#ifdef WHERETRACE_ENABLED + if( sqlite3WhereTrace ){ + sqlite3DebugPrintf("whereShortCut() used to compute solution\n"); + } #endif return 1; } @@ -154439,7 +155844,7 @@ SQLITE_PRIVATE WhereInfo *sqlite3WhereBegin( pTab = pTabItem->pTab; iDb = sqlite3SchemaToIndex(db, pTab->pSchema); pLoop = pLevel->pWLoop; - if( (pTab->tabFlags & TF_Ephemeral)!=0 || pTab->pSelect ){ + if( (pTab->tabFlags & TF_Ephemeral)!=0 || IsView(pTab) ){ /* Do nothing */ }else #ifndef SQLITE_OMIT_VIRTUALTABLE @@ -154564,6 +155969,7 @@ SQLITE_PRIVATE WhereInfo *sqlite3WhereBegin( for(ii=0; iinErr ) goto whereBeginError; pLevel = &pWInfo->a[ii]; wsFlags = pLevel->pWLoop->wsFlags; #ifndef SQLITE_OMIT_AUTOMATIC_INDEX @@ -154685,7 +156091,7 @@ SQLITE_PRIVATE void sqlite3WhereEnd(WhereInfo *pWInfo){ }else{ sqlite3VdbeResolveLabel(v, pLevel->addrCont); } - if( pLoop->wsFlags & WHERE_IN_ABLE && pLevel->u.in.nIn>0 ){ + if( (pLoop->wsFlags & WHERE_IN_ABLE)!=0 && pLevel->u.in.nIn>0 ){ struct InLoop *pIn; int j; sqlite3VdbeResolveLabel(v, pLevel->addrNxt); @@ -154754,8 +156160,14 @@ SQLITE_PRIVATE void sqlite3WhereEnd(WhereInfo *pWInfo){ sqlite3VdbeAddOp1(v, OP_NullRow, pLevel->iTabCur); } if( (ws & WHERE_INDEXED) - || ((ws & WHERE_MULTI_OR) && pLevel->u.pCovidx) + || ((ws & WHERE_MULTI_OR) && pLevel->u.pCoveringIdx) ){ + if( ws & WHERE_MULTI_OR ){ + Index *pIx = pLevel->u.pCoveringIdx; + int iDb = sqlite3SchemaToIndex(db, pIx->pSchema); + 
sqlite3VdbeAddOp3(v, OP_ReopenIdx, pLevel->iIdxCur, pIx->tnum, iDb); + sqlite3VdbeSetP4KeyInfo(pParse, pIx); + } sqlite3VdbeAddOp1(v, OP_NullRow, pLevel->iIdxCur); } if( pLevel->op==OP_Return ){ @@ -154802,7 +156214,7 @@ SQLITE_PRIVATE void sqlite3WhereEnd(WhereInfo *pWInfo){ ** created for the ONEPASS optimization. */ if( (pTab->tabFlags & TF_Ephemeral)==0 - && pTab->pSelect==0 + && !IsView(pTab) && (pWInfo->wctrlFlags & WHERE_OR_SUBCLAUSE)==0 ){ int ws = pLoop->wsFlags; @@ -154832,7 +156244,7 @@ SQLITE_PRIVATE void sqlite3WhereEnd(WhereInfo *pWInfo){ if( pLoop->wsFlags & (WHERE_INDEXED|WHERE_IDX_ONLY) ){ pIdx = pLoop->u.btree.pIndex; }else if( pLoop->wsFlags & WHERE_MULTI_OR ){ - pIdx = pLevel->u.pCovidx; + pIdx = pLevel->u.pCoveringIdx; } if( pIdx && !db->mallocFailed @@ -155493,7 +156905,7 @@ static void noopValueFunc(sqlite3_context *p){ UNUSED_PARAMETER(p); /*no-op*/ } /* Window functions that use all window interfaces: xStep, xFinal, ** xValue, and xInverse */ #define WINDOWFUNCALL(name,nArg,extra) { \ - nArg, (SQLITE_UTF8|SQLITE_FUNC_WINDOW|extra), 0, 0, \ + nArg, (SQLITE_FUNC_BUILTIN|SQLITE_UTF8|SQLITE_FUNC_WINDOW|extra), 0, 0, \ name ## StepFunc, name ## FinalizeFunc, name ## ValueFunc, \ name ## InvFunc, name ## Name, {0} \ } @@ -155501,7 +156913,7 @@ static void noopValueFunc(sqlite3_context *p){ UNUSED_PARAMETER(p); /*no-op*/ } /* Window functions that are implemented using bytecode and thus have ** no-op routines for their methods */ #define WINDOWFUNCNOOP(name,nArg,extra) { \ - nArg, (SQLITE_UTF8|SQLITE_FUNC_WINDOW|extra), 0, 0, \ + nArg, (SQLITE_FUNC_BUILTIN|SQLITE_UTF8|SQLITE_FUNC_WINDOW|extra), 0, 0, \ noopStepFunc, noopValueFunc, noopValueFunc, \ noopStepFunc, name ## Name, {0} \ } @@ -155510,7 +156922,7 @@ static void noopValueFunc(sqlite3_context *p){ UNUSED_PARAMETER(p); /*no-op*/ } ** same routine for xFinalize and xValue and which never call ** xInverse. */ #define WINDOWFUNCX(name,nArg,extra) { \ - nArg, (SQLITE_UTF8|SQLITE_FUNC_WINDOW|extra), 0, 0, \ + nArg, (SQLITE_FUNC_BUILTIN|SQLITE_UTF8|SQLITE_FUNC_WINDOW|extra), 0, 0, \ name ## StepFunc, name ## ValueFunc, name ## ValueFunc, \ noopStepFunc, name ## Name, {0} \ } @@ -155820,9 +157232,7 @@ static ExprList *exprListAppendList( if( bIntToNull ){ int iDummy; Expr *pSub; - for(pSub=pDup; ExprHasProperty(pSub, EP_Skip); pSub=pSub->pLeft){ - assert( pSub ); - } + pSub = sqlite3ExprSkipCollateAndLikely(pDup); if( sqlite3ExprIsInteger(pSub, &iDummy) ){ pSub->op = TK_NULL; pSub->flags &= ~(EP_IntValue|EP_IsTrue|EP_IsFalse); @@ -155855,7 +157265,8 @@ static int sqlite3WindowExtraAggFuncDepth(Walker *pWalker, Expr *pExpr){ static int disallowAggregatesInOrderByCb(Walker *pWalker, Expr *pExpr){ if( pExpr->op==TK_AGG_FUNCTION && pExpr->pAggInfo==0 ){ - sqlite3ErrorMsg(pWalker->pParse, + assert( !ExprHasProperty(pExpr, EP_IntValue) ); + sqlite3ErrorMsg(pWalker->pParse, "misuse of aggregate: %s()", pExpr->u.zToken); } return WRC_Continue; @@ -155943,7 +157354,9 @@ SQLITE_PRIVATE int sqlite3WindowRewrite(Parse *pParse, Select *p){ ** window function - one for the accumulator, another for interim ** results. */ for(pWin=pMWin; pWin; pWin=pWin->pNextWin){ - ExprList *pArgs = pWin->pOwner->x.pList; + ExprList *pArgs; + assert( ExprUseXList(pWin->pOwner) ); + pArgs = pWin->pOwner->x.pList; if( pWin->pFunc->funcFlags & SQLITE_FUNC_SUBTYPE ){ selectWindowRewriteEList(pParse, pMWin, pSrc, pArgs, pTab, &pSublist); pWin->iArgCol = (pSublist ? 
pSublist->nExpr : 0); @@ -155980,11 +157393,14 @@ SQLITE_PRIVATE int sqlite3WindowRewrite(Parse *pParse, Select *p){ ("New window-function subquery in FROM clause of (%u/%p)\n", p->selId, p)); p->pSrc = sqlite3SrcListAppend(pParse, 0, 0, 0); + assert( pSub!=0 || p->pSrc==0 ); /* Due to db->mallocFailed test inside + ** of sqlite3DbMallocRawNN() called from + ** sqlite3SrcListAppend() */ if( p->pSrc ){ Table *pTab2; p->pSrc->a[0].pSelect = pSub; sqlite3SrcListAssignCursors(pParse, p->pSrc); - pSub->selFlags |= SF_Expanded; + pSub->selFlags |= SF_Expanded|SF_OrderByReqd; pTab2 = sqlite3ResultSetOfSelect(pParse, pSub, SQLITE_AFF_NONE); pSub->selFlags |= (selFlags & SF_Aggregate); if( pTab2==0 ){ @@ -156007,7 +157423,11 @@ SQLITE_PRIVATE int sqlite3WindowRewrite(Parse *pParse, Select *p){ sqlite3SelectDelete(db, pSub); } if( db->mallocFailed ) rc = SQLITE_NOMEM; - sqlite3DbFree(db, pTab); + + /* Defer deleting the temporary table pTab because if an error occurred, + ** there could still be references to that table embedded in the + ** result-set or ORDER BY clause of the SELECT statement p. */ + sqlite3ParserAddCleanup(pParse, sqlite3DbFree, pTab); } if( rc ){ @@ -156256,7 +157676,12 @@ SQLITE_PRIVATE void sqlite3WindowLink(Select *pSel, Window *pWin){ ** different, or 2 if it cannot be determined if the objects are identical ** or not. Identical window objects can be processed in a single scan. */ -SQLITE_PRIVATE int sqlite3WindowCompare(Parse *pParse, Window *p1, Window *p2, int bFilter){ +SQLITE_PRIVATE int sqlite3WindowCompare( + const Parse *pParse, + const Window *p1, + const Window *p2, + int bFilter +){ int res; if( NEVER(p1==0) || NEVER(p2==0) ) return 1; if( p1->eFrmType!=p2->eFrmType ) return 1; @@ -156328,8 +157753,11 @@ SQLITE_PRIVATE void sqlite3WindowCodeInit(Parse *pParse, Select *pSelect){ ** regApp+1: integer value used to ensure keys are unique ** regApp+2: output of MakeRecord */ - ExprList *pList = pWin->pOwner->x.pList; - KeyInfo *pKeyInfo = sqlite3KeyInfoFromExprList(pParse, pList, 0, 0); + ExprList *pList; + KeyInfo *pKeyInfo; + assert( ExprUseXList(pWin->pOwner) ); + pList = pWin->pOwner->x.pList; + pKeyInfo = sqlite3KeyInfoFromExprList(pParse, pList, 0, 0); pWin->csrApp = pParse->nTab++; pWin->regApp = pParse->nMem+1; pParse->nMem += 3; @@ -156417,7 +157845,9 @@ static void windowCheckValue(Parse *pParse, int reg, int eCond){ ** with the object passed as the only argument to this function. */ static int windowArgCount(Window *pWin){ - ExprList *pList = pWin->pOwner->x.pList; + const ExprList *pList; + assert( ExprUseXList(pWin->pOwner) ); + pList = pWin->pOwner->x.pList; return (pList ? 
pList->nExpr : 0); } @@ -156602,6 +158032,7 @@ static void windowAggStep( int addrIf = 0; if( pWin->pFilter ){ int regTmp; + assert( ExprUseXList(pWin->pOwner) ); assert( pWin->bExprArgs || !nArg ||nArg==pWin->pOwner->x.pList->nExpr ); assert( pWin->bExprArgs || nArg ||pWin->pOwner->x.pList==0 ); regTmp = sqlite3GetTempReg(pParse); @@ -156615,6 +158046,7 @@ static void windowAggStep( int iOp = sqlite3VdbeCurrentAddr(v); int iEnd; + assert( ExprUseXList(pWin->pOwner) ); nArg = pWin->pOwner->x.pList->nExpr; regArg = sqlite3GetTempRange(pParse, nArg); sqlite3ExprCodeExprList(pParse, pWin->pOwner->x.pList, regArg, 0, 0); @@ -156629,6 +158061,7 @@ static void windowAggStep( if( pFunc->funcFlags & SQLITE_FUNC_NEEDCOLL ){ CollSeq *pColl; assert( nArg>0 ); + assert( ExprUseXList(pWin->pOwner) ); pColl = sqlite3ExprNNCollSeq(pParse, pWin->pOwner->x.pList->a[0].pExpr); sqlite3VdbeAddOp4(v, OP_CollSeq, 0,0,0, (const char*)pColl, P4_COLLSEQ); } @@ -156814,6 +158247,7 @@ static void windowReturnOneRow(WindowCodeArg *p){ for(pWin=pMWin; pWin; pWin=pWin->pNextWin){ FuncDef *pFunc = pWin->pFunc; + assert( ExprUseXList(pWin->pOwner) ); if( pFunc->zName==nth_valueName || pFunc->zName==first_valueName ){ @@ -158163,9 +159597,9 @@ static void updateDeleteLimitError( ExprClearVVAProperties(p); p->iAgg = -1; p->pLeft = p->pRight = 0; - p->x.pList = 0; p->pAggInfo = 0; - p->y.pTab = 0; + memset(&p->x, 0, sizeof(p->x)); + memset(&p->y, 0, sizeof(p->y)); p->op2 = 0; p->iTable = 0; p->iColumn = 0; @@ -158251,8 +159685,8 @@ static void updateDeleteLimitError( #define TK_LP 22 #define TK_RP 23 #define TK_AS 24 -#define TK_WITHOUT 25 -#define TK_COMMA 26 +#define TK_COMMA 25 +#define TK_WITHOUT 26 #define TK_ABORT 27 #define TK_ACTION 28 #define TK_AFTER 29 @@ -158469,29 +159903,30 @@ static void updateDeleteLimitError( #endif /************* Begin control #defines *****************************************/ #define YYCODETYPE unsigned short int -#define YYNOCODE 317 +#define YYNOCODE 318 #define YYACTIONTYPE unsigned short int #define YYWILDCARD 101 #define sqlite3ParserTOKENTYPE Token typedef union { int yyinit; sqlite3ParserTOKENTYPE yy0; - Window* yy49; - ExprList* yy70; - Select* yy81; - With* yy103; - struct FrameBound yy117; - struct {int value; int mask;} yy139; - SrcList* yy153; - TriggerStep* yy157; - Upsert* yy190; - struct TrigEvent yy262; - Cte* yy329; - int yy376; - Expr* yy404; - IdList* yy436; - const char* yy504; - u8 yy552; + With* yy43; + u32 yy51; + int yy64; + struct FrameBound yy81; + struct {int value; int mask;} yy83; + TriggerStep* yy95; + Upsert* yy138; + IdList* yy240; + Cte* yy255; + Select* yy303; + Window* yy375; + u8 yy534; + ExprList* yy562; + struct TrigEvent yy570; + const char* yy600; + SrcList* yy607; + Expr* yy626; } YYMINORTYPE; #ifndef YYSTACKDEPTH #define YYSTACKDEPTH 100 @@ -158507,18 +159942,18 @@ typedef union { #define sqlite3ParserCTX_FETCH Parse *pParse=yypParser->pParse; #define sqlite3ParserCTX_STORE yypParser->pParse=pParse; #define YYFALLBACK 1 -#define YYNSTATE 570 -#define YYNRULE 398 -#define YYNRULE_WITH_ACTION 337 +#define YYNSTATE 572 +#define YYNRULE 401 +#define YYNRULE_WITH_ACTION 339 #define YYNTOKEN 184 -#define YY_MAX_SHIFT 569 -#define YY_MIN_SHIFTREDUCE 825 -#define YY_MAX_SHIFTREDUCE 1222 -#define YY_ERROR_ACTION 1223 -#define YY_ACCEPT_ACTION 1224 -#define YY_NO_ACTION 1225 -#define YY_MIN_REDUCE 1226 -#define YY_MAX_REDUCE 1623 +#define YY_MAX_SHIFT 571 +#define YY_MIN_SHIFTREDUCE 829 +#define YY_MAX_SHIFTREDUCE 1229 +#define YY_ERROR_ACTION 1230 
+#define YY_ACCEPT_ACTION 1231 +#define YY_NO_ACTION 1232 +#define YY_MIN_REDUCE 1233 +#define YY_MAX_REDUCE 1633 /************* End control #defines *******************************************/ #define YY_NLOOKAHEAD ((int)(sizeof(yy_lookahead)/sizeof(yy_lookahead[0]))) @@ -158585,444 +160020,447 @@ typedef union { ** yy_default[] Default action for each state. ** *********** Begin parsing tables **********************************************/ -#define YY_ACTTAB_COUNT (2023) +#define YY_ACTTAB_COUNT (2037) static const YYACTIONTYPE yy_action[] = { - /* 0 */ 563, 1295, 563, 1274, 168, 1257, 115, 112, 218, 373, - /* 10 */ 563, 1295, 374, 563, 488, 563, 115, 112, 218, 406, - /* 20 */ 1300, 1300, 41, 41, 41, 41, 514, 1504, 520, 1298, - /* 30 */ 1298, 959, 41, 41, 1260, 71, 71, 51, 51, 960, - /* 40 */ 557, 557, 557, 122, 123, 113, 1200, 1200, 1035, 1038, - /* 50 */ 1028, 1028, 120, 120, 121, 121, 121, 121, 414, 406, - /* 60 */ 273, 273, 273, 273, 115, 112, 218, 115, 112, 218, - /* 70 */ 197, 268, 545, 560, 515, 560, 211, 563, 385, 248, - /* 80 */ 215, 521, 399, 122, 123, 113, 1200, 1200, 1035, 1038, - /* 90 */ 1028, 1028, 120, 120, 121, 121, 121, 121, 540, 13, - /* 100 */ 13, 1259, 119, 119, 119, 119, 118, 118, 117, 117, - /* 110 */ 117, 116, 441, 1176, 419, 197, 446, 320, 512, 1539, - /* 120 */ 1545, 372, 1547, 6, 371, 1176, 1148, 394, 1148, 406, - /* 130 */ 1545, 534, 115, 112, 218, 1415, 99, 30, 121, 121, + /* 0 */ 564, 115, 112, 220, 169, 199, 115, 112, 220, 564, + /* 10 */ 375, 1266, 564, 376, 564, 270, 1309, 1309, 406, 407, + /* 20 */ 1084, 199, 1513, 41, 41, 515, 489, 521, 558, 558, + /* 30 */ 558, 965, 41, 41, 395, 41, 41, 51, 51, 966, + /* 40 */ 296, 1269, 296, 122, 123, 113, 1207, 1207, 1041, 1044, + /* 50 */ 1034, 1034, 120, 120, 121, 121, 121, 121, 564, 407, + /* 60 */ 275, 275, 275, 275, 1268, 115, 112, 220, 115, 112, + /* 70 */ 220, 1512, 846, 561, 516, 561, 115, 112, 220, 250, + /* 80 */ 217, 71, 71, 122, 123, 113, 1207, 1207, 1041, 1044, + /* 90 */ 1034, 1034, 120, 120, 121, 121, 121, 121, 440, 440, + /* 100 */ 440, 1149, 119, 119, 119, 119, 118, 118, 117, 117, + /* 110 */ 117, 116, 442, 1183, 1149, 116, 442, 1149, 546, 513, + /* 120 */ 1548, 1554, 374, 442, 6, 1183, 1154, 522, 1154, 407, + /* 130 */ 1556, 461, 373, 1554, 535, 99, 463, 332, 121, 121, /* 140 */ 121, 121, 119, 119, 119, 119, 118, 118, 117, 117, - /* 150 */ 117, 116, 441, 122, 123, 113, 1200, 1200, 1035, 1038, - /* 160 */ 1028, 1028, 120, 120, 121, 121, 121, 121, 31, 1176, - /* 170 */ 1177, 1178, 241, 357, 1558, 501, 498, 497, 317, 124, - /* 180 */ 319, 1176, 1177, 1178, 1176, 496, 119, 119, 119, 119, - /* 190 */ 118, 118, 117, 117, 117, 116, 441, 139, 96, 406, - /* 200 */ 121, 121, 121, 121, 114, 117, 117, 117, 116, 441, - /* 210 */ 541, 1532, 119, 119, 119, 119, 118, 118, 117, 117, - /* 220 */ 117, 116, 441, 122, 123, 113, 1200, 1200, 1035, 1038, - /* 230 */ 1028, 1028, 120, 120, 121, 121, 121, 121, 406, 441, - /* 240 */ 1176, 1177, 1178, 81, 439, 439, 439, 80, 119, 119, - /* 250 */ 119, 119, 118, 118, 117, 117, 117, 116, 441, 488, - /* 260 */ 1176, 318, 122, 123, 113, 1200, 1200, 1035, 1038, 1028, - /* 270 */ 1028, 120, 120, 121, 121, 121, 121, 493, 1025, 1025, - /* 280 */ 1036, 1039, 119, 119, 119, 119, 118, 118, 117, 117, - /* 290 */ 117, 116, 441, 1584, 995, 1224, 1, 1, 569, 2, - /* 300 */ 1228, 1267, 137, 1503, 245, 305, 473, 140, 406, 860, - /* 310 */ 561, 1176, 914, 914, 1308, 359, 1176, 1177, 1178, 462, - /* 320 */ 330, 119, 119, 119, 119, 118, 118, 117, 117, 117, - /* 330 */ 116, 441, 122, 123, 113, 
1200, 1200, 1035, 1038, 1028, - /* 340 */ 1028, 120, 120, 121, 121, 121, 121, 328, 273, 273, - /* 350 */ 1015, 83, 1029, 425, 1564, 569, 2, 1228, 304, 554, - /* 360 */ 925, 560, 305, 944, 140, 860, 1006, 1176, 1177, 1178, - /* 370 */ 1005, 1308, 411, 213, 511, 229, 119, 119, 119, 119, - /* 380 */ 118, 118, 117, 117, 117, 116, 441, 519, 347, 116, - /* 390 */ 441, 119, 119, 119, 119, 118, 118, 117, 117, 117, - /* 400 */ 116, 441, 1005, 1005, 1007, 273, 273, 445, 563, 16, - /* 410 */ 16, 1590, 563, 1540, 563, 406, 1176, 6, 560, 344, - /* 420 */ 182, 118, 118, 117, 117, 117, 116, 441, 416, 142, - /* 430 */ 71, 71, 229, 563, 71, 71, 55, 55, 203, 122, - /* 440 */ 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, 120, - /* 450 */ 121, 121, 121, 121, 217, 13, 13, 1176, 406, 568, - /* 460 */ 1400, 1228, 502, 137, 445, 168, 305, 545, 140, 1180, - /* 470 */ 424, 545, 1176, 1177, 1178, 1308, 544, 438, 437, 944, - /* 480 */ 513, 452, 122, 123, 113, 1200, 1200, 1035, 1038, 1028, - /* 490 */ 1028, 120, 120, 121, 121, 121, 121, 315, 119, 119, - /* 500 */ 119, 119, 118, 118, 117, 117, 117, 116, 441, 273, - /* 510 */ 273, 1143, 416, 1176, 1177, 1178, 543, 563, 1143, 304, - /* 520 */ 554, 1561, 560, 1207, 1143, 1207, 1180, 1143, 406, 530, - /* 530 */ 421, 1143, 864, 183, 1143, 143, 229, 562, 32, 71, - /* 540 */ 71, 119, 119, 119, 119, 118, 118, 117, 117, 117, - /* 550 */ 116, 441, 122, 123, 113, 1200, 1200, 1035, 1038, 1028, - /* 560 */ 1028, 120, 120, 121, 121, 121, 121, 406, 445, 241, - /* 570 */ 1176, 857, 501, 498, 497, 1176, 526, 189, 245, 538, - /* 580 */ 1539, 282, 496, 370, 6, 563, 529, 477, 5, 279, - /* 590 */ 1015, 122, 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, - /* 600 */ 120, 120, 121, 121, 121, 121, 1006, 13, 13, 1414, - /* 610 */ 1005, 119, 119, 119, 119, 118, 118, 117, 117, 117, - /* 620 */ 116, 441, 426, 273, 273, 1176, 1176, 1177, 1178, 1619, - /* 630 */ 392, 1176, 1177, 1178, 1176, 342, 560, 406, 525, 361, - /* 640 */ 430, 1161, 1005, 1005, 1007, 348, 411, 357, 1558, 488, + /* 150 */ 117, 116, 442, 122, 123, 113, 1207, 1207, 1041, 1044, + /* 160 */ 1034, 1034, 120, 120, 121, 121, 121, 121, 1257, 1183, + /* 170 */ 1184, 1185, 243, 1064, 564, 502, 499, 498, 567, 124, + /* 180 */ 567, 1183, 1184, 1185, 474, 497, 119, 119, 119, 119, + /* 190 */ 118, 118, 117, 117, 117, 116, 442, 70, 70, 407, + /* 200 */ 121, 121, 121, 121, 114, 117, 117, 117, 116, 442, + /* 210 */ 1409, 1469, 119, 119, 119, 119, 118, 118, 117, 117, + /* 220 */ 117, 116, 442, 122, 123, 113, 1207, 1207, 1041, 1044, + /* 230 */ 1034, 1034, 120, 120, 121, 121, 121, 121, 407, 1031, + /* 240 */ 1031, 1042, 1045, 81, 382, 541, 378, 80, 119, 119, + /* 250 */ 119, 119, 118, 118, 117, 117, 117, 116, 442, 381, + /* 260 */ 463, 332, 122, 123, 113, 1207, 1207, 1041, 1044, 1034, + /* 270 */ 1034, 120, 120, 121, 121, 121, 121, 262, 215, 512, + /* 280 */ 1424, 422, 119, 119, 119, 119, 118, 118, 117, 117, + /* 290 */ 117, 116, 442, 1231, 1, 1, 571, 2, 1235, 1573, + /* 300 */ 571, 2, 1235, 307, 1149, 141, 1600, 307, 407, 141, + /* 310 */ 1183, 361, 1317, 1035, 866, 531, 1317, 1149, 359, 1567, + /* 320 */ 1149, 119, 119, 119, 119, 118, 118, 117, 117, 117, + /* 330 */ 116, 442, 122, 123, 113, 1207, 1207, 1041, 1044, 1034, + /* 340 */ 1034, 120, 120, 121, 121, 121, 121, 275, 275, 1001, + /* 350 */ 426, 275, 275, 1128, 1627, 1021, 1627, 137, 542, 1541, + /* 360 */ 561, 272, 950, 950, 561, 1423, 1183, 1184, 1185, 1594, + /* 370 */ 866, 1012, 530, 315, 231, 1011, 468, 1276, 231, 119, + /* 380 */ 119, 119, 119, 118, 118, 117, 117, 117, 116, 442, + 
/* 390 */ 1570, 119, 119, 119, 119, 118, 118, 117, 117, 117, + /* 400 */ 116, 442, 330, 359, 1567, 564, 446, 1011, 1011, 1013, + /* 410 */ 446, 207, 564, 306, 555, 407, 363, 1021, 363, 346, + /* 420 */ 184, 118, 118, 117, 117, 117, 116, 442, 71, 71, + /* 430 */ 439, 438, 1126, 1012, 472, 71, 71, 1011, 205, 122, + /* 440 */ 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, 120, + /* 450 */ 121, 121, 121, 121, 219, 219, 472, 1183, 407, 570, + /* 460 */ 1183, 1235, 503, 1477, 149, 546, 307, 489, 141, 1011, + /* 470 */ 1011, 1013, 546, 140, 545, 1317, 1214, 191, 1214, 950, + /* 480 */ 950, 514, 122, 123, 113, 1207, 1207, 1041, 1044, 1034, + /* 490 */ 1034, 120, 120, 121, 121, 121, 121, 563, 119, 119, + /* 500 */ 119, 119, 118, 118, 117, 117, 117, 116, 442, 283, + /* 510 */ 275, 275, 415, 1183, 1184, 1185, 1183, 1184, 1185, 372, + /* 520 */ 1183, 243, 344, 561, 502, 499, 498, 1539, 407, 1540, + /* 530 */ 1183, 288, 870, 143, 497, 1549, 185, 231, 9, 6, + /* 540 */ 253, 119, 119, 119, 119, 118, 118, 117, 117, 117, + /* 550 */ 116, 442, 122, 123, 113, 1207, 1207, 1041, 1044, 1034, + /* 560 */ 1034, 120, 120, 121, 121, 121, 121, 407, 137, 446, + /* 570 */ 447, 863, 169, 1183, 397, 1204, 1183, 1184, 1185, 931, + /* 580 */ 526, 1001, 98, 339, 564, 342, 1183, 1184, 1185, 306, + /* 590 */ 555, 122, 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, + /* 600 */ 120, 120, 121, 121, 121, 121, 452, 71, 71, 275, + /* 610 */ 275, 119, 119, 119, 119, 118, 118, 117, 117, 117, + /* 620 */ 116, 442, 561, 417, 306, 555, 1183, 1307, 1307, 1183, + /* 630 */ 1184, 1185, 1204, 1149, 330, 458, 318, 407, 363, 470, + /* 640 */ 431, 1167, 32, 541, 527, 350, 1149, 1629, 393, 1149, /* 650 */ 119, 119, 119, 119, 118, 118, 117, 117, 117, 116, - /* 660 */ 441, 122, 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, - /* 670 */ 120, 120, 121, 121, 121, 121, 406, 830, 831, 832, - /* 680 */ 1016, 1176, 1177, 1178, 396, 285, 148, 1312, 304, 554, - /* 690 */ 1176, 1177, 1178, 1467, 216, 3, 337, 137, 340, 560, - /* 700 */ 122, 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, - /* 710 */ 120, 121, 121, 121, 121, 563, 504, 946, 273, 273, + /* 660 */ 442, 122, 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, + /* 670 */ 120, 120, 121, 121, 121, 121, 407, 199, 472, 1183, + /* 680 */ 1022, 472, 1183, 1184, 1185, 386, 151, 539, 1548, 277, + /* 690 */ 400, 137, 6, 317, 5, 564, 562, 3, 920, 920, + /* 700 */ 122, 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, + /* 710 */ 120, 121, 121, 121, 121, 411, 505, 83, 71, 71, /* 720 */ 119, 119, 119, 119, 118, 118, 117, 117, 117, 116, - /* 730 */ 441, 560, 1176, 427, 563, 451, 98, 13, 13, 259, - /* 740 */ 276, 356, 507, 351, 506, 246, 406, 361, 469, 1530, - /* 750 */ 1000, 347, 293, 304, 554, 1589, 71, 71, 889, 119, - /* 760 */ 119, 119, 119, 118, 118, 117, 117, 117, 116, 441, - /* 770 */ 122, 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, - /* 780 */ 120, 121, 121, 121, 121, 406, 1143, 1078, 1176, 1177, - /* 790 */ 1178, 416, 1080, 300, 150, 995, 1080, 361, 361, 1143, - /* 800 */ 361, 378, 1143, 477, 563, 244, 243, 242, 1278, 122, - /* 810 */ 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, 120, - /* 820 */ 121, 121, 121, 121, 563, 880, 13, 13, 483, 119, - /* 830 */ 119, 119, 119, 118, 118, 117, 117, 117, 116, 441, - /* 840 */ 1176, 191, 540, 563, 147, 149, 13, 13, 328, 457, - /* 850 */ 316, 1083, 1083, 485, 1537, 406, 505, 1530, 6, 1514, - /* 860 */ 284, 192, 1277, 145, 881, 71, 71, 488, 119, 119, - /* 870 */ 119, 119, 118, 118, 117, 117, 117, 116, 441, 122, - /* 880 */ 123, 113, 1200, 1200, 1035, 
1038, 1028, 1028, 120, 120, - /* 890 */ 121, 121, 121, 121, 563, 471, 1176, 1177, 1178, 406, - /* 900 */ 852, 327, 301, 462, 330, 1516, 270, 1530, 1530, 944, - /* 910 */ 1531, 1307, 313, 9, 842, 251, 71, 71, 477, 428, - /* 920 */ 146, 488, 38, 945, 101, 113, 1200, 1200, 1035, 1038, - /* 930 */ 1028, 1028, 120, 120, 121, 121, 121, 121, 119, 119, - /* 940 */ 119, 119, 118, 118, 117, 117, 117, 116, 441, 563, - /* 950 */ 1197, 1099, 563, 436, 563, 1533, 563, 852, 1122, 1617, - /* 960 */ 454, 290, 1617, 546, 251, 1303, 1100, 267, 267, 281, - /* 970 */ 404, 70, 70, 460, 71, 71, 71, 71, 13, 13, - /* 980 */ 560, 1101, 119, 119, 119, 119, 118, 118, 117, 117, - /* 990 */ 117, 116, 441, 542, 104, 273, 273, 273, 273, 1197, - /* 1000 */ 217, 1468, 900, 471, 450, 563, 1473, 1197, 560, 447, - /* 1010 */ 560, 545, 901, 440, 406, 1058, 292, 274, 274, 198, - /* 1020 */ 547, 450, 449, 1473, 1475, 944, 455, 56, 56, 410, - /* 1030 */ 560, 1122, 1618, 379, 406, 1618, 404, 1120, 122, 123, - /* 1040 */ 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, 120, 121, - /* 1050 */ 121, 121, 121, 1460, 406, 12, 1197, 1512, 122, 123, - /* 1060 */ 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, 120, 121, - /* 1070 */ 121, 121, 121, 308, 471, 126, 359, 286, 122, 111, - /* 1080 */ 113, 1200, 1200, 1035, 1038, 1028, 1028, 120, 120, 121, - /* 1090 */ 121, 121, 121, 309, 450, 471, 1473, 119, 119, 119, - /* 1100 */ 119, 118, 118, 117, 117, 117, 116, 441, 1176, 563, - /* 1110 */ 1120, 482, 563, 312, 433, 479, 197, 119, 119, 119, - /* 1120 */ 119, 118, 118, 117, 117, 117, 116, 441, 405, 12, - /* 1130 */ 536, 15, 15, 478, 43, 43, 509, 119, 119, 119, - /* 1140 */ 119, 118, 118, 117, 117, 117, 116, 441, 289, 535, - /* 1150 */ 294, 563, 294, 391, 1220, 438, 437, 406, 1154, 403, - /* 1160 */ 402, 1400, 920, 1204, 1176, 1177, 1178, 919, 1206, 291, - /* 1170 */ 1306, 1249, 412, 57, 57, 488, 1205, 563, 556, 412, - /* 1180 */ 1176, 1344, 123, 113, 1200, 1200, 1035, 1038, 1028, 1028, - /* 1190 */ 120, 120, 121, 121, 121, 121, 1400, 1143, 563, 44, - /* 1200 */ 44, 1207, 194, 1207, 273, 273, 1400, 461, 537, 1154, - /* 1210 */ 1143, 108, 555, 1143, 4, 391, 1121, 560, 1538, 335, - /* 1220 */ 58, 58, 6, 1246, 1099, 380, 1400, 376, 558, 1536, - /* 1230 */ 563, 422, 1221, 6, 304, 554, 1176, 1177, 1178, 1100, + /* 730 */ 442, 1183, 218, 428, 1183, 1183, 1184, 1185, 363, 261, + /* 740 */ 278, 358, 508, 353, 507, 248, 407, 306, 555, 1539, + /* 750 */ 1006, 349, 363, 291, 489, 302, 293, 1542, 281, 119, + /* 760 */ 119, 119, 119, 118, 118, 117, 117, 117, 116, 442, + /* 770 */ 122, 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, + /* 780 */ 120, 121, 121, 121, 121, 407, 148, 1183, 1184, 1185, + /* 790 */ 1183, 1184, 1185, 275, 275, 1304, 1257, 1283, 483, 1476, + /* 800 */ 150, 489, 480, 564, 1187, 1304, 561, 1587, 1255, 122, + /* 810 */ 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, 120, + /* 820 */ 121, 121, 121, 121, 564, 886, 13, 13, 520, 119, + /* 830 */ 119, 119, 119, 118, 118, 117, 117, 117, 116, 442, + /* 840 */ 1183, 420, 417, 564, 269, 269, 1316, 13, 13, 1539, + /* 850 */ 1546, 16, 16, 322, 6, 407, 506, 561, 1089, 1089, + /* 860 */ 486, 1187, 425, 1539, 887, 292, 71, 71, 119, 119, + /* 870 */ 119, 119, 118, 118, 117, 117, 117, 116, 442, 122, + /* 880 */ 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, 120, + /* 890 */ 121, 121, 121, 121, 564, 12, 1183, 1184, 1185, 407, + /* 900 */ 275, 275, 451, 303, 834, 835, 836, 417, 489, 276, + /* 910 */ 276, 1547, 284, 561, 319, 6, 321, 71, 71, 429, + /* 920 */ 451, 450, 561, 952, 101, 113, 1207, 1207, 
1041, 1044, + /* 930 */ 1034, 1034, 120, 120, 121, 121, 121, 121, 119, 119, + /* 940 */ 119, 119, 118, 118, 117, 117, 117, 116, 442, 1105, + /* 950 */ 1183, 489, 564, 1312, 437, 455, 478, 564, 246, 245, + /* 960 */ 244, 1409, 1545, 547, 1106, 405, 6, 1544, 196, 1258, + /* 970 */ 413, 6, 105, 462, 103, 71, 71, 286, 564, 1107, + /* 980 */ 13, 13, 119, 119, 119, 119, 118, 118, 117, 117, + /* 990 */ 117, 116, 442, 451, 104, 427, 337, 320, 275, 275, + /* 1000 */ 906, 13, 13, 564, 1482, 1105, 1183, 1184, 1185, 126, + /* 1010 */ 907, 561, 546, 564, 407, 478, 295, 1321, 253, 200, + /* 1020 */ 1106, 548, 1482, 1484, 280, 1409, 55, 55, 1287, 561, + /* 1030 */ 478, 380, 423, 951, 407, 1107, 71, 71, 122, 123, + /* 1040 */ 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, 120, 121, + /* 1050 */ 121, 121, 121, 1204, 407, 287, 552, 309, 122, 123, + /* 1060 */ 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, 120, 121, + /* 1070 */ 121, 121, 121, 441, 1128, 1628, 146, 1628, 122, 111, + /* 1080 */ 113, 1207, 1207, 1041, 1044, 1034, 1034, 120, 120, 121, + /* 1090 */ 121, 121, 121, 404, 403, 1482, 424, 119, 119, 119, + /* 1100 */ 119, 118, 118, 117, 117, 117, 116, 442, 1183, 564, + /* 1110 */ 1204, 544, 1086, 858, 329, 361, 1086, 119, 119, 119, + /* 1120 */ 119, 118, 118, 117, 117, 117, 116, 442, 564, 294, + /* 1130 */ 144, 523, 56, 56, 224, 564, 510, 119, 119, 119, + /* 1140 */ 119, 118, 118, 117, 117, 117, 116, 442, 484, 1409, + /* 1150 */ 537, 15, 15, 1126, 434, 439, 438, 407, 13, 13, + /* 1160 */ 1523, 12, 926, 1211, 1183, 1184, 1185, 925, 1213, 536, + /* 1170 */ 858, 557, 413, 193, 1525, 494, 1212, 448, 1160, 1222, + /* 1180 */ 1183, 564, 123, 113, 1207, 1207, 1041, 1044, 1034, 1034, + /* 1190 */ 120, 120, 121, 121, 121, 121, 1521, 1149, 564, 965, + /* 1200 */ 564, 1214, 247, 1214, 13, 13, 1409, 966, 538, 564, + /* 1210 */ 1149, 108, 556, 1149, 4, 310, 392, 1227, 17, 194, + /* 1220 */ 485, 43, 43, 57, 57, 306, 555, 524, 559, 1160, + /* 1230 */ 464, 564, 44, 44, 392, 1127, 1183, 1184, 1185, 479, /* 1240 */ 119, 119, 119, 119, 118, 118, 117, 117, 117, 116, - /* 1250 */ 441, 442, 59, 59, 1101, 516, 1535, 273, 273, 563, - /* 1260 */ 6, 563, 110, 552, 563, 528, 423, 413, 169, 548, - /* 1270 */ 560, 108, 555, 137, 4, 551, 484, 272, 215, 222, - /* 1280 */ 211, 60, 60, 61, 61, 98, 62, 62, 558, 273, - /* 1290 */ 273, 563, 1015, 467, 1221, 563, 434, 563, 106, 106, - /* 1300 */ 8, 920, 560, 273, 273, 107, 919, 442, 565, 564, - /* 1310 */ 563, 442, 1005, 45, 45, 464, 560, 46, 46, 47, - /* 1320 */ 47, 84, 202, 552, 1215, 404, 468, 563, 205, 304, - /* 1330 */ 554, 563, 49, 49, 563, 522, 404, 532, 563, 867, - /* 1340 */ 563, 105, 531, 103, 1005, 1005, 1007, 1008, 27, 50, - /* 1350 */ 50, 563, 1015, 63, 63, 475, 64, 64, 106, 106, - /* 1360 */ 65, 65, 14, 14, 17, 107, 563, 442, 565, 564, - /* 1370 */ 563, 303, 1005, 66, 66, 563, 226, 563, 959, 563, - /* 1380 */ 543, 404, 1196, 1343, 871, 278, 960, 456, 128, 128, - /* 1390 */ 563, 1065, 67, 67, 563, 206, 867, 52, 52, 68, - /* 1400 */ 68, 69, 69, 417, 1005, 1005, 1007, 1008, 27, 1563, - /* 1410 */ 1165, 444, 53, 53, 277, 1519, 156, 156, 307, 389, - /* 1420 */ 389, 388, 262, 386, 1165, 444, 839, 321, 277, 108, - /* 1430 */ 555, 523, 4, 389, 389, 388, 262, 386, 563, 223, - /* 1440 */ 839, 311, 326, 1492, 1117, 98, 558, 393, 1065, 310, - /* 1450 */ 563, 476, 563, 223, 563, 311, 879, 878, 1009, 277, - /* 1460 */ 157, 157, 463, 310, 389, 389, 388, 262, 386, 442, - /* 1470 */ 518, 839, 76, 76, 54, 54, 72, 72, 355, 225, - /* 1480 */ 563, 552, 275, 563, 223, 325, 311, 161, 354, 465, - /* 
1490 */ 135, 563, 228, 225, 310, 532, 563, 206, 886, 887, - /* 1500 */ 533, 161, 129, 129, 135, 73, 73, 224, 962, 963, - /* 1510 */ 1015, 563, 287, 130, 130, 1009, 106, 106, 131, 131, - /* 1520 */ 563, 224, 563, 107, 225, 442, 565, 564, 997, 1276, - /* 1530 */ 1005, 250, 161, 127, 127, 135, 108, 555, 1077, 4, - /* 1540 */ 1077, 407, 155, 155, 154, 154, 304, 554, 1126, 563, - /* 1550 */ 1331, 563, 224, 558, 470, 407, 563, 250, 563, 1491, - /* 1560 */ 304, 554, 1005, 1005, 1007, 1008, 27, 563, 480, 332, - /* 1570 */ 448, 136, 136, 134, 134, 1340, 442, 336, 132, 132, - /* 1580 */ 133, 133, 563, 1076, 448, 1076, 407, 563, 552, 75, - /* 1590 */ 75, 304, 554, 339, 341, 343, 108, 555, 563, 4, - /* 1600 */ 1577, 299, 532, 563, 77, 77, 1291, 531, 472, 74, - /* 1610 */ 74, 250, 1275, 558, 350, 448, 331, 1015, 360, 98, - /* 1620 */ 42, 42, 1352, 106, 106, 48, 48, 1399, 494, 1327, - /* 1630 */ 107, 247, 442, 565, 564, 345, 442, 1005, 98, 1061, - /* 1640 */ 953, 917, 247, 250, 110, 1552, 550, 850, 552, 918, - /* 1650 */ 144, 1338, 110, 549, 1405, 1256, 1248, 1237, 1236, 1238, - /* 1660 */ 1571, 1324, 208, 390, 489, 265, 363, 200, 365, 1005, - /* 1670 */ 1005, 1007, 1008, 27, 11, 280, 221, 1015, 323, 474, - /* 1680 */ 1274, 367, 212, 106, 106, 924, 1386, 324, 288, 1381, - /* 1690 */ 107, 453, 442, 565, 564, 283, 329, 1005, 1391, 499, - /* 1700 */ 353, 1374, 1464, 108, 555, 1463, 4, 1574, 1390, 397, - /* 1710 */ 1215, 171, 254, 369, 383, 207, 195, 196, 1511, 553, - /* 1720 */ 558, 1509, 415, 1212, 100, 555, 83, 4, 204, 1005, - /* 1730 */ 1005, 1007, 1008, 27, 180, 166, 173, 219, 79, 82, - /* 1740 */ 458, 558, 175, 442, 35, 1387, 176, 459, 177, 178, - /* 1750 */ 492, 231, 96, 1469, 395, 552, 1393, 1392, 36, 466, - /* 1760 */ 1395, 184, 398, 481, 442, 1458, 235, 89, 1480, 487, - /* 1770 */ 266, 334, 237, 188, 490, 400, 552, 338, 238, 508, - /* 1780 */ 1239, 239, 1294, 1293, 1015, 1292, 1285, 429, 91, 871, - /* 1790 */ 106, 106, 1588, 213, 401, 1587, 431, 107, 1264, 442, - /* 1800 */ 565, 564, 1263, 352, 1005, 1015, 1262, 1586, 1557, 517, - /* 1810 */ 432, 106, 106, 1284, 297, 298, 358, 524, 107, 1335, - /* 1820 */ 442, 565, 564, 95, 1336, 1005, 252, 253, 435, 125, - /* 1830 */ 543, 1543, 10, 1444, 377, 1542, 1005, 1005, 1007, 1008, - /* 1840 */ 27, 97, 527, 375, 362, 102, 260, 364, 381, 1317, - /* 1850 */ 382, 1334, 366, 1245, 1333, 1316, 368, 1005, 1005, 1007, - /* 1860 */ 1008, 27, 1359, 1358, 34, 199, 1171, 566, 261, 263, - /* 1870 */ 264, 567, 1234, 158, 1229, 141, 295, 159, 1496, 302, - /* 1880 */ 1497, 1495, 1494, 160, 826, 209, 443, 201, 306, 210, - /* 1890 */ 78, 220, 1075, 138, 1073, 314, 162, 172, 1196, 227, - /* 1900 */ 174, 903, 322, 230, 1089, 179, 163, 164, 418, 408, - /* 1910 */ 409, 170, 181, 85, 86, 420, 87, 165, 1092, 88, - /* 1920 */ 233, 232, 1088, 151, 18, 234, 1081, 250, 333, 1209, - /* 1930 */ 185, 486, 236, 186, 37, 841, 491, 354, 240, 346, - /* 1940 */ 495, 187, 90, 869, 19, 20, 500, 503, 349, 92, - /* 1950 */ 167, 152, 296, 882, 93, 510, 94, 1159, 153, 1041, - /* 1960 */ 1128, 39, 214, 269, 1127, 271, 249, 952, 190, 947, - /* 1970 */ 110, 1149, 21, 7, 1153, 22, 1145, 23, 1147, 24, - /* 1980 */ 1133, 25, 1152, 33, 539, 193, 26, 1056, 98, 1042, - /* 1990 */ 1040, 1044, 1098, 1045, 1097, 256, 255, 28, 40, 257, - /* 2000 */ 1010, 851, 109, 29, 913, 559, 384, 387, 258, 1167, - /* 2010 */ 1166, 1225, 1225, 1225, 1579, 1225, 1225, 1225, 1225, 1225, - /* 2020 */ 1225, 1225, 1578, + /* 1250 */ 442, 443, 564, 327, 13, 13, 564, 418, 1315, 414, + /* 1260 */ 171, 564, 311, 553, 213, 529, 1253, 
564, 517, 543, + /* 1270 */ 412, 108, 556, 137, 4, 58, 58, 435, 314, 59, + /* 1280 */ 59, 274, 217, 549, 60, 60, 349, 476, 559, 1353, + /* 1290 */ 61, 61, 1021, 275, 275, 1228, 213, 564, 106, 106, + /* 1300 */ 8, 275, 275, 275, 275, 107, 561, 443, 566, 565, + /* 1310 */ 564, 443, 1011, 1228, 561, 564, 561, 564, 275, 275, + /* 1320 */ 62, 62, 1352, 553, 247, 456, 564, 98, 110, 306, + /* 1330 */ 555, 561, 564, 45, 45, 405, 1203, 533, 46, 46, + /* 1340 */ 47, 47, 532, 465, 1011, 1011, 1013, 1014, 27, 49, + /* 1350 */ 49, 564, 1021, 405, 469, 50, 50, 564, 106, 106, + /* 1360 */ 305, 564, 84, 204, 405, 107, 564, 443, 566, 565, + /* 1370 */ 405, 564, 1011, 564, 63, 63, 564, 1599, 564, 895, + /* 1380 */ 64, 64, 457, 477, 65, 65, 147, 96, 38, 14, + /* 1390 */ 14, 1528, 412, 564, 66, 66, 128, 128, 926, 67, + /* 1400 */ 67, 52, 52, 925, 1011, 1011, 1013, 1014, 27, 1572, + /* 1410 */ 1171, 445, 208, 1123, 279, 394, 68, 68, 228, 390, + /* 1420 */ 390, 389, 264, 387, 1171, 445, 843, 877, 279, 108, + /* 1430 */ 556, 453, 4, 390, 390, 389, 264, 387, 564, 225, + /* 1440 */ 843, 313, 328, 1003, 98, 252, 559, 544, 471, 312, + /* 1450 */ 252, 564, 208, 225, 564, 313, 473, 30, 252, 279, + /* 1460 */ 466, 69, 69, 312, 390, 390, 389, 264, 387, 443, + /* 1470 */ 333, 843, 98, 564, 53, 53, 323, 157, 157, 227, + /* 1480 */ 495, 553, 249, 289, 225, 564, 313, 162, 31, 1501, + /* 1490 */ 135, 564, 1500, 227, 312, 533, 158, 158, 885, 884, + /* 1500 */ 534, 162, 873, 301, 135, 564, 481, 226, 76, 76, + /* 1510 */ 1021, 347, 1071, 98, 54, 54, 106, 106, 1067, 564, + /* 1520 */ 249, 226, 519, 107, 227, 443, 566, 565, 72, 72, + /* 1530 */ 1011, 334, 162, 564, 230, 135, 108, 556, 959, 4, + /* 1540 */ 252, 408, 129, 129, 564, 1349, 306, 555, 564, 923, + /* 1550 */ 564, 110, 226, 559, 564, 408, 73, 73, 564, 873, + /* 1560 */ 306, 555, 1011, 1011, 1013, 1014, 27, 130, 130, 1071, + /* 1570 */ 449, 131, 131, 127, 127, 357, 443, 156, 156, 892, + /* 1580 */ 893, 155, 155, 338, 449, 356, 408, 564, 553, 968, + /* 1590 */ 969, 306, 555, 1015, 341, 564, 108, 556, 564, 4, + /* 1600 */ 1132, 1286, 533, 564, 856, 343, 145, 532, 345, 1300, + /* 1610 */ 136, 136, 1083, 559, 1083, 449, 564, 1021, 134, 134, + /* 1620 */ 1284, 132, 132, 106, 106, 1285, 133, 133, 564, 352, + /* 1630 */ 107, 564, 443, 566, 565, 1340, 443, 1011, 362, 75, + /* 1640 */ 75, 1082, 564, 1082, 564, 924, 1561, 110, 553, 551, + /* 1650 */ 1015, 77, 77, 1361, 74, 74, 1408, 1336, 1347, 550, + /* 1660 */ 1414, 1265, 1256, 1244, 1243, 42, 42, 48, 48, 1011, + /* 1670 */ 1011, 1013, 1014, 27, 1245, 1580, 490, 1021, 267, 202, + /* 1680 */ 1333, 365, 11, 106, 106, 930, 367, 210, 369, 391, + /* 1690 */ 107, 1395, 443, 566, 565, 223, 1390, 1011, 500, 454, + /* 1700 */ 282, 1400, 285, 108, 556, 214, 4, 325, 1383, 1283, + /* 1710 */ 475, 355, 1473, 1583, 1472, 1399, 371, 1222, 326, 398, + /* 1720 */ 559, 290, 331, 197, 100, 556, 209, 4, 198, 1011, + /* 1730 */ 1011, 1013, 1014, 27, 385, 256, 1520, 1518, 554, 1219, + /* 1740 */ 416, 559, 83, 443, 173, 206, 182, 221, 459, 167, + /* 1750 */ 177, 460, 175, 493, 233, 553, 79, 178, 1396, 179, + /* 1760 */ 35, 180, 96, 1402, 443, 396, 36, 467, 1478, 1401, + /* 1770 */ 482, 237, 1404, 399, 82, 186, 553, 1467, 89, 488, + /* 1780 */ 190, 268, 239, 491, 1021, 340, 240, 401, 1246, 1489, + /* 1790 */ 106, 106, 336, 509, 1294, 241, 1303, 107, 430, 443, + /* 1800 */ 566, 565, 1302, 91, 1011, 1021, 1598, 1301, 1273, 215, + /* 1810 */ 1597, 106, 106, 402, 877, 432, 354, 1272, 107, 1271, + /* 1820 */ 443, 566, 565, 1596, 1566, 1011, 1293, 433, 518, 
299, + /* 1830 */ 300, 360, 95, 525, 1344, 364, 1011, 1011, 1013, 1014, + /* 1840 */ 27, 254, 255, 1552, 436, 1551, 125, 544, 10, 379, + /* 1850 */ 1326, 1453, 102, 97, 1345, 528, 304, 1011, 1011, 1013, + /* 1860 */ 1014, 27, 366, 377, 1343, 1342, 368, 370, 1325, 384, + /* 1870 */ 201, 383, 34, 1368, 1367, 568, 1177, 266, 263, 265, + /* 1880 */ 1505, 159, 569, 1241, 1236, 1506, 160, 142, 1504, 1503, + /* 1890 */ 297, 211, 830, 161, 212, 78, 444, 203, 308, 222, + /* 1900 */ 1081, 139, 1079, 316, 174, 163, 1203, 229, 176, 909, + /* 1910 */ 324, 232, 1095, 181, 409, 410, 172, 164, 165, 419, + /* 1920 */ 183, 85, 86, 421, 166, 87, 88, 1098, 1094, 234, + /* 1930 */ 235, 152, 18, 236, 335, 1087, 1216, 252, 187, 487, + /* 1940 */ 238, 188, 37, 845, 492, 356, 242, 496, 351, 501, + /* 1950 */ 189, 90, 19, 504, 348, 20, 875, 92, 298, 168, + /* 1960 */ 888, 153, 93, 511, 94, 1165, 154, 1047, 1134, 39, + /* 1970 */ 216, 1133, 271, 273, 958, 192, 953, 110, 1151, 1155, + /* 1980 */ 251, 7, 21, 1159, 1139, 22, 1153, 33, 23, 24, + /* 1990 */ 25, 540, 1158, 195, 98, 1062, 26, 1048, 1046, 1050, + /* 2000 */ 1104, 1051, 1103, 257, 258, 28, 40, 1173, 1016, 857, + /* 2010 */ 109, 29, 560, 388, 138, 1172, 259, 170, 260, 1232, + /* 2020 */ 1232, 919, 1232, 1232, 1232, 1232, 1232, 1232, 1232, 1232, + /* 2030 */ 1232, 1232, 1589, 1232, 1232, 1232, 1588, }; static const YYCODETYPE yy_lookahead[] = { - /* 0 */ 192, 221, 192, 223, 192, 214, 272, 273, 274, 217, - /* 10 */ 192, 231, 217, 192, 192, 192, 272, 273, 274, 19, - /* 20 */ 233, 234, 214, 215, 214, 215, 203, 293, 203, 233, - /* 30 */ 234, 31, 214, 215, 214, 214, 215, 214, 215, 39, - /* 40 */ 208, 209, 210, 43, 44, 45, 46, 47, 48, 49, - /* 50 */ 50, 51, 52, 53, 54, 55, 56, 57, 236, 19, - /* 60 */ 237, 238, 237, 238, 272, 273, 274, 272, 273, 274, - /* 70 */ 192, 211, 251, 250, 251, 250, 26, 192, 200, 254, - /* 80 */ 255, 260, 204, 43, 44, 45, 46, 47, 48, 49, - /* 90 */ 50, 51, 52, 53, 54, 55, 56, 57, 192, 214, - /* 100 */ 215, 214, 102, 103, 104, 105, 106, 107, 108, 109, - /* 110 */ 110, 111, 112, 59, 229, 192, 294, 16, 306, 307, - /* 120 */ 312, 313, 312, 311, 314, 59, 86, 204, 88, 19, - /* 130 */ 312, 313, 272, 273, 274, 271, 26, 22, 54, 55, + /* 0 */ 192, 273, 274, 275, 192, 192, 273, 274, 275, 192, + /* 10 */ 218, 215, 192, 218, 192, 212, 234, 235, 205, 19, + /* 20 */ 11, 192, 294, 215, 216, 203, 192, 203, 209, 210, + /* 30 */ 211, 31, 215, 216, 205, 215, 216, 215, 216, 39, + /* 40 */ 227, 215, 229, 43, 44, 45, 46, 47, 48, 49, + /* 50 */ 50, 51, 52, 53, 54, 55, 56, 57, 192, 19, + /* 60 */ 238, 239, 238, 239, 215, 273, 274, 275, 273, 274, + /* 70 */ 275, 237, 21, 251, 252, 251, 273, 274, 275, 255, + /* 80 */ 256, 215, 216, 43, 44, 45, 46, 47, 48, 49, + /* 90 */ 50, 51, 52, 53, 54, 55, 56, 57, 209, 210, + /* 100 */ 211, 76, 102, 103, 104, 105, 106, 107, 108, 109, + /* 110 */ 110, 111, 112, 59, 89, 111, 112, 92, 252, 307, + /* 120 */ 308, 313, 314, 112, 312, 59, 86, 261, 88, 19, + /* 130 */ 313, 80, 315, 313, 314, 25, 127, 128, 54, 55, /* 140 */ 56, 57, 102, 103, 104, 105, 106, 107, 108, 109, /* 150 */ 110, 111, 112, 43, 44, 45, 46, 47, 48, 49, - /* 160 */ 50, 51, 52, 53, 54, 55, 56, 57, 53, 115, - /* 170 */ 116, 117, 118, 309, 310, 121, 122, 123, 77, 69, - /* 180 */ 79, 115, 116, 117, 59, 131, 102, 103, 104, 105, - /* 190 */ 106, 107, 108, 109, 110, 111, 112, 72, 148, 19, + /* 160 */ 50, 51, 52, 53, 54, 55, 56, 57, 192, 115, + /* 170 */ 116, 117, 118, 122, 192, 121, 122, 123, 202, 69, + /* 180 */ 204, 115, 116, 117, 192, 131, 102, 103, 104, 105, + /* 190 */ 
106, 107, 108, 109, 110, 111, 112, 215, 216, 19, /* 200 */ 54, 55, 56, 57, 58, 108, 109, 110, 111, 112, - /* 210 */ 304, 305, 102, 103, 104, 105, 106, 107, 108, 109, + /* 210 */ 192, 160, 102, 103, 104, 105, 106, 107, 108, 109, /* 220 */ 110, 111, 112, 43, 44, 45, 46, 47, 48, 49, - /* 230 */ 50, 51, 52, 53, 54, 55, 56, 57, 19, 112, - /* 240 */ 115, 116, 117, 24, 208, 209, 210, 67, 102, 103, - /* 250 */ 104, 105, 106, 107, 108, 109, 110, 111, 112, 192, - /* 260 */ 59, 160, 43, 44, 45, 46, 47, 48, 49, 50, - /* 270 */ 51, 52, 53, 54, 55, 56, 57, 19, 46, 47, - /* 280 */ 48, 49, 102, 103, 104, 105, 106, 107, 108, 109, - /* 290 */ 110, 111, 112, 213, 73, 184, 185, 186, 187, 188, - /* 300 */ 189, 221, 81, 236, 46, 194, 192, 196, 19, 59, - /* 310 */ 133, 59, 135, 136, 203, 192, 115, 116, 117, 127, - /* 320 */ 128, 102, 103, 104, 105, 106, 107, 108, 109, 110, + /* 230 */ 50, 51, 52, 53, 54, 55, 56, 57, 19, 46, + /* 240 */ 47, 48, 49, 24, 248, 192, 250, 67, 102, 103, + /* 250 */ 104, 105, 106, 107, 108, 109, 110, 111, 112, 277, + /* 260 */ 127, 128, 43, 44, 45, 46, 47, 48, 49, 50, + /* 270 */ 51, 52, 53, 54, 55, 56, 57, 26, 164, 165, + /* 280 */ 272, 263, 102, 103, 104, 105, 106, 107, 108, 109, + /* 290 */ 110, 111, 112, 184, 185, 186, 187, 188, 189, 186, + /* 300 */ 187, 188, 189, 194, 76, 196, 229, 194, 19, 196, + /* 310 */ 59, 192, 203, 120, 59, 87, 203, 89, 310, 311, + /* 320 */ 92, 102, 103, 104, 105, 106, 107, 108, 109, 110, /* 330 */ 111, 112, 43, 44, 45, 46, 47, 48, 49, 50, - /* 340 */ 51, 52, 53, 54, 55, 56, 57, 126, 237, 238, - /* 350 */ 100, 150, 120, 230, 186, 187, 188, 189, 137, 138, - /* 360 */ 108, 250, 194, 26, 196, 115, 116, 115, 116, 117, - /* 370 */ 120, 203, 114, 164, 165, 264, 102, 103, 104, 105, - /* 380 */ 106, 107, 108, 109, 110, 111, 112, 192, 130, 111, - /* 390 */ 112, 102, 103, 104, 105, 106, 107, 108, 109, 110, - /* 400 */ 111, 112, 152, 153, 154, 237, 238, 296, 192, 214, - /* 410 */ 215, 228, 192, 307, 192, 19, 59, 311, 250, 23, - /* 420 */ 22, 106, 107, 108, 109, 110, 111, 112, 192, 72, - /* 430 */ 214, 215, 264, 192, 214, 215, 214, 215, 149, 43, + /* 340 */ 51, 52, 53, 54, 55, 56, 57, 238, 239, 73, + /* 350 */ 231, 238, 239, 22, 23, 100, 25, 81, 305, 306, + /* 360 */ 251, 23, 25, 25, 251, 272, 115, 116, 117, 214, + /* 370 */ 115, 116, 144, 192, 265, 120, 114, 222, 265, 102, + /* 380 */ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + /* 390 */ 192, 102, 103, 104, 105, 106, 107, 108, 109, 110, + /* 400 */ 111, 112, 126, 310, 311, 192, 297, 152, 153, 154, + /* 410 */ 297, 149, 192, 137, 138, 19, 192, 100, 192, 23, + /* 420 */ 22, 106, 107, 108, 109, 110, 111, 112, 215, 216, + /* 430 */ 106, 107, 101, 116, 192, 215, 216, 120, 149, 43, /* 440 */ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - /* 450 */ 54, 55, 56, 57, 117, 214, 215, 59, 19, 187, - /* 460 */ 192, 189, 23, 81, 296, 192, 194, 251, 196, 59, - /* 470 */ 229, 251, 115, 116, 117, 203, 260, 106, 107, 142, - /* 480 */ 260, 267, 43, 44, 45, 46, 47, 48, 49, 50, - /* 490 */ 51, 52, 53, 54, 55, 56, 57, 261, 102, 103, - /* 500 */ 104, 105, 106, 107, 108, 109, 110, 111, 112, 237, - /* 510 */ 238, 76, 192, 115, 116, 117, 144, 192, 76, 137, - /* 520 */ 138, 192, 250, 152, 89, 154, 116, 92, 19, 87, - /* 530 */ 262, 89, 23, 22, 92, 163, 264, 192, 22, 214, - /* 540 */ 215, 102, 103, 104, 105, 106, 107, 108, 109, 110, + /* 450 */ 54, 55, 56, 57, 117, 117, 192, 59, 19, 187, + /* 460 */ 59, 189, 23, 282, 240, 252, 194, 192, 196, 152, + /* 470 */ 153, 154, 252, 72, 261, 203, 152, 25, 154, 142, + /* 480 */ 142, 261, 43, 44, 45, 46, 
47, 48, 49, 50, + /* 490 */ 51, 52, 53, 54, 55, 56, 57, 192, 102, 103, + /* 500 */ 104, 105, 106, 107, 108, 109, 110, 111, 112, 267, + /* 510 */ 238, 239, 237, 115, 116, 117, 115, 116, 117, 192, + /* 520 */ 59, 118, 16, 251, 121, 122, 123, 303, 19, 303, + /* 530 */ 59, 267, 23, 72, 131, 308, 22, 265, 22, 312, + /* 540 */ 24, 102, 103, 104, 105, 106, 107, 108, 109, 110, /* 550 */ 111, 112, 43, 44, 45, 46, 47, 48, 49, 50, - /* 560 */ 51, 52, 53, 54, 55, 56, 57, 19, 296, 118, - /* 570 */ 59, 23, 121, 122, 123, 59, 251, 26, 46, 306, - /* 580 */ 307, 261, 131, 192, 311, 192, 144, 192, 22, 203, - /* 590 */ 100, 43, 44, 45, 46, 47, 48, 49, 50, 51, - /* 600 */ 52, 53, 54, 55, 56, 57, 116, 214, 215, 271, - /* 610 */ 120, 102, 103, 104, 105, 106, 107, 108, 109, 110, - /* 620 */ 111, 112, 229, 237, 238, 59, 115, 116, 117, 299, - /* 630 */ 300, 115, 116, 117, 59, 16, 250, 19, 192, 192, - /* 640 */ 19, 23, 152, 153, 154, 24, 114, 309, 310, 192, + /* 560 */ 51, 52, 53, 54, 55, 56, 57, 19, 81, 297, + /* 570 */ 295, 23, 192, 59, 203, 59, 115, 116, 117, 108, + /* 580 */ 192, 73, 25, 77, 192, 79, 115, 116, 117, 137, + /* 590 */ 138, 43, 44, 45, 46, 47, 48, 49, 50, 51, + /* 600 */ 52, 53, 54, 55, 56, 57, 119, 215, 216, 238, + /* 610 */ 239, 102, 103, 104, 105, 106, 107, 108, 109, 110, + /* 620 */ 111, 112, 251, 192, 137, 138, 59, 234, 235, 115, + /* 630 */ 116, 117, 116, 76, 126, 127, 128, 19, 192, 268, + /* 640 */ 19, 23, 22, 192, 252, 24, 89, 300, 301, 92, /* 650 */ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, /* 660 */ 112, 43, 44, 45, 46, 47, 48, 49, 50, 51, - /* 670 */ 52, 53, 54, 55, 56, 57, 19, 7, 8, 9, - /* 680 */ 23, 115, 116, 117, 203, 290, 239, 238, 137, 138, - /* 690 */ 115, 116, 117, 236, 192, 22, 77, 81, 79, 250, + /* 670 */ 52, 53, 54, 55, 56, 57, 19, 192, 192, 59, + /* 680 */ 23, 192, 115, 116, 117, 200, 240, 307, 308, 22, + /* 690 */ 205, 81, 312, 262, 22, 192, 133, 22, 135, 136, /* 700 */ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - /* 710 */ 53, 54, 55, 56, 57, 192, 95, 142, 237, 238, + /* 710 */ 53, 54, 55, 56, 57, 197, 95, 150, 215, 216, /* 720 */ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - /* 730 */ 112, 250, 59, 112, 192, 119, 26, 214, 215, 118, - /* 740 */ 119, 120, 121, 122, 123, 124, 19, 192, 267, 302, - /* 750 */ 23, 130, 229, 137, 138, 23, 214, 215, 26, 102, + /* 730 */ 112, 59, 192, 112, 59, 115, 116, 117, 192, 118, + /* 740 */ 119, 120, 121, 122, 123, 124, 19, 137, 138, 303, + /* 750 */ 23, 130, 192, 267, 192, 252, 267, 306, 203, 102, /* 760 */ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, /* 770 */ 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - /* 780 */ 53, 54, 55, 56, 57, 19, 76, 11, 115, 116, - /* 790 */ 117, 192, 29, 251, 239, 73, 33, 192, 192, 89, - /* 800 */ 192, 192, 92, 192, 192, 126, 127, 128, 224, 43, + /* 780 */ 53, 54, 55, 56, 57, 19, 240, 115, 116, 117, + /* 790 */ 115, 116, 117, 238, 239, 222, 192, 224, 280, 237, + /* 800 */ 240, 192, 284, 192, 59, 232, 251, 140, 204, 43, /* 810 */ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - /* 820 */ 54, 55, 56, 57, 192, 35, 214, 215, 65, 102, + /* 820 */ 54, 55, 56, 57, 192, 35, 215, 216, 192, 102, /* 830 */ 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - /* 840 */ 59, 229, 192, 192, 239, 239, 214, 215, 126, 127, - /* 850 */ 128, 126, 127, 128, 307, 19, 66, 302, 311, 192, - /* 860 */ 261, 229, 224, 22, 74, 214, 215, 192, 102, 103, + /* 840 */ 59, 230, 192, 192, 238, 239, 237, 215, 216, 303, + /* 850 */ 308, 215, 216, 16, 312, 19, 66, 251, 126, 127, + /* 860 */ 128, 116, 230, 303, 74, 203, 215, 216, 102, 103, /* 870 */ 
104, 105, 106, 107, 108, 109, 110, 111, 112, 43, /* 880 */ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - /* 890 */ 54, 55, 56, 57, 192, 192, 115, 116, 117, 19, - /* 900 */ 59, 290, 251, 127, 128, 192, 23, 302, 302, 26, - /* 910 */ 302, 236, 192, 22, 21, 24, 214, 215, 192, 129, - /* 920 */ 22, 192, 24, 142, 158, 45, 46, 47, 48, 49, + /* 890 */ 54, 55, 56, 57, 192, 212, 115, 116, 117, 19, + /* 900 */ 238, 239, 192, 252, 7, 8, 9, 192, 192, 238, + /* 910 */ 239, 308, 262, 251, 77, 312, 79, 215, 216, 129, + /* 920 */ 210, 211, 251, 142, 158, 45, 46, 47, 48, 49, /* 930 */ 50, 51, 52, 53, 54, 55, 56, 57, 102, 103, - /* 940 */ 104, 105, 106, 107, 108, 109, 110, 111, 112, 192, - /* 950 */ 59, 12, 192, 251, 192, 305, 192, 116, 22, 23, - /* 960 */ 242, 203, 26, 203, 24, 236, 27, 237, 238, 266, - /* 970 */ 252, 214, 215, 80, 214, 215, 214, 215, 214, 215, - /* 980 */ 250, 42, 102, 103, 104, 105, 106, 107, 108, 109, - /* 990 */ 110, 111, 112, 229, 158, 237, 238, 237, 238, 59, - /* 1000 */ 117, 281, 63, 192, 192, 192, 192, 116, 250, 192, - /* 1010 */ 250, 251, 73, 251, 19, 122, 290, 237, 238, 24, - /* 1020 */ 260, 209, 210, 209, 210, 142, 242, 214, 215, 197, - /* 1030 */ 250, 22, 23, 276, 19, 26, 252, 101, 43, 44, + /* 940 */ 104, 105, 106, 107, 108, 109, 110, 111, 112, 12, + /* 950 */ 59, 192, 192, 237, 252, 243, 192, 192, 126, 127, + /* 960 */ 128, 192, 308, 203, 27, 253, 312, 308, 285, 207, + /* 970 */ 208, 312, 157, 290, 159, 215, 216, 262, 192, 42, + /* 980 */ 215, 216, 102, 103, 104, 105, 106, 107, 108, 109, + /* 990 */ 110, 111, 112, 283, 158, 230, 237, 160, 238, 239, + /* 1000 */ 63, 215, 216, 192, 192, 12, 115, 116, 117, 22, + /* 1010 */ 73, 251, 252, 192, 19, 192, 230, 239, 24, 24, + /* 1020 */ 27, 261, 210, 211, 99, 192, 215, 216, 225, 251, + /* 1030 */ 192, 192, 263, 142, 19, 42, 215, 216, 43, 44, /* 1040 */ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - /* 1050 */ 55, 56, 57, 160, 19, 211, 116, 192, 43, 44, + /* 1050 */ 55, 56, 57, 59, 19, 291, 63, 132, 43, 44, /* 1060 */ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - /* 1070 */ 55, 56, 57, 192, 192, 22, 192, 266, 43, 44, + /* 1070 */ 55, 56, 57, 252, 22, 23, 22, 25, 43, 44, /* 1080 */ 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - /* 1090 */ 55, 56, 57, 192, 282, 192, 282, 102, 103, 104, + /* 1090 */ 55, 56, 57, 106, 107, 283, 263, 102, 103, 104, /* 1100 */ 105, 106, 107, 108, 109, 110, 111, 112, 59, 192, - /* 1110 */ 101, 279, 192, 192, 230, 283, 192, 102, 103, 104, - /* 1120 */ 105, 106, 107, 108, 109, 110, 111, 112, 204, 211, - /* 1130 */ 66, 214, 215, 289, 214, 215, 108, 102, 103, 104, - /* 1140 */ 105, 106, 107, 108, 109, 110, 111, 112, 266, 85, - /* 1150 */ 226, 192, 228, 22, 23, 106, 107, 19, 94, 106, - /* 1160 */ 107, 192, 134, 114, 115, 116, 117, 139, 119, 266, - /* 1170 */ 203, 206, 207, 214, 215, 192, 127, 192, 206, 207, + /* 1110 */ 116, 144, 29, 59, 291, 192, 33, 102, 103, 104, + /* 1120 */ 105, 106, 107, 108, 109, 110, 111, 112, 192, 291, + /* 1130 */ 163, 19, 215, 216, 15, 192, 108, 102, 103, 104, + /* 1140 */ 105, 106, 107, 108, 109, 110, 111, 112, 65, 192, + /* 1150 */ 66, 215, 216, 101, 231, 106, 107, 19, 215, 216, + /* 1160 */ 192, 212, 134, 114, 115, 116, 117, 139, 119, 85, + /* 1170 */ 116, 207, 208, 230, 192, 19, 127, 192, 94, 60, /* 1180 */ 59, 192, 44, 45, 46, 47, 48, 49, 50, 51, - /* 1190 */ 52, 53, 54, 55, 56, 57, 192, 76, 192, 214, - /* 1200 */ 215, 152, 284, 154, 237, 238, 192, 289, 87, 145, - /* 1210 */ 89, 19, 20, 92, 22, 22, 23, 250, 307, 236, - /* 1220 */ 214, 215, 311, 203, 12, 247, 192, 249, 36, 307, - /* 1230 */ 192, 262, 
101, 311, 137, 138, 115, 116, 117, 27, + /* 1190 */ 52, 53, 54, 55, 56, 57, 192, 76, 192, 31, + /* 1200 */ 192, 152, 46, 154, 215, 216, 192, 39, 87, 192, + /* 1210 */ 89, 19, 20, 92, 22, 192, 22, 23, 22, 230, + /* 1220 */ 263, 215, 216, 215, 216, 137, 138, 115, 36, 145, + /* 1230 */ 128, 192, 215, 216, 22, 23, 115, 116, 117, 290, /* 1240 */ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - /* 1250 */ 112, 59, 214, 215, 42, 203, 307, 237, 238, 192, - /* 1260 */ 311, 192, 26, 71, 192, 144, 262, 297, 298, 203, - /* 1270 */ 250, 19, 20, 81, 22, 63, 262, 254, 255, 15, - /* 1280 */ 26, 214, 215, 214, 215, 26, 214, 215, 36, 237, - /* 1290 */ 238, 192, 100, 114, 101, 192, 262, 192, 106, 107, - /* 1300 */ 48, 134, 250, 237, 238, 113, 139, 115, 116, 117, - /* 1310 */ 192, 59, 120, 214, 215, 242, 250, 214, 215, 214, - /* 1320 */ 215, 148, 149, 71, 60, 252, 242, 192, 149, 137, - /* 1330 */ 138, 192, 214, 215, 192, 19, 252, 85, 192, 59, - /* 1340 */ 192, 157, 90, 159, 152, 153, 154, 155, 156, 214, - /* 1350 */ 215, 192, 100, 214, 215, 19, 214, 215, 106, 107, - /* 1360 */ 214, 215, 214, 215, 22, 113, 192, 115, 116, 117, - /* 1370 */ 192, 242, 120, 214, 215, 192, 24, 192, 31, 192, - /* 1380 */ 144, 252, 26, 192, 125, 99, 39, 192, 214, 215, - /* 1390 */ 192, 59, 214, 215, 192, 141, 116, 214, 215, 214, - /* 1400 */ 215, 214, 215, 61, 152, 153, 154, 155, 156, 0, - /* 1410 */ 1, 2, 214, 215, 5, 192, 214, 215, 132, 10, - /* 1420 */ 11, 12, 13, 14, 1, 2, 17, 192, 5, 19, - /* 1430 */ 20, 115, 22, 10, 11, 12, 13, 14, 192, 30, - /* 1440 */ 17, 32, 23, 192, 23, 26, 36, 26, 116, 40, - /* 1450 */ 192, 115, 192, 30, 192, 32, 119, 120, 59, 5, - /* 1460 */ 214, 215, 128, 40, 10, 11, 12, 13, 14, 59, - /* 1470 */ 19, 17, 214, 215, 214, 215, 214, 215, 120, 70, - /* 1480 */ 192, 71, 22, 192, 30, 151, 32, 78, 130, 128, - /* 1490 */ 81, 192, 140, 70, 40, 85, 192, 141, 7, 8, - /* 1500 */ 90, 78, 214, 215, 81, 214, 215, 98, 83, 84, - /* 1510 */ 100, 192, 151, 214, 215, 116, 106, 107, 214, 215, - /* 1520 */ 192, 98, 192, 113, 70, 115, 116, 117, 23, 224, - /* 1530 */ 120, 26, 78, 214, 215, 81, 19, 20, 152, 22, - /* 1540 */ 154, 132, 214, 215, 214, 215, 137, 138, 97, 192, - /* 1550 */ 256, 192, 98, 36, 23, 132, 192, 26, 192, 192, - /* 1560 */ 137, 138, 152, 153, 154, 155, 156, 192, 192, 192, - /* 1570 */ 161, 214, 215, 214, 215, 192, 59, 192, 214, 215, - /* 1580 */ 214, 215, 192, 152, 161, 154, 132, 192, 71, 214, - /* 1590 */ 215, 137, 138, 192, 192, 192, 19, 20, 192, 22, - /* 1600 */ 140, 253, 85, 192, 214, 215, 192, 90, 23, 214, - /* 1610 */ 215, 26, 192, 36, 192, 161, 23, 100, 192, 26, - /* 1620 */ 214, 215, 192, 106, 107, 214, 215, 192, 23, 192, - /* 1630 */ 113, 26, 115, 116, 117, 23, 59, 120, 26, 23, - /* 1640 */ 23, 23, 26, 26, 26, 316, 234, 23, 71, 23, - /* 1650 */ 26, 192, 26, 192, 192, 192, 192, 192, 192, 192, - /* 1660 */ 192, 253, 212, 190, 286, 285, 253, 240, 253, 152, - /* 1670 */ 153, 154, 155, 156, 241, 243, 295, 100, 291, 291, - /* 1680 */ 223, 253, 227, 106, 107, 108, 269, 244, 244, 265, - /* 1690 */ 113, 257, 115, 116, 117, 257, 243, 120, 269, 218, - /* 1700 */ 217, 265, 217, 19, 20, 217, 22, 195, 269, 269, - /* 1710 */ 60, 295, 140, 257, 243, 241, 247, 247, 199, 278, - /* 1720 */ 36, 199, 199, 38, 19, 20, 150, 22, 149, 152, - /* 1730 */ 153, 154, 155, 156, 22, 43, 232, 295, 292, 292, - /* 1740 */ 18, 36, 235, 59, 268, 270, 235, 199, 235, 235, - /* 1750 */ 18, 198, 148, 281, 244, 71, 270, 270, 268, 244, - /* 1760 */ 232, 232, 244, 199, 59, 244, 198, 157, 288, 62, - /* 1770 */ 199, 287, 198, 22, 219, 219, 71, 
199, 198, 114, - /* 1780 */ 199, 198, 216, 216, 100, 216, 225, 64, 22, 125, - /* 1790 */ 106, 107, 222, 164, 219, 222, 24, 113, 216, 115, - /* 1800 */ 116, 117, 218, 216, 120, 100, 216, 216, 310, 303, - /* 1810 */ 112, 106, 107, 225, 280, 280, 219, 143, 113, 259, - /* 1820 */ 115, 116, 117, 114, 259, 120, 199, 91, 82, 147, - /* 1830 */ 144, 315, 22, 275, 199, 315, 152, 153, 154, 155, - /* 1840 */ 156, 146, 145, 247, 258, 157, 25, 258, 245, 248, - /* 1850 */ 244, 259, 258, 202, 259, 248, 258, 152, 153, 154, - /* 1860 */ 155, 156, 263, 263, 26, 246, 13, 201, 193, 193, - /* 1870 */ 6, 191, 191, 205, 191, 220, 220, 205, 211, 277, - /* 1880 */ 211, 211, 211, 205, 4, 212, 3, 22, 162, 212, - /* 1890 */ 211, 15, 23, 16, 23, 138, 129, 150, 26, 24, - /* 1900 */ 141, 20, 16, 143, 1, 141, 129, 129, 61, 301, - /* 1910 */ 301, 298, 150, 53, 53, 37, 53, 129, 115, 53, - /* 1920 */ 140, 34, 1, 5, 22, 114, 68, 26, 160, 75, - /* 1930 */ 68, 41, 140, 114, 24, 20, 19, 130, 124, 23, - /* 1940 */ 67, 22, 22, 59, 22, 22, 67, 96, 24, 22, - /* 1950 */ 37, 23, 67, 28, 148, 22, 26, 23, 23, 23, - /* 1960 */ 23, 22, 140, 23, 97, 23, 34, 115, 22, 142, - /* 1970 */ 26, 75, 34, 44, 75, 34, 88, 34, 86, 34, - /* 1980 */ 23, 34, 93, 22, 24, 26, 34, 23, 26, 23, - /* 1990 */ 23, 23, 23, 11, 23, 22, 26, 22, 22, 140, - /* 2000 */ 23, 23, 22, 22, 134, 26, 23, 15, 140, 1, - /* 2010 */ 1, 317, 317, 317, 140, 317, 317, 317, 317, 317, - /* 2020 */ 317, 317, 140, 317, 317, 317, 317, 317, 317, 317, - /* 2030 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2040 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2050 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2060 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2070 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2080 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2090 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2100 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2110 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2120 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2130 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2140 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2150 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2160 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2170 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2180 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2190 */ 317, 317, 317, 317, 317, 317, 317, 317, 317, 317, - /* 2200 */ 317, 317, 317, 317, 317, 317, 317, + /* 1250 */ 112, 59, 192, 151, 215, 216, 192, 61, 203, 298, + /* 1260 */ 299, 192, 192, 71, 25, 144, 203, 192, 203, 230, + /* 1270 */ 114, 19, 20, 81, 22, 215, 216, 263, 192, 215, + /* 1280 */ 216, 255, 256, 203, 215, 216, 130, 19, 36, 192, + /* 1290 */ 215, 216, 100, 238, 239, 101, 25, 192, 106, 107, + /* 1300 */ 48, 238, 239, 238, 239, 113, 251, 115, 116, 117, + /* 1310 */ 192, 59, 120, 101, 251, 192, 251, 192, 238, 239, + /* 1320 */ 215, 216, 192, 71, 46, 243, 192, 25, 25, 137, + /* 1330 */ 138, 251, 192, 215, 216, 253, 25, 85, 215, 216, + /* 1340 */ 215, 216, 90, 243, 152, 153, 154, 155, 156, 215, + /* 1350 */ 216, 192, 100, 253, 243, 215, 216, 192, 106, 107, + /* 1360 */ 243, 192, 148, 149, 253, 113, 192, 115, 116, 117, + /* 1370 */ 253, 192, 120, 192, 215, 216, 192, 23, 192, 25, + /* 1380 */ 215, 216, 192, 115, 215, 216, 22, 148, 24, 215, + /* 1390 */ 216, 192, 114, 192, 215, 216, 215, 216, 134, 215, + /* 1400 */ 216, 215, 216, 139, 152, 
153, 154, 155, 156, 0, + /* 1410 */ 1, 2, 141, 23, 5, 25, 215, 216, 24, 10, + /* 1420 */ 11, 12, 13, 14, 1, 2, 17, 125, 5, 19, + /* 1430 */ 20, 268, 22, 10, 11, 12, 13, 14, 192, 30, + /* 1440 */ 17, 32, 23, 23, 25, 25, 36, 144, 23, 40, + /* 1450 */ 25, 192, 141, 30, 192, 32, 23, 22, 25, 5, + /* 1460 */ 128, 215, 216, 40, 10, 11, 12, 13, 14, 59, + /* 1470 */ 23, 17, 25, 192, 215, 216, 192, 215, 216, 70, + /* 1480 */ 23, 71, 25, 151, 30, 192, 32, 78, 53, 192, + /* 1490 */ 81, 192, 192, 70, 40, 85, 215, 216, 119, 120, + /* 1500 */ 90, 78, 59, 254, 81, 192, 192, 98, 215, 216, + /* 1510 */ 100, 23, 59, 25, 215, 216, 106, 107, 23, 192, + /* 1520 */ 25, 98, 19, 113, 70, 115, 116, 117, 215, 216, + /* 1530 */ 120, 192, 78, 192, 140, 81, 19, 20, 23, 22, + /* 1540 */ 25, 132, 215, 216, 192, 192, 137, 138, 192, 23, + /* 1550 */ 192, 25, 98, 36, 192, 132, 215, 216, 192, 116, + /* 1560 */ 137, 138, 152, 153, 154, 155, 156, 215, 216, 116, + /* 1570 */ 161, 215, 216, 215, 216, 120, 59, 215, 216, 7, + /* 1580 */ 8, 215, 216, 192, 161, 130, 132, 192, 71, 83, + /* 1590 */ 84, 137, 138, 59, 192, 192, 19, 20, 192, 22, + /* 1600 */ 97, 225, 85, 192, 23, 192, 25, 90, 192, 192, + /* 1610 */ 215, 216, 152, 36, 154, 161, 192, 100, 215, 216, + /* 1620 */ 192, 215, 216, 106, 107, 225, 215, 216, 192, 192, + /* 1630 */ 113, 192, 115, 116, 117, 257, 59, 120, 192, 215, + /* 1640 */ 216, 152, 192, 154, 192, 23, 317, 25, 71, 235, + /* 1650 */ 116, 215, 216, 192, 215, 216, 192, 192, 192, 192, + /* 1660 */ 192, 192, 192, 192, 192, 215, 216, 215, 216, 152, + /* 1670 */ 153, 154, 155, 156, 192, 192, 287, 100, 286, 241, + /* 1680 */ 254, 254, 242, 106, 107, 108, 254, 213, 254, 190, + /* 1690 */ 113, 270, 115, 116, 117, 296, 266, 120, 219, 258, + /* 1700 */ 244, 270, 258, 19, 20, 228, 22, 292, 266, 224, + /* 1710 */ 292, 218, 218, 195, 218, 270, 258, 60, 245, 270, + /* 1720 */ 36, 245, 244, 248, 19, 20, 242, 22, 248, 152, + /* 1730 */ 153, 154, 155, 156, 244, 140, 199, 199, 279, 38, + /* 1740 */ 199, 36, 150, 59, 296, 149, 22, 296, 18, 43, + /* 1750 */ 236, 199, 233, 18, 198, 71, 293, 236, 271, 236, + /* 1760 */ 269, 236, 148, 271, 59, 245, 269, 245, 282, 271, + /* 1770 */ 199, 198, 233, 245, 293, 233, 71, 245, 157, 62, + /* 1780 */ 22, 199, 198, 220, 100, 199, 198, 220, 199, 289, + /* 1790 */ 106, 107, 288, 114, 226, 198, 217, 113, 64, 115, + /* 1800 */ 116, 117, 217, 22, 120, 100, 223, 217, 217, 164, + /* 1810 */ 223, 106, 107, 220, 125, 24, 217, 219, 113, 217, + /* 1820 */ 115, 116, 117, 217, 311, 120, 226, 112, 304, 281, + /* 1830 */ 281, 220, 114, 143, 260, 259, 152, 153, 154, 155, + /* 1840 */ 156, 199, 91, 316, 82, 316, 147, 144, 22, 199, + /* 1850 */ 249, 276, 157, 146, 260, 145, 278, 152, 153, 154, + /* 1860 */ 155, 156, 259, 248, 260, 260, 259, 259, 249, 245, + /* 1870 */ 247, 246, 25, 264, 264, 201, 13, 6, 193, 193, + /* 1880 */ 212, 206, 191, 191, 191, 212, 206, 221, 212, 212, + /* 1890 */ 221, 213, 4, 206, 213, 212, 3, 22, 162, 15, + /* 1900 */ 23, 16, 23, 138, 150, 129, 25, 24, 141, 20, + /* 1910 */ 16, 143, 1, 141, 302, 302, 299, 129, 129, 61, + /* 1920 */ 150, 53, 53, 37, 129, 53, 53, 115, 1, 34, + /* 1930 */ 140, 5, 22, 114, 160, 68, 75, 25, 68, 41, + /* 1940 */ 140, 114, 24, 20, 19, 130, 124, 67, 24, 67, + /* 1950 */ 22, 22, 22, 96, 23, 22, 59, 22, 67, 37, + /* 1960 */ 28, 23, 148, 22, 25, 23, 23, 23, 23, 22, + /* 1970 */ 140, 97, 23, 23, 115, 22, 142, 25, 88, 75, + /* 1980 */ 34, 44, 34, 75, 23, 34, 86, 22, 34, 34, + /* 1990 */ 34, 24, 93, 25, 25, 23, 34, 23, 23, 23, + /* 2000 */ 23, 11, 23, 25, 22, 22, 22, 1, 
23, 23, + /* 2010 */ 22, 22, 25, 15, 23, 1, 140, 25, 140, 318, + /* 2020 */ 318, 134, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2030 */ 318, 318, 140, 318, 318, 318, 140, 318, 318, 318, + /* 2040 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2050 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2060 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2070 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2080 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2090 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2100 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2110 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2120 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2130 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2140 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2150 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2160 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2170 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2180 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2190 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2200 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2210 */ 318, 318, 318, 318, 318, 318, 318, 318, 318, 318, + /* 2220 */ 318, }; -#define YY_SHIFT_COUNT (569) +#define YY_SHIFT_COUNT (571) #define YY_SHIFT_MIN (0) -#define YY_SHIFT_MAX (2009) +#define YY_SHIFT_MAX (2014) static const unsigned short int yy_shift_ofst[] = { - /* 0 */ 1423, 1409, 1454, 1192, 1192, 382, 1252, 1410, 1517, 1684, - /* 10 */ 1684, 1684, 221, 0, 0, 180, 1015, 1684, 1684, 1684, + /* 0 */ 1423, 1409, 1454, 1192, 1192, 610, 1252, 1410, 1517, 1684, + /* 10 */ 1684, 1684, 276, 0, 0, 180, 1015, 1684, 1684, 1684, /* 20 */ 1684, 1684, 1684, 1684, 1684, 1684, 1684, 1684, 1684, 1684, - /* 30 */ 1049, 1049, 1121, 1121, 54, 616, 382, 382, 382, 382, - /* 40 */ 382, 40, 110, 219, 289, 396, 439, 509, 548, 618, + /* 30 */ 1049, 1049, 1121, 1121, 54, 487, 610, 610, 610, 610, + /* 40 */ 610, 40, 110, 219, 289, 396, 439, 509, 548, 618, /* 50 */ 657, 727, 766, 836, 995, 1015, 1015, 1015, 1015, 1015, /* 60 */ 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, /* 70 */ 1015, 1015, 1015, 1035, 1015, 1138, 880, 880, 1577, 1684, @@ -159031,155 +160469,157 @@ static const unsigned short int yy_shift_ofst[] = { /* 100 */ 1684, 1684, 1684, 1684, 1684, 1684, 1684, 1684, 1684, 1684, /* 110 */ 1684, 1684, 1684, 1705, 1684, 1684, 1684, 1684, 1684, 1684, /* 120 */ 1684, 1684, 1684, 1684, 1684, 1684, 1684, 146, 84, 84, - /* 130 */ 84, 84, 84, 274, 315, 125, 97, 357, 66, 66, - /* 140 */ 893, 258, 66, 66, 371, 371, 66, 551, 551, 551, - /* 150 */ 551, 192, 209, 209, 278, 127, 2023, 2023, 621, 621, - /* 160 */ 621, 201, 398, 398, 398, 398, 939, 939, 442, 936, - /* 170 */ 1009, 66, 66, 66, 66, 66, 66, 66, 66, 66, + /* 130 */ 84, 84, 84, 277, 315, 401, 97, 461, 251, 66, + /* 140 */ 66, 51, 1156, 66, 66, 324, 324, 66, 452, 452, + /* 150 */ 452, 452, 133, 114, 114, 4, 11, 2037, 2037, 621, + /* 160 */ 621, 621, 567, 398, 398, 398, 398, 937, 937, 228, + /* 170 */ 251, 331, 1052, 66, 66, 66, 66, 66, 66, 66, /* 180 */ 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, - /* 190 */ 66, 710, 710, 66, 776, 435, 435, 410, 410, 372, - /* 200 */ 1097, 2023, 2023, 2023, 2023, 2023, 2023, 2023, 250, 490, - /* 210 */ 490, 511, 451, 516, 252, 566, 575, 781, 673, 66, - /* 220 */ 66, 66, 66, 66, 66, 66, 66, 66, 66, 722, - /* 230 */ 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, - /* 240 */ 
66, 66, 790, 790, 790, 66, 66, 66, 883, 66, - /* 250 */ 66, 66, 891, 1064, 66, 66, 1212, 66, 66, 66, - /* 260 */ 66, 66, 66, 66, 66, 725, 763, 177, 940, 940, - /* 270 */ 940, 940, 337, 177, 177, 1028, 1053, 670, 1264, 1179, - /* 280 */ 1173, 1254, 1316, 1173, 1316, 1336, 50, 1179, 1179, 50, - /* 290 */ 1179, 1254, 1336, 1259, 732, 532, 1347, 1347, 1347, 1316, - /* 300 */ 1236, 1236, 1184, 1356, 1167, 898, 1650, 1650, 1572, 1572, - /* 310 */ 1685, 1685, 1572, 1576, 1579, 1712, 1692, 1722, 1722, 1722, - /* 320 */ 1722, 1572, 1732, 1604, 1579, 1579, 1604, 1712, 1692, 1604, - /* 330 */ 1692, 1604, 1572, 1732, 1610, 1707, 1572, 1732, 1751, 1572, - /* 340 */ 1732, 1572, 1732, 1751, 1665, 1665, 1665, 1723, 1766, 1766, - /* 350 */ 1751, 1665, 1664, 1665, 1723, 1665, 1665, 1629, 1772, 1698, - /* 360 */ 1698, 1751, 1674, 1709, 1674, 1709, 1674, 1709, 1674, 1709, - /* 370 */ 1572, 1736, 1736, 1746, 1746, 1682, 1686, 1810, 1572, 1688, - /* 380 */ 1682, 1695, 1697, 1604, 1821, 1838, 1853, 1853, 1864, 1864, - /* 390 */ 1864, 2023, 2023, 2023, 2023, 2023, 2023, 2023, 2023, 2023, - /* 400 */ 2023, 2023, 2023, 2023, 2023, 2023, 232, 101, 1131, 1193, - /* 410 */ 619, 679, 841, 1421, 1286, 115, 1352, 1334, 1361, 1419, - /* 420 */ 1342, 1505, 1531, 1585, 1593, 1605, 1612, 1280, 1337, 1491, - /* 430 */ 1358, 1451, 1332, 1616, 1617, 1425, 1618, 1386, 1431, 1624, - /* 440 */ 1626, 1399, 1460, 1880, 1883, 1865, 1726, 1876, 1877, 1869, - /* 450 */ 1871, 1757, 1747, 1767, 1872, 1872, 1875, 1759, 1881, 1760, - /* 460 */ 1886, 1903, 1764, 1777, 1872, 1778, 1847, 1878, 1872, 1762, - /* 470 */ 1860, 1861, 1863, 1866, 1788, 1803, 1887, 1780, 1921, 1918, - /* 480 */ 1902, 1811, 1768, 1858, 1901, 1862, 1854, 1890, 1792, 1819, - /* 490 */ 1910, 1915, 1917, 1807, 1814, 1919, 1873, 1920, 1922, 1916, - /* 500 */ 1923, 1879, 1884, 1924, 1851, 1925, 1927, 1885, 1913, 1928, - /* 510 */ 1806, 1933, 1934, 1935, 1936, 1930, 1937, 1939, 1867, 1822, - /* 520 */ 1940, 1942, 1852, 1932, 1946, 1827, 1944, 1938, 1941, 1943, - /* 530 */ 1945, 1888, 1896, 1892, 1929, 1899, 1889, 1947, 1957, 1961, - /* 540 */ 1960, 1959, 1962, 1952, 1964, 1944, 1966, 1967, 1968, 1969, - /* 550 */ 1970, 1971, 1973, 1982, 1975, 1976, 1977, 1978, 1980, 1981, - /* 560 */ 1979, 1870, 1859, 1868, 1874, 1882, 1983, 1992, 2008, 2009, + /* 190 */ 66, 66, 66, 557, 557, 66, 9, 25, 25, 745, + /* 200 */ 745, 967, 1088, 2037, 2037, 2037, 2037, 2037, 2037, 2037, + /* 210 */ 255, 317, 317, 514, 403, 620, 471, 672, 781, 891, + /* 220 */ 675, 66, 66, 66, 66, 66, 66, 66, 66, 66, + /* 230 */ 66, 508, 66, 66, 66, 66, 66, 66, 66, 66, + /* 240 */ 66, 66, 66, 66, 790, 790, 790, 66, 66, 66, + /* 250 */ 338, 66, 66, 66, 516, 1084, 66, 66, 993, 66, + /* 260 */ 66, 66, 66, 66, 66, 66, 66, 732, 1083, 563, + /* 270 */ 994, 994, 994, 994, 337, 563, 563, 1028, 987, 897, + /* 280 */ 1119, 262, 1214, 1271, 1112, 1214, 1112, 1268, 1239, 262, + /* 290 */ 262, 1239, 262, 1271, 1268, 1302, 1354, 1278, 1168, 1168, + /* 300 */ 1168, 1112, 1303, 1303, 815, 1311, 1264, 1364, 1657, 1657, + /* 310 */ 1595, 1595, 1701, 1701, 1595, 1592, 1596, 1724, 1706, 1730, + /* 320 */ 1730, 1730, 1730, 1595, 1735, 1614, 1596, 1596, 1614, 1724, + /* 330 */ 1706, 1614, 1706, 1614, 1595, 1735, 1621, 1717, 1595, 1735, + /* 340 */ 1758, 1595, 1735, 1595, 1735, 1758, 1679, 1679, 1679, 1734, + /* 350 */ 1781, 1781, 1758, 1679, 1689, 1679, 1734, 1679, 1679, 1645, + /* 360 */ 1791, 1715, 1715, 1758, 1690, 1718, 1690, 1718, 1690, 1718, + /* 370 */ 1690, 1718, 1595, 1751, 1751, 1762, 1762, 1699, 1703, 1826, + /* 380 */ 1595, 
1695, 1699, 1707, 1710, 1614, 1847, 1863, 1863, 1871, + /* 390 */ 1871, 1871, 2037, 2037, 2037, 2037, 2037, 2037, 2037, 2037, + /* 400 */ 2037, 2037, 2037, 2037, 2037, 2037, 2037, 193, 837, 1194, + /* 410 */ 1212, 506, 832, 1054, 1390, 925, 1435, 1394, 1102, 1332, + /* 420 */ 1419, 1196, 1420, 1425, 1433, 1447, 1457, 1488, 1443, 1379, + /* 430 */ 1572, 1455, 1503, 1453, 1495, 1515, 1506, 1526, 1460, 1489, + /* 440 */ 1581, 1622, 1534, 667, 1888, 1893, 1875, 1736, 1884, 1885, + /* 450 */ 1877, 1879, 1765, 1754, 1776, 1881, 1881, 1883, 1767, 1889, + /* 460 */ 1768, 1894, 1911, 1772, 1788, 1881, 1789, 1858, 1886, 1881, + /* 470 */ 1770, 1868, 1869, 1872, 1873, 1795, 1812, 1895, 1790, 1927, + /* 480 */ 1926, 1910, 1819, 1774, 1867, 1912, 1870, 1861, 1898, 1800, + /* 490 */ 1827, 1918, 1923, 1925, 1815, 1822, 1928, 1880, 1929, 1930, + /* 500 */ 1931, 1933, 1882, 1897, 1924, 1857, 1932, 1935, 1891, 1922, + /* 510 */ 1938, 1814, 1941, 1942, 1943, 1944, 1939, 1945, 1947, 1874, + /* 520 */ 1830, 1949, 1950, 1859, 1946, 1953, 1834, 1952, 1948, 1951, + /* 530 */ 1954, 1955, 1890, 1904, 1900, 1937, 1908, 1899, 1956, 1961, + /* 540 */ 1965, 1967, 1968, 1969, 1962, 1972, 1952, 1974, 1975, 1976, + /* 550 */ 1977, 1978, 1979, 1982, 1990, 1983, 1984, 1985, 1986, 1988, + /* 560 */ 1989, 1987, 1887, 1876, 1878, 1892, 1896, 1992, 1991, 1998, + /* 570 */ 2006, 2014, }; -#define YY_REDUCE_COUNT (405) -#define YY_REDUCE_MIN (-266) -#define YY_REDUCE_MAX (1683) +#define YY_REDUCE_COUNT (406) +#define YY_REDUCE_MIN (-272) +#define YY_REDUCE_MAX (1693) static const short yy_reduce_ofst[] = { - /* 0 */ 111, 168, 272, 760, -177, -175, -192, -190, -182, -179, - /* 10 */ 216, 220, 481, -208, -205, -266, -140, -115, 241, 393, - /* 20 */ 523, 325, 612, 632, 542, 651, 764, 757, 702, 762, - /* 30 */ 812, 814, -188, 273, 924, 386, 758, 967, 1020, 1052, - /* 40 */ 1066, -256, -256, -256, -256, -256, -256, -256, -256, -256, - /* 50 */ -256, -256, -256, -256, -256, -256, -256, -256, -256, -256, - /* 60 */ -256, -256, -256, -256, -256, -256, -256, -256, -256, -256, - /* 70 */ -256, -256, -256, -256, -256, -256, -256, -256, 195, 222, - /* 80 */ 813, 917, 920, 959, 985, 1006, 1038, 1067, 1069, 1072, - /* 90 */ 1099, 1103, 1105, 1118, 1135, 1139, 1142, 1146, 1148, 1159, - /* 100 */ 1174, 1178, 1183, 1185, 1187, 1198, 1202, 1246, 1258, 1260, - /* 110 */ 1262, 1288, 1291, 1299, 1304, 1319, 1328, 1330, 1357, 1359, - /* 120 */ 1364, 1366, 1375, 1390, 1395, 1406, 1411, -256, -256, -256, - /* 130 */ -256, -256, -256, -256, -256, 447, -256, 555, -178, 605, - /* 140 */ 832, -220, 606, -94, -168, 36, -122, 730, 780, 730, - /* 150 */ 780, 918, -136, 338, -256, -256, -256, -256, 80, 80, - /* 160 */ 80, 720, 703, 811, 882, 903, -213, -204, 106, 330, - /* 170 */ 330, -77, 236, 320, 599, 67, 457, 675, 729, 395, - /* 180 */ 268, 611, 969, 1004, 726, 1014, 983, 123, 884, 608, - /* 190 */ 1034, 547, 911, 650, 844, 922, 949, 965, 972, 978, - /* 200 */ 449, 970, 718, 784, 1073, 1084, 1023, 1129, -209, -180, - /* 210 */ -113, 114, 183, 329, 345, 391, 446, 502, 609, 667, - /* 220 */ 713, 817, 865, 881, 901, 921, 989, 1191, 1195, 214, - /* 230 */ 1223, 1235, 1251, 1367, 1376, 1377, 1383, 1385, 1401, 1402, - /* 240 */ 1403, 1414, 584, 638, 1305, 1420, 1422, 1426, 1294, 1430, - /* 250 */ 1435, 1437, 1348, 1329, 1459, 1461, 1412, 1462, 345, 1463, - /* 260 */ 1464, 1465, 1466, 1467, 1468, 1378, 1380, 1427, 1408, 1413, - /* 270 */ 1415, 1428, 1294, 1427, 1427, 1433, 1450, 1473, 1381, 1417, - /* 280 */ 1424, 1432, 1434, 1436, 1438, 1387, 1443, 1429, 1439, 
1444, - /* 290 */ 1440, 1453, 1388, 1481, 1455, 1457, 1483, 1485, 1488, 1456, - /* 300 */ 1469, 1470, 1441, 1471, 1474, 1512, 1416, 1442, 1519, 1522, - /* 310 */ 1446, 1447, 1523, 1472, 1475, 1476, 1504, 1507, 1511, 1513, - /* 320 */ 1514, 1548, 1553, 1510, 1486, 1487, 1515, 1490, 1528, 1518, - /* 330 */ 1529, 1521, 1564, 1568, 1480, 1484, 1571, 1574, 1555, 1578, - /* 340 */ 1580, 1581, 1583, 1556, 1566, 1567, 1569, 1561, 1570, 1573, - /* 350 */ 1575, 1582, 1584, 1587, 1588, 1590, 1591, 1498, 1506, 1534, - /* 360 */ 1535, 1597, 1560, 1586, 1565, 1589, 1592, 1594, 1595, 1598, - /* 370 */ 1627, 1516, 1520, 1599, 1600, 1601, 1596, 1558, 1635, 1602, - /* 380 */ 1607, 1619, 1603, 1606, 1651, 1666, 1675, 1676, 1680, 1681, - /* 390 */ 1683, 1608, 1609, 1613, 1668, 1667, 1669, 1670, 1671, 1672, - /* 400 */ 1655, 1656, 1673, 1677, 1679, 1678, + /* 0 */ 109, 113, 272, 760, -178, -176, -192, -183, -180, -134, + /* 10 */ 213, 220, 371, -208, -205, -272, -197, 611, 632, 765, + /* 20 */ 786, 392, 943, 989, 503, 651, 1039, -18, 702, 821, + /* 30 */ 710, 812, -188, 380, -187, 555, 662, 1055, 1063, 1065, + /* 40 */ 1080, -267, -267, -267, -267, -267, -267, -267, -267, -267, + /* 50 */ -267, -267, -267, -267, -267, -267, -267, -267, -267, -267, + /* 60 */ -267, -267, -267, -267, -267, -267, -267, -267, -267, -267, + /* 70 */ -267, -267, -267, -267, -267, -267, -267, -267, 636, 811, + /* 80 */ 917, 936, 1006, 1008, 1017, 1060, 1064, 1069, 1075, 1105, + /* 90 */ 1118, 1123, 1125, 1134, 1140, 1159, 1165, 1169, 1174, 1179, + /* 100 */ 1181, 1184, 1186, 1201, 1246, 1259, 1262, 1281, 1293, 1299, + /* 110 */ 1313, 1327, 1341, 1352, 1356, 1358, 1362, 1366, 1395, 1403, + /* 120 */ 1406, 1411, 1424, 1436, 1439, 1450, 1452, -267, -267, -267, + /* 130 */ -267, -267, -267, -267, -267, 224, -267, 446, -24, 275, + /* 140 */ 546, 518, 573, 560, 53, -181, -111, 485, 606, 671, + /* 150 */ 606, 671, 683, 8, 93, -267, -267, -267, -267, 155, + /* 160 */ 155, 155, 181, 242, 264, 486, 489, -218, 393, 227, + /* 170 */ 604, 347, 347, -171, 431, 650, 715, -166, 562, 609, + /* 180 */ 716, 764, 18, 823, 769, 833, 838, 957, 759, 119, + /* 190 */ 923, 226, 1014, 542, 603, 451, 949, 654, 659, 762, + /* 200 */ 964, -4, 778, 961, 712, 1082, 1100, 1111, 1026, 1117, + /* 210 */ -204, -174, -151, -8, 77, 198, 305, 327, 388, 540, + /* 220 */ 839, 968, 982, 985, 1004, 1023, 1070, 1086, 1097, 1130, + /* 230 */ 1190, 1163, 1199, 1284, 1297, 1300, 1314, 1339, 1353, 1391, + /* 240 */ 1402, 1413, 1416, 1417, 803, 1376, 1400, 1428, 1437, 1446, + /* 250 */ 1378, 1461, 1464, 1465, 1249, 1329, 1466, 1467, 1414, 1468, + /* 260 */ 305, 1469, 1470, 1471, 1472, 1482, 1483, 1389, 1392, 1438, + /* 270 */ 1426, 1427, 1432, 1434, 1378, 1438, 1438, 1440, 1474, 1499, + /* 280 */ 1399, 1421, 1430, 1456, 1441, 1442, 1444, 1415, 1473, 1431, + /* 290 */ 1445, 1476, 1449, 1478, 1418, 1479, 1477, 1485, 1493, 1494, + /* 300 */ 1496, 1458, 1475, 1480, 1459, 1490, 1484, 1518, 1448, 1451, + /* 310 */ 1537, 1538, 1463, 1481, 1541, 1486, 1487, 1491, 1519, 1514, + /* 320 */ 1521, 1523, 1525, 1552, 1556, 1520, 1492, 1498, 1522, 1497, + /* 330 */ 1539, 1528, 1542, 1532, 1571, 1573, 1500, 1504, 1582, 1584, + /* 340 */ 1563, 1586, 1588, 1589, 1597, 1567, 1579, 1585, 1590, 1568, + /* 350 */ 1583, 1587, 1593, 1591, 1598, 1599, 1600, 1602, 1606, 1513, + /* 360 */ 1524, 1548, 1549, 1611, 1574, 1576, 1594, 1603, 1604, 1607, + /* 370 */ 1605, 1608, 1642, 1527, 1529, 1609, 1610, 1601, 1615, 1575, + /* 380 */ 1650, 1578, 1619, 1623, 1625, 1624, 1674, 1685, 1686, 1691, + /* 390 */ 1692, 
1693, 1612, 1613, 1617, 1675, 1668, 1673, 1676, 1677, + /* 400 */ 1680, 1666, 1669, 1678, 1681, 1683, 1687, }; static const YYACTIONTYPE yy_default[] = { - /* 0 */ 1623, 1623, 1623, 1453, 1223, 1332, 1223, 1223, 1223, 1453, - /* 10 */ 1453, 1453, 1223, 1362, 1362, 1506, 1254, 1223, 1223, 1223, - /* 20 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1452, 1223, 1223, - /* 30 */ 1223, 1223, 1541, 1541, 1223, 1223, 1223, 1223, 1223, 1223, - /* 40 */ 1223, 1223, 1371, 1223, 1378, 1223, 1223, 1223, 1223, 1223, - /* 50 */ 1454, 1455, 1223, 1223, 1223, 1505, 1507, 1470, 1385, 1384, - /* 60 */ 1383, 1382, 1488, 1349, 1376, 1369, 1373, 1448, 1449, 1447, - /* 70 */ 1451, 1455, 1454, 1223, 1372, 1419, 1433, 1418, 1223, 1223, - /* 80 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 90 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 100 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 110 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 120 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1427, 1432, 1438, - /* 130 */ 1431, 1428, 1421, 1420, 1422, 1223, 1423, 1223, 1223, 1223, - /* 140 */ 1244, 1296, 1223, 1223, 1223, 1223, 1223, 1525, 1524, 1223, - /* 150 */ 1223, 1254, 1413, 1412, 1424, 1425, 1435, 1434, 1513, 1576, - /* 160 */ 1575, 1471, 1223, 1223, 1223, 1223, 1223, 1223, 1541, 1223, - /* 170 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 180 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 190 */ 1223, 1541, 1541, 1223, 1254, 1541, 1541, 1250, 1250, 1356, - /* 200 */ 1223, 1520, 1323, 1323, 1323, 1323, 1332, 1323, 1223, 1223, - /* 210 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 220 */ 1223, 1223, 1223, 1510, 1508, 1223, 1223, 1223, 1223, 1223, - /* 230 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 240 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 250 */ 1223, 1223, 1328, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 260 */ 1223, 1223, 1223, 1223, 1570, 1223, 1483, 1310, 1328, 1328, - /* 270 */ 1328, 1328, 1330, 1311, 1309, 1322, 1255, 1230, 1615, 1388, - /* 280 */ 1377, 1329, 1351, 1377, 1351, 1612, 1375, 1388, 1388, 1375, - /* 290 */ 1388, 1329, 1612, 1271, 1592, 1266, 1362, 1362, 1362, 1351, - /* 300 */ 1356, 1356, 1450, 1329, 1322, 1223, 1615, 1615, 1337, 1337, - /* 310 */ 1614, 1614, 1337, 1471, 1599, 1397, 1299, 1305, 1305, 1305, - /* 320 */ 1305, 1337, 1241, 1375, 1599, 1599, 1375, 1397, 1299, 1375, - /* 330 */ 1299, 1375, 1337, 1241, 1487, 1609, 1337, 1241, 1461, 1337, - /* 340 */ 1241, 1337, 1241, 1461, 1297, 1297, 1297, 1286, 1223, 1223, - /* 350 */ 1461, 1297, 1271, 1297, 1286, 1297, 1297, 1559, 1223, 1465, - /* 360 */ 1465, 1461, 1355, 1350, 1355, 1350, 1355, 1350, 1355, 1350, - /* 370 */ 1337, 1551, 1551, 1365, 1365, 1370, 1356, 1456, 1337, 1223, - /* 380 */ 1370, 1368, 1366, 1375, 1247, 1289, 1573, 1573, 1569, 1569, - /* 390 */ 1569, 1620, 1620, 1520, 1585, 1254, 1254, 1254, 1254, 1585, - /* 400 */ 1273, 1273, 1255, 1255, 1254, 1585, 1223, 1223, 1223, 1223, - /* 410 */ 1223, 1223, 1580, 1223, 1515, 1472, 1341, 1223, 1223, 1223, - /* 420 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 430 */ 1223, 1526, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 440 */ 1223, 1223, 1402, 1223, 1226, 1517, 1223, 1223, 1223, 1223, - /* 450 */ 1223, 1223, 1223, 1223, 1379, 1380, 1342, 1223, 1223, 1223, - /* 460 */ 1223, 1223, 1223, 1223, 1394, 1223, 1223, 1223, 1389, 1223, - /* 470 */ 1223, 1223, 
1223, 1223, 1223, 1223, 1223, 1611, 1223, 1223, - /* 480 */ 1223, 1223, 1223, 1223, 1486, 1485, 1223, 1223, 1339, 1223, - /* 490 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 500 */ 1223, 1223, 1269, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 510 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 520 */ 1223, 1223, 1223, 1223, 1223, 1223, 1367, 1223, 1223, 1223, - /* 530 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 540 */ 1223, 1556, 1357, 1223, 1223, 1602, 1223, 1223, 1223, 1223, - /* 550 */ 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, 1223, - /* 560 */ 1596, 1313, 1404, 1223, 1403, 1407, 1223, 1235, 1223, 1223, + /* 0 */ 1633, 1633, 1633, 1462, 1230, 1341, 1230, 1230, 1230, 1462, + /* 10 */ 1462, 1462, 1230, 1371, 1371, 1515, 1263, 1230, 1230, 1230, + /* 20 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1461, 1230, 1230, + /* 30 */ 1230, 1230, 1550, 1550, 1230, 1230, 1230, 1230, 1230, 1230, + /* 40 */ 1230, 1230, 1380, 1230, 1387, 1230, 1230, 1230, 1230, 1230, + /* 50 */ 1463, 1464, 1230, 1230, 1230, 1514, 1516, 1479, 1394, 1393, + /* 60 */ 1392, 1391, 1497, 1358, 1385, 1378, 1382, 1457, 1458, 1456, + /* 70 */ 1460, 1464, 1463, 1230, 1381, 1428, 1442, 1427, 1230, 1230, + /* 80 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 90 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 100 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 110 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 120 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1436, 1441, 1447, + /* 130 */ 1440, 1437, 1430, 1429, 1431, 1230, 1432, 1230, 1254, 1230, + /* 140 */ 1230, 1251, 1305, 1230, 1230, 1230, 1230, 1230, 1534, 1533, + /* 150 */ 1230, 1230, 1263, 1422, 1421, 1433, 1434, 1444, 1443, 1522, + /* 160 */ 1586, 1585, 1480, 1230, 1230, 1230, 1230, 1230, 1230, 1550, + /* 170 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 180 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 190 */ 1230, 1230, 1230, 1550, 1550, 1230, 1263, 1550, 1550, 1259, + /* 200 */ 1259, 1365, 1230, 1529, 1332, 1332, 1332, 1332, 1341, 1332, + /* 210 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 220 */ 1230, 1230, 1230, 1230, 1230, 1519, 1517, 1230, 1230, 1230, + /* 230 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 240 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 250 */ 1230, 1230, 1230, 1230, 1337, 1230, 1230, 1230, 1230, 1230, + /* 260 */ 1230, 1230, 1230, 1230, 1230, 1230, 1579, 1230, 1492, 1319, + /* 270 */ 1337, 1337, 1337, 1337, 1339, 1320, 1318, 1331, 1264, 1237, + /* 280 */ 1625, 1397, 1386, 1338, 1360, 1386, 1360, 1622, 1384, 1397, + /* 290 */ 1397, 1384, 1397, 1338, 1622, 1280, 1602, 1275, 1371, 1371, + /* 300 */ 1371, 1360, 1365, 1365, 1459, 1338, 1331, 1230, 1625, 1625, + /* 310 */ 1346, 1346, 1624, 1624, 1346, 1480, 1609, 1406, 1308, 1314, + /* 320 */ 1314, 1314, 1314, 1346, 1248, 1384, 1609, 1609, 1384, 1406, + /* 330 */ 1308, 1384, 1308, 1384, 1346, 1248, 1496, 1619, 1346, 1248, + /* 340 */ 1470, 1346, 1248, 1346, 1248, 1470, 1306, 1306, 1306, 1295, + /* 350 */ 1230, 1230, 1470, 1306, 1280, 1306, 1295, 1306, 1306, 1568, + /* 360 */ 1230, 1474, 1474, 1470, 1364, 1359, 1364, 1359, 1364, 1359, + /* 370 */ 1364, 1359, 1346, 1560, 1560, 1374, 1374, 1379, 1365, 1465, + /* 380 */ 1346, 1230, 1379, 1377, 1375, 1384, 1298, 1582, 1582, 1578, + /* 390 */ 1578, 1578, 1630, 1630, 1529, 1595, 1263, 1263, 
1263, 1263, + /* 400 */ 1595, 1282, 1282, 1264, 1264, 1263, 1595, 1230, 1230, 1230, + /* 410 */ 1230, 1230, 1230, 1590, 1230, 1524, 1481, 1350, 1230, 1230, + /* 420 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 430 */ 1230, 1230, 1535, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 440 */ 1230, 1230, 1230, 1411, 1230, 1233, 1526, 1230, 1230, 1230, + /* 450 */ 1230, 1230, 1230, 1230, 1230, 1388, 1389, 1351, 1230, 1230, + /* 460 */ 1230, 1230, 1230, 1230, 1230, 1403, 1230, 1230, 1230, 1398, + /* 470 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1621, 1230, + /* 480 */ 1230, 1230, 1230, 1230, 1230, 1495, 1494, 1230, 1230, 1348, + /* 490 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 500 */ 1230, 1230, 1230, 1278, 1230, 1230, 1230, 1230, 1230, 1230, + /* 510 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 520 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1376, 1230, 1230, + /* 530 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 540 */ 1230, 1230, 1565, 1366, 1230, 1230, 1612, 1230, 1230, 1230, + /* 550 */ 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, 1230, + /* 560 */ 1230, 1606, 1322, 1413, 1230, 1412, 1416, 1252, 1230, 1242, + /* 570 */ 1230, 1230, }; /********** End of lemon-generated parsing tables *****************************/ @@ -159224,8 +160664,8 @@ static const YYCODETYPE yyFallback[] = { 0, /* LP => nothing */ 0, /* RP => nothing */ 0, /* AS => nothing */ - 59, /* WITHOUT => ID */ 0, /* COMMA => nothing */ + 59, /* WITHOUT => ID */ 59, /* ABORT => ID */ 59, /* ACTION => ID */ 59, /* AFTER => ID */ @@ -159434,9 +160874,9 @@ struct yyParser { }; typedef struct yyParser yyParser; +/* #include */ #ifndef NDEBUG /* #include */ -/* #include */ static FILE *yyTraceFILE = 0; static char *yyTracePrompt = 0; #endif /* NDEBUG */ @@ -159496,8 +160936,8 @@ static const char *const yyTokenName[] = { /* 22 */ "LP", /* 23 */ "RP", /* 24 */ "AS", - /* 25 */ "WITHOUT", - /* 26 */ "COMMA", + /* 25 */ "COMMA", + /* 26 */ "WITHOUT", /* 27 */ "ABORT", /* 28 */ "ACTION", /* 29 */ "AFTER", @@ -159673,121 +161113,122 @@ static const char *const yyTokenName[] = { /* 199 */ "dbnm", /* 200 */ "columnlist", /* 201 */ "conslist_opt", - /* 202 */ "table_options", + /* 202 */ "table_option_set", /* 203 */ "select", - /* 204 */ "columnname", - /* 205 */ "carglist", - /* 206 */ "typetoken", - /* 207 */ "typename", - /* 208 */ "signed", - /* 209 */ "plus_num", - /* 210 */ "minus_num", - /* 211 */ "scanpt", - /* 212 */ "scantok", - /* 213 */ "ccons", - /* 214 */ "term", - /* 215 */ "expr", - /* 216 */ "onconf", - /* 217 */ "sortorder", - /* 218 */ "autoinc", - /* 219 */ "eidlist_opt", - /* 220 */ "refargs", - /* 221 */ "defer_subclause", - /* 222 */ "generated", - /* 223 */ "refarg", - /* 224 */ "refact", - /* 225 */ "init_deferred_pred_opt", - /* 226 */ "conslist", - /* 227 */ "tconscomma", - /* 228 */ "tcons", - /* 229 */ "sortlist", - /* 230 */ "eidlist", - /* 231 */ "defer_subclause_opt", - /* 232 */ "orconf", - /* 233 */ "resolvetype", - /* 234 */ "raisetype", - /* 235 */ "ifexists", - /* 236 */ "fullname", - /* 237 */ "selectnowith", - /* 238 */ "oneselect", - /* 239 */ "wqlist", - /* 240 */ "multiselect_op", - /* 241 */ "distinct", - /* 242 */ "selcollist", - /* 243 */ "from", - /* 244 */ "where_opt", - /* 245 */ "groupby_opt", - /* 246 */ "having_opt", - /* 247 */ "orderby_opt", - /* 248 */ "limit_opt", - /* 249 */ "window_clause", - /* 250 */ "values", - /* 251 */ "nexprlist", - /* 252 */ "sclp", - /* 253 */ "as", - 
/* 254 */ "seltablist", - /* 255 */ "stl_prefix", - /* 256 */ "joinop", - /* 257 */ "indexed_opt", - /* 258 */ "on_opt", - /* 259 */ "using_opt", - /* 260 */ "exprlist", - /* 261 */ "xfullname", - /* 262 */ "idlist", - /* 263 */ "nulls", - /* 264 */ "with", - /* 265 */ "where_opt_ret", - /* 266 */ "setlist", - /* 267 */ "insert_cmd", - /* 268 */ "idlist_opt", - /* 269 */ "upsert", - /* 270 */ "returning", - /* 271 */ "filter_over", - /* 272 */ "likeop", - /* 273 */ "between_op", - /* 274 */ "in_op", - /* 275 */ "paren_exprlist", - /* 276 */ "case_operand", - /* 277 */ "case_exprlist", - /* 278 */ "case_else", - /* 279 */ "uniqueflag", - /* 280 */ "collate", - /* 281 */ "vinto", - /* 282 */ "nmnum", - /* 283 */ "trigger_decl", - /* 284 */ "trigger_cmd_list", - /* 285 */ "trigger_time", - /* 286 */ "trigger_event", - /* 287 */ "foreach_clause", - /* 288 */ "when_clause", - /* 289 */ "trigger_cmd", - /* 290 */ "trnm", - /* 291 */ "tridxby", - /* 292 */ "database_kw_opt", - /* 293 */ "key_opt", - /* 294 */ "add_column_fullname", - /* 295 */ "kwcolumn_opt", - /* 296 */ "create_vtab", - /* 297 */ "vtabarglist", - /* 298 */ "vtabarg", - /* 299 */ "vtabargtoken", - /* 300 */ "lp", - /* 301 */ "anylist", - /* 302 */ "wqitem", - /* 303 */ "wqas", - /* 304 */ "windowdefn_list", - /* 305 */ "windowdefn", - /* 306 */ "window", - /* 307 */ "frame_opt", - /* 308 */ "part_opt", - /* 309 */ "filter_clause", - /* 310 */ "over_clause", - /* 311 */ "range_or_rows", - /* 312 */ "frame_bound", - /* 313 */ "frame_bound_s", - /* 314 */ "frame_bound_e", - /* 315 */ "frame_exclude_opt", - /* 316 */ "frame_exclude", + /* 204 */ "table_option", + /* 205 */ "columnname", + /* 206 */ "carglist", + /* 207 */ "typetoken", + /* 208 */ "typename", + /* 209 */ "signed", + /* 210 */ "plus_num", + /* 211 */ "minus_num", + /* 212 */ "scanpt", + /* 213 */ "scantok", + /* 214 */ "ccons", + /* 215 */ "term", + /* 216 */ "expr", + /* 217 */ "onconf", + /* 218 */ "sortorder", + /* 219 */ "autoinc", + /* 220 */ "eidlist_opt", + /* 221 */ "refargs", + /* 222 */ "defer_subclause", + /* 223 */ "generated", + /* 224 */ "refarg", + /* 225 */ "refact", + /* 226 */ "init_deferred_pred_opt", + /* 227 */ "conslist", + /* 228 */ "tconscomma", + /* 229 */ "tcons", + /* 230 */ "sortlist", + /* 231 */ "eidlist", + /* 232 */ "defer_subclause_opt", + /* 233 */ "orconf", + /* 234 */ "resolvetype", + /* 235 */ "raisetype", + /* 236 */ "ifexists", + /* 237 */ "fullname", + /* 238 */ "selectnowith", + /* 239 */ "oneselect", + /* 240 */ "wqlist", + /* 241 */ "multiselect_op", + /* 242 */ "distinct", + /* 243 */ "selcollist", + /* 244 */ "from", + /* 245 */ "where_opt", + /* 246 */ "groupby_opt", + /* 247 */ "having_opt", + /* 248 */ "orderby_opt", + /* 249 */ "limit_opt", + /* 250 */ "window_clause", + /* 251 */ "values", + /* 252 */ "nexprlist", + /* 253 */ "sclp", + /* 254 */ "as", + /* 255 */ "seltablist", + /* 256 */ "stl_prefix", + /* 257 */ "joinop", + /* 258 */ "indexed_opt", + /* 259 */ "on_opt", + /* 260 */ "using_opt", + /* 261 */ "exprlist", + /* 262 */ "xfullname", + /* 263 */ "idlist", + /* 264 */ "nulls", + /* 265 */ "with", + /* 266 */ "where_opt_ret", + /* 267 */ "setlist", + /* 268 */ "insert_cmd", + /* 269 */ "idlist_opt", + /* 270 */ "upsert", + /* 271 */ "returning", + /* 272 */ "filter_over", + /* 273 */ "likeop", + /* 274 */ "between_op", + /* 275 */ "in_op", + /* 276 */ "paren_exprlist", + /* 277 */ "case_operand", + /* 278 */ "case_exprlist", + /* 279 */ "case_else", + /* 280 */ "uniqueflag", + /* 281 */ "collate", + /* 282 */ 
"vinto", + /* 283 */ "nmnum", + /* 284 */ "trigger_decl", + /* 285 */ "trigger_cmd_list", + /* 286 */ "trigger_time", + /* 287 */ "trigger_event", + /* 288 */ "foreach_clause", + /* 289 */ "when_clause", + /* 290 */ "trigger_cmd", + /* 291 */ "trnm", + /* 292 */ "tridxby", + /* 293 */ "database_kw_opt", + /* 294 */ "key_opt", + /* 295 */ "add_column_fullname", + /* 296 */ "kwcolumn_opt", + /* 297 */ "create_vtab", + /* 298 */ "vtabarglist", + /* 299 */ "vtabarg", + /* 300 */ "vtabargtoken", + /* 301 */ "lp", + /* 302 */ "anylist", + /* 303 */ "wqitem", + /* 304 */ "wqas", + /* 305 */ "windowdefn_list", + /* 306 */ "windowdefn", + /* 307 */ "window", + /* 308 */ "frame_opt", + /* 309 */ "part_opt", + /* 310 */ "filter_clause", + /* 311 */ "over_clause", + /* 312 */ "range_or_rows", + /* 313 */ "frame_bound", + /* 314 */ "frame_bound_s", + /* 315 */ "frame_bound_e", + /* 316 */ "frame_exclude_opt", + /* 317 */ "frame_exclude", }; #endif /* defined(YYCOVERAGE) || !defined(NDEBUG) */ @@ -159814,385 +161255,388 @@ static const char *const yyRuleName[] = { /* 16 */ "ifnotexists ::= IF NOT EXISTS", /* 17 */ "temp ::= TEMP", /* 18 */ "temp ::=", - /* 19 */ "create_table_args ::= LP columnlist conslist_opt RP table_options", + /* 19 */ "create_table_args ::= LP columnlist conslist_opt RP table_option_set", /* 20 */ "create_table_args ::= AS select", - /* 21 */ "table_options ::=", - /* 22 */ "table_options ::= WITHOUT nm", - /* 23 */ "columnname ::= nm typetoken", - /* 24 */ "typetoken ::=", - /* 25 */ "typetoken ::= typename LP signed RP", - /* 26 */ "typetoken ::= typename LP signed COMMA signed RP", - /* 27 */ "typename ::= typename ID|STRING", - /* 28 */ "scanpt ::=", - /* 29 */ "scantok ::=", - /* 30 */ "ccons ::= CONSTRAINT nm", - /* 31 */ "ccons ::= DEFAULT scantok term", - /* 32 */ "ccons ::= DEFAULT LP expr RP", - /* 33 */ "ccons ::= DEFAULT PLUS scantok term", - /* 34 */ "ccons ::= DEFAULT MINUS scantok term", - /* 35 */ "ccons ::= DEFAULT scantok ID|INDEXED", - /* 36 */ "ccons ::= NOT NULL onconf", - /* 37 */ "ccons ::= PRIMARY KEY sortorder onconf autoinc", - /* 38 */ "ccons ::= UNIQUE onconf", - /* 39 */ "ccons ::= CHECK LP expr RP", - /* 40 */ "ccons ::= REFERENCES nm eidlist_opt refargs", - /* 41 */ "ccons ::= defer_subclause", - /* 42 */ "ccons ::= COLLATE ID|STRING", - /* 43 */ "generated ::= LP expr RP", - /* 44 */ "generated ::= LP expr RP ID", - /* 45 */ "autoinc ::=", - /* 46 */ "autoinc ::= AUTOINCR", - /* 47 */ "refargs ::=", - /* 48 */ "refargs ::= refargs refarg", - /* 49 */ "refarg ::= MATCH nm", - /* 50 */ "refarg ::= ON INSERT refact", - /* 51 */ "refarg ::= ON DELETE refact", - /* 52 */ "refarg ::= ON UPDATE refact", - /* 53 */ "refact ::= SET NULL", - /* 54 */ "refact ::= SET DEFAULT", - /* 55 */ "refact ::= CASCADE", - /* 56 */ "refact ::= RESTRICT", - /* 57 */ "refact ::= NO ACTION", - /* 58 */ "defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt", - /* 59 */ "defer_subclause ::= DEFERRABLE init_deferred_pred_opt", - /* 60 */ "init_deferred_pred_opt ::=", - /* 61 */ "init_deferred_pred_opt ::= INITIALLY DEFERRED", - /* 62 */ "init_deferred_pred_opt ::= INITIALLY IMMEDIATE", - /* 63 */ "conslist_opt ::=", - /* 64 */ "tconscomma ::= COMMA", - /* 65 */ "tcons ::= CONSTRAINT nm", - /* 66 */ "tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf", - /* 67 */ "tcons ::= UNIQUE LP sortlist RP onconf", - /* 68 */ "tcons ::= CHECK LP expr RP onconf", - /* 69 */ "tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt", - /* 70 */ 
"defer_subclause_opt ::=", - /* 71 */ "onconf ::=", - /* 72 */ "onconf ::= ON CONFLICT resolvetype", - /* 73 */ "orconf ::=", - /* 74 */ "orconf ::= OR resolvetype", - /* 75 */ "resolvetype ::= IGNORE", - /* 76 */ "resolvetype ::= REPLACE", - /* 77 */ "cmd ::= DROP TABLE ifexists fullname", - /* 78 */ "ifexists ::= IF EXISTS", - /* 79 */ "ifexists ::=", - /* 80 */ "cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select", - /* 81 */ "cmd ::= DROP VIEW ifexists fullname", - /* 82 */ "cmd ::= select", - /* 83 */ "select ::= WITH wqlist selectnowith", - /* 84 */ "select ::= WITH RECURSIVE wqlist selectnowith", - /* 85 */ "select ::= selectnowith", - /* 86 */ "selectnowith ::= selectnowith multiselect_op oneselect", - /* 87 */ "multiselect_op ::= UNION", - /* 88 */ "multiselect_op ::= UNION ALL", - /* 89 */ "multiselect_op ::= EXCEPT|INTERSECT", - /* 90 */ "oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt", - /* 91 */ "oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt", - /* 92 */ "values ::= VALUES LP nexprlist RP", - /* 93 */ "values ::= values COMMA LP nexprlist RP", - /* 94 */ "distinct ::= DISTINCT", - /* 95 */ "distinct ::= ALL", - /* 96 */ "distinct ::=", - /* 97 */ "sclp ::=", - /* 98 */ "selcollist ::= sclp scanpt expr scanpt as", - /* 99 */ "selcollist ::= sclp scanpt STAR", - /* 100 */ "selcollist ::= sclp scanpt nm DOT STAR", - /* 101 */ "as ::= AS nm", - /* 102 */ "as ::=", - /* 103 */ "from ::=", - /* 104 */ "from ::= FROM seltablist", - /* 105 */ "stl_prefix ::= seltablist joinop", - /* 106 */ "stl_prefix ::=", - /* 107 */ "seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt", - /* 108 */ "seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt", - /* 109 */ "seltablist ::= stl_prefix LP select RP as on_opt using_opt", - /* 110 */ "seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt", - /* 111 */ "dbnm ::=", - /* 112 */ "dbnm ::= DOT nm", - /* 113 */ "fullname ::= nm", - /* 114 */ "fullname ::= nm DOT nm", - /* 115 */ "xfullname ::= nm", - /* 116 */ "xfullname ::= nm DOT nm", - /* 117 */ "xfullname ::= nm DOT nm AS nm", - /* 118 */ "xfullname ::= nm AS nm", - /* 119 */ "joinop ::= COMMA|JOIN", - /* 120 */ "joinop ::= JOIN_KW JOIN", - /* 121 */ "joinop ::= JOIN_KW nm JOIN", - /* 122 */ "joinop ::= JOIN_KW nm nm JOIN", - /* 123 */ "on_opt ::= ON expr", - /* 124 */ "on_opt ::=", - /* 125 */ "indexed_opt ::=", - /* 126 */ "indexed_opt ::= INDEXED BY nm", - /* 127 */ "indexed_opt ::= NOT INDEXED", - /* 128 */ "using_opt ::= USING LP idlist RP", - /* 129 */ "using_opt ::=", - /* 130 */ "orderby_opt ::=", - /* 131 */ "orderby_opt ::= ORDER BY sortlist", - /* 132 */ "sortlist ::= sortlist COMMA expr sortorder nulls", - /* 133 */ "sortlist ::= expr sortorder nulls", - /* 134 */ "sortorder ::= ASC", - /* 135 */ "sortorder ::= DESC", - /* 136 */ "sortorder ::=", - /* 137 */ "nulls ::= NULLS FIRST", - /* 138 */ "nulls ::= NULLS LAST", - /* 139 */ "nulls ::=", - /* 140 */ "groupby_opt ::=", - /* 141 */ "groupby_opt ::= GROUP BY nexprlist", - /* 142 */ "having_opt ::=", - /* 143 */ "having_opt ::= HAVING expr", - /* 144 */ "limit_opt ::=", - /* 145 */ "limit_opt ::= LIMIT expr", - /* 146 */ "limit_opt ::= LIMIT expr OFFSET expr", - /* 147 */ "limit_opt ::= LIMIT expr COMMA expr", - /* 148 */ "cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret", - /* 149 */ "where_opt ::=", - /* 150 */ "where_opt ::= WHERE expr", - /* 
151 */ "where_opt_ret ::=", - /* 152 */ "where_opt_ret ::= WHERE expr", - /* 153 */ "where_opt_ret ::= RETURNING selcollist", - /* 154 */ "where_opt_ret ::= WHERE expr RETURNING selcollist", - /* 155 */ "cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret", - /* 156 */ "setlist ::= setlist COMMA nm EQ expr", - /* 157 */ "setlist ::= setlist COMMA LP idlist RP EQ expr", - /* 158 */ "setlist ::= nm EQ expr", - /* 159 */ "setlist ::= LP idlist RP EQ expr", - /* 160 */ "cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert", - /* 161 */ "cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning", - /* 162 */ "upsert ::=", - /* 163 */ "upsert ::= RETURNING selcollist", - /* 164 */ "upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert", - /* 165 */ "upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert", - /* 166 */ "upsert ::= ON CONFLICT DO NOTHING returning", - /* 167 */ "upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning", - /* 168 */ "returning ::= RETURNING selcollist", - /* 169 */ "insert_cmd ::= INSERT orconf", - /* 170 */ "insert_cmd ::= REPLACE", - /* 171 */ "idlist_opt ::=", - /* 172 */ "idlist_opt ::= LP idlist RP", - /* 173 */ "idlist ::= idlist COMMA nm", - /* 174 */ "idlist ::= nm", - /* 175 */ "expr ::= LP expr RP", - /* 176 */ "expr ::= ID|INDEXED", - /* 177 */ "expr ::= JOIN_KW", - /* 178 */ "expr ::= nm DOT nm", - /* 179 */ "expr ::= nm DOT nm DOT nm", - /* 180 */ "term ::= NULL|FLOAT|BLOB", - /* 181 */ "term ::= STRING", - /* 182 */ "term ::= INTEGER", - /* 183 */ "expr ::= VARIABLE", - /* 184 */ "expr ::= expr COLLATE ID|STRING", - /* 185 */ "expr ::= CAST LP expr AS typetoken RP", - /* 186 */ "expr ::= ID|INDEXED LP distinct exprlist RP", - /* 187 */ "expr ::= ID|INDEXED LP STAR RP", - /* 188 */ "expr ::= ID|INDEXED LP distinct exprlist RP filter_over", - /* 189 */ "expr ::= ID|INDEXED LP STAR RP filter_over", - /* 190 */ "term ::= CTIME_KW", - /* 191 */ "expr ::= LP nexprlist COMMA expr RP", - /* 192 */ "expr ::= expr AND expr", - /* 193 */ "expr ::= expr OR expr", - /* 194 */ "expr ::= expr LT|GT|GE|LE expr", - /* 195 */ "expr ::= expr EQ|NE expr", - /* 196 */ "expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr", - /* 197 */ "expr ::= expr PLUS|MINUS expr", - /* 198 */ "expr ::= expr STAR|SLASH|REM expr", - /* 199 */ "expr ::= expr CONCAT expr", - /* 200 */ "likeop ::= NOT LIKE_KW|MATCH", - /* 201 */ "expr ::= expr likeop expr", - /* 202 */ "expr ::= expr likeop expr ESCAPE expr", - /* 203 */ "expr ::= expr ISNULL|NOTNULL", - /* 204 */ "expr ::= expr NOT NULL", - /* 205 */ "expr ::= expr IS expr", - /* 206 */ "expr ::= expr IS NOT expr", - /* 207 */ "expr ::= NOT expr", - /* 208 */ "expr ::= BITNOT expr", - /* 209 */ "expr ::= PLUS|MINUS expr", - /* 210 */ "between_op ::= BETWEEN", - /* 211 */ "between_op ::= NOT BETWEEN", - /* 212 */ "expr ::= expr between_op expr AND expr", - /* 213 */ "in_op ::= IN", - /* 214 */ "in_op ::= NOT IN", - /* 215 */ "expr ::= expr in_op LP exprlist RP", - /* 216 */ "expr ::= LP select RP", - /* 217 */ "expr ::= expr in_op LP select RP", - /* 218 */ "expr ::= expr in_op nm dbnm paren_exprlist", - /* 219 */ "expr ::= EXISTS LP select RP", - /* 220 */ "expr ::= CASE case_operand case_exprlist case_else END", - /* 221 */ "case_exprlist ::= case_exprlist WHEN expr THEN expr", - /* 222 */ "case_exprlist ::= WHEN expr THEN expr", - /* 223 */ "case_else ::= ELSE expr", - /* 224 */ "case_else ::=", - /* 225 */ "case_operand ::= expr", - /* 
226 */ "case_operand ::=", - /* 227 */ "exprlist ::=", - /* 228 */ "nexprlist ::= nexprlist COMMA expr", - /* 229 */ "nexprlist ::= expr", - /* 230 */ "paren_exprlist ::=", - /* 231 */ "paren_exprlist ::= LP exprlist RP", - /* 232 */ "cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt", - /* 233 */ "uniqueflag ::= UNIQUE", - /* 234 */ "uniqueflag ::=", - /* 235 */ "eidlist_opt ::=", - /* 236 */ "eidlist_opt ::= LP eidlist RP", - /* 237 */ "eidlist ::= eidlist COMMA nm collate sortorder", - /* 238 */ "eidlist ::= nm collate sortorder", - /* 239 */ "collate ::=", - /* 240 */ "collate ::= COLLATE ID|STRING", - /* 241 */ "cmd ::= DROP INDEX ifexists fullname", - /* 242 */ "cmd ::= VACUUM vinto", - /* 243 */ "cmd ::= VACUUM nm vinto", - /* 244 */ "vinto ::= INTO expr", - /* 245 */ "vinto ::=", - /* 246 */ "cmd ::= PRAGMA nm dbnm", - /* 247 */ "cmd ::= PRAGMA nm dbnm EQ nmnum", - /* 248 */ "cmd ::= PRAGMA nm dbnm LP nmnum RP", - /* 249 */ "cmd ::= PRAGMA nm dbnm EQ minus_num", - /* 250 */ "cmd ::= PRAGMA nm dbnm LP minus_num RP", - /* 251 */ "plus_num ::= PLUS INTEGER|FLOAT", - /* 252 */ "minus_num ::= MINUS INTEGER|FLOAT", - /* 253 */ "cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END", - /* 254 */ "trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause", - /* 255 */ "trigger_time ::= BEFORE|AFTER", - /* 256 */ "trigger_time ::= INSTEAD OF", - /* 257 */ "trigger_time ::=", - /* 258 */ "trigger_event ::= DELETE|INSERT", - /* 259 */ "trigger_event ::= UPDATE", - /* 260 */ "trigger_event ::= UPDATE OF idlist", - /* 261 */ "when_clause ::=", - /* 262 */ "when_clause ::= WHEN expr", - /* 263 */ "trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI", - /* 264 */ "trigger_cmd_list ::= trigger_cmd SEMI", - /* 265 */ "trnm ::= nm DOT nm", - /* 266 */ "tridxby ::= INDEXED BY nm", - /* 267 */ "tridxby ::= NOT INDEXED", - /* 268 */ "trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt", - /* 269 */ "trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt", - /* 270 */ "trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt", - /* 271 */ "trigger_cmd ::= scanpt select scanpt", - /* 272 */ "expr ::= RAISE LP IGNORE RP", - /* 273 */ "expr ::= RAISE LP raisetype COMMA nm RP", - /* 274 */ "raisetype ::= ROLLBACK", - /* 275 */ "raisetype ::= ABORT", - /* 276 */ "raisetype ::= FAIL", - /* 277 */ "cmd ::= DROP TRIGGER ifexists fullname", - /* 278 */ "cmd ::= ATTACH database_kw_opt expr AS expr key_opt", - /* 279 */ "cmd ::= DETACH database_kw_opt expr", - /* 280 */ "key_opt ::=", - /* 281 */ "key_opt ::= KEY expr", - /* 282 */ "cmd ::= REINDEX", - /* 283 */ "cmd ::= REINDEX nm dbnm", - /* 284 */ "cmd ::= ANALYZE", - /* 285 */ "cmd ::= ANALYZE nm dbnm", - /* 286 */ "cmd ::= ALTER TABLE fullname RENAME TO nm", - /* 287 */ "cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist", - /* 288 */ "cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm", - /* 289 */ "add_column_fullname ::= fullname", - /* 290 */ "cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm", - /* 291 */ "cmd ::= create_vtab", - /* 292 */ "cmd ::= create_vtab LP vtabarglist RP", - /* 293 */ "create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm", - /* 294 */ "vtabarg ::=", - /* 295 */ "vtabargtoken ::= ANY", - /* 296 */ "vtabargtoken ::= lp anylist RP", - /* 297 */ "lp ::= LP", - /* 298 */ "with ::= WITH wqlist", - /* 299 */ "with ::= WITH RECURSIVE wqlist", - /* 
300 */ "wqas ::= AS", - /* 301 */ "wqas ::= AS MATERIALIZED", - /* 302 */ "wqas ::= AS NOT MATERIALIZED", - /* 303 */ "wqitem ::= nm eidlist_opt wqas LP select RP", - /* 304 */ "wqlist ::= wqitem", - /* 305 */ "wqlist ::= wqlist COMMA wqitem", - /* 306 */ "windowdefn_list ::= windowdefn", - /* 307 */ "windowdefn_list ::= windowdefn_list COMMA windowdefn", - /* 308 */ "windowdefn ::= nm AS LP window RP", - /* 309 */ "window ::= PARTITION BY nexprlist orderby_opt frame_opt", - /* 310 */ "window ::= nm PARTITION BY nexprlist orderby_opt frame_opt", - /* 311 */ "window ::= ORDER BY sortlist frame_opt", - /* 312 */ "window ::= nm ORDER BY sortlist frame_opt", - /* 313 */ "window ::= frame_opt", - /* 314 */ "window ::= nm frame_opt", - /* 315 */ "frame_opt ::=", - /* 316 */ "frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt", - /* 317 */ "frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt", - /* 318 */ "range_or_rows ::= RANGE|ROWS|GROUPS", - /* 319 */ "frame_bound_s ::= frame_bound", - /* 320 */ "frame_bound_s ::= UNBOUNDED PRECEDING", - /* 321 */ "frame_bound_e ::= frame_bound", - /* 322 */ "frame_bound_e ::= UNBOUNDED FOLLOWING", - /* 323 */ "frame_bound ::= expr PRECEDING|FOLLOWING", - /* 324 */ "frame_bound ::= CURRENT ROW", - /* 325 */ "frame_exclude_opt ::=", - /* 326 */ "frame_exclude_opt ::= EXCLUDE frame_exclude", - /* 327 */ "frame_exclude ::= NO OTHERS", - /* 328 */ "frame_exclude ::= CURRENT ROW", - /* 329 */ "frame_exclude ::= GROUP|TIES", - /* 330 */ "window_clause ::= WINDOW windowdefn_list", - /* 331 */ "filter_over ::= filter_clause over_clause", - /* 332 */ "filter_over ::= over_clause", - /* 333 */ "filter_over ::= filter_clause", - /* 334 */ "over_clause ::= OVER LP window RP", - /* 335 */ "over_clause ::= OVER nm", - /* 336 */ "filter_clause ::= FILTER LP WHERE expr RP", - /* 337 */ "input ::= cmdlist", - /* 338 */ "cmdlist ::= cmdlist ecmd", - /* 339 */ "cmdlist ::= ecmd", - /* 340 */ "ecmd ::= SEMI", - /* 341 */ "ecmd ::= cmdx SEMI", - /* 342 */ "ecmd ::= explain cmdx SEMI", - /* 343 */ "trans_opt ::=", - /* 344 */ "trans_opt ::= TRANSACTION", - /* 345 */ "trans_opt ::= TRANSACTION nm", - /* 346 */ "savepoint_opt ::= SAVEPOINT", - /* 347 */ "savepoint_opt ::=", - /* 348 */ "cmd ::= create_table create_table_args", - /* 349 */ "columnlist ::= columnlist COMMA columnname carglist", - /* 350 */ "columnlist ::= columnname carglist", - /* 351 */ "nm ::= ID|INDEXED", - /* 352 */ "nm ::= STRING", - /* 353 */ "nm ::= JOIN_KW", - /* 354 */ "typetoken ::= typename", - /* 355 */ "typename ::= ID|STRING", - /* 356 */ "signed ::= plus_num", - /* 357 */ "signed ::= minus_num", - /* 358 */ "carglist ::= carglist ccons", - /* 359 */ "carglist ::=", - /* 360 */ "ccons ::= NULL onconf", - /* 361 */ "ccons ::= GENERATED ALWAYS AS generated", - /* 362 */ "ccons ::= AS generated", - /* 363 */ "conslist_opt ::= COMMA conslist", - /* 364 */ "conslist ::= conslist tconscomma tcons", - /* 365 */ "conslist ::= tcons", - /* 366 */ "tconscomma ::=", - /* 367 */ "defer_subclause_opt ::= defer_subclause", - /* 368 */ "resolvetype ::= raisetype", - /* 369 */ "selectnowith ::= oneselect", - /* 370 */ "oneselect ::= values", - /* 371 */ "sclp ::= selcollist COMMA", - /* 372 */ "as ::= ID|STRING", - /* 373 */ "returning ::=", - /* 374 */ "expr ::= term", - /* 375 */ "likeop ::= LIKE_KW|MATCH", - /* 376 */ "exprlist ::= nexprlist", - /* 377 */ "nmnum ::= plus_num", - /* 378 */ "nmnum ::= nm", - /* 379 */ "nmnum ::= ON", - /* 380 */ "nmnum ::= DELETE", - /* 381 
*/ "nmnum ::= DEFAULT", - /* 382 */ "plus_num ::= INTEGER|FLOAT", - /* 383 */ "foreach_clause ::=", - /* 384 */ "foreach_clause ::= FOR EACH ROW", - /* 385 */ "trnm ::= nm", - /* 386 */ "tridxby ::=", - /* 387 */ "database_kw_opt ::= DATABASE", - /* 388 */ "database_kw_opt ::=", - /* 389 */ "kwcolumn_opt ::=", - /* 390 */ "kwcolumn_opt ::= COLUMNKW", - /* 391 */ "vtabarglist ::= vtabarg", - /* 392 */ "vtabarglist ::= vtabarglist COMMA vtabarg", - /* 393 */ "vtabarg ::= vtabarg vtabargtoken", - /* 394 */ "anylist ::=", - /* 395 */ "anylist ::= anylist LP anylist RP", - /* 396 */ "anylist ::= anylist ANY", - /* 397 */ "with ::=", + /* 21 */ "table_option_set ::=", + /* 22 */ "table_option_set ::= table_option_set COMMA table_option", + /* 23 */ "table_option ::= WITHOUT nm", + /* 24 */ "table_option ::= nm", + /* 25 */ "columnname ::= nm typetoken", + /* 26 */ "typetoken ::=", + /* 27 */ "typetoken ::= typename LP signed RP", + /* 28 */ "typetoken ::= typename LP signed COMMA signed RP", + /* 29 */ "typename ::= typename ID|STRING", + /* 30 */ "scanpt ::=", + /* 31 */ "scantok ::=", + /* 32 */ "ccons ::= CONSTRAINT nm", + /* 33 */ "ccons ::= DEFAULT scantok term", + /* 34 */ "ccons ::= DEFAULT LP expr RP", + /* 35 */ "ccons ::= DEFAULT PLUS scantok term", + /* 36 */ "ccons ::= DEFAULT MINUS scantok term", + /* 37 */ "ccons ::= DEFAULT scantok ID|INDEXED", + /* 38 */ "ccons ::= NOT NULL onconf", + /* 39 */ "ccons ::= PRIMARY KEY sortorder onconf autoinc", + /* 40 */ "ccons ::= UNIQUE onconf", + /* 41 */ "ccons ::= CHECK LP expr RP", + /* 42 */ "ccons ::= REFERENCES nm eidlist_opt refargs", + /* 43 */ "ccons ::= defer_subclause", + /* 44 */ "ccons ::= COLLATE ID|STRING", + /* 45 */ "generated ::= LP expr RP", + /* 46 */ "generated ::= LP expr RP ID", + /* 47 */ "autoinc ::=", + /* 48 */ "autoinc ::= AUTOINCR", + /* 49 */ "refargs ::=", + /* 50 */ "refargs ::= refargs refarg", + /* 51 */ "refarg ::= MATCH nm", + /* 52 */ "refarg ::= ON INSERT refact", + /* 53 */ "refarg ::= ON DELETE refact", + /* 54 */ "refarg ::= ON UPDATE refact", + /* 55 */ "refact ::= SET NULL", + /* 56 */ "refact ::= SET DEFAULT", + /* 57 */ "refact ::= CASCADE", + /* 58 */ "refact ::= RESTRICT", + /* 59 */ "refact ::= NO ACTION", + /* 60 */ "defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt", + /* 61 */ "defer_subclause ::= DEFERRABLE init_deferred_pred_opt", + /* 62 */ "init_deferred_pred_opt ::=", + /* 63 */ "init_deferred_pred_opt ::= INITIALLY DEFERRED", + /* 64 */ "init_deferred_pred_opt ::= INITIALLY IMMEDIATE", + /* 65 */ "conslist_opt ::=", + /* 66 */ "tconscomma ::= COMMA", + /* 67 */ "tcons ::= CONSTRAINT nm", + /* 68 */ "tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf", + /* 69 */ "tcons ::= UNIQUE LP sortlist RP onconf", + /* 70 */ "tcons ::= CHECK LP expr RP onconf", + /* 71 */ "tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt", + /* 72 */ "defer_subclause_opt ::=", + /* 73 */ "onconf ::=", + /* 74 */ "onconf ::= ON CONFLICT resolvetype", + /* 75 */ "orconf ::=", + /* 76 */ "orconf ::= OR resolvetype", + /* 77 */ "resolvetype ::= IGNORE", + /* 78 */ "resolvetype ::= REPLACE", + /* 79 */ "cmd ::= DROP TABLE ifexists fullname", + /* 80 */ "ifexists ::= IF EXISTS", + /* 81 */ "ifexists ::=", + /* 82 */ "cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select", + /* 83 */ "cmd ::= DROP VIEW ifexists fullname", + /* 84 */ "cmd ::= select", + /* 85 */ "select ::= WITH wqlist selectnowith", + /* 86 */ "select ::= WITH RECURSIVE wqlist selectnowith", 
+ /* 87 */ "select ::= selectnowith", + /* 88 */ "selectnowith ::= selectnowith multiselect_op oneselect", + /* 89 */ "multiselect_op ::= UNION", + /* 90 */ "multiselect_op ::= UNION ALL", + /* 91 */ "multiselect_op ::= EXCEPT|INTERSECT", + /* 92 */ "oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt", + /* 93 */ "oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt", + /* 94 */ "values ::= VALUES LP nexprlist RP", + /* 95 */ "values ::= values COMMA LP nexprlist RP", + /* 96 */ "distinct ::= DISTINCT", + /* 97 */ "distinct ::= ALL", + /* 98 */ "distinct ::=", + /* 99 */ "sclp ::=", + /* 100 */ "selcollist ::= sclp scanpt expr scanpt as", + /* 101 */ "selcollist ::= sclp scanpt STAR", + /* 102 */ "selcollist ::= sclp scanpt nm DOT STAR", + /* 103 */ "as ::= AS nm", + /* 104 */ "as ::=", + /* 105 */ "from ::=", + /* 106 */ "from ::= FROM seltablist", + /* 107 */ "stl_prefix ::= seltablist joinop", + /* 108 */ "stl_prefix ::=", + /* 109 */ "seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt", + /* 110 */ "seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt", + /* 111 */ "seltablist ::= stl_prefix LP select RP as on_opt using_opt", + /* 112 */ "seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt", + /* 113 */ "dbnm ::=", + /* 114 */ "dbnm ::= DOT nm", + /* 115 */ "fullname ::= nm", + /* 116 */ "fullname ::= nm DOT nm", + /* 117 */ "xfullname ::= nm", + /* 118 */ "xfullname ::= nm DOT nm", + /* 119 */ "xfullname ::= nm DOT nm AS nm", + /* 120 */ "xfullname ::= nm AS nm", + /* 121 */ "joinop ::= COMMA|JOIN", + /* 122 */ "joinop ::= JOIN_KW JOIN", + /* 123 */ "joinop ::= JOIN_KW nm JOIN", + /* 124 */ "joinop ::= JOIN_KW nm nm JOIN", + /* 125 */ "on_opt ::= ON expr", + /* 126 */ "on_opt ::=", + /* 127 */ "indexed_opt ::=", + /* 128 */ "indexed_opt ::= INDEXED BY nm", + /* 129 */ "indexed_opt ::= NOT INDEXED", + /* 130 */ "using_opt ::= USING LP idlist RP", + /* 131 */ "using_opt ::=", + /* 132 */ "orderby_opt ::=", + /* 133 */ "orderby_opt ::= ORDER BY sortlist", + /* 134 */ "sortlist ::= sortlist COMMA expr sortorder nulls", + /* 135 */ "sortlist ::= expr sortorder nulls", + /* 136 */ "sortorder ::= ASC", + /* 137 */ "sortorder ::= DESC", + /* 138 */ "sortorder ::=", + /* 139 */ "nulls ::= NULLS FIRST", + /* 140 */ "nulls ::= NULLS LAST", + /* 141 */ "nulls ::=", + /* 142 */ "groupby_opt ::=", + /* 143 */ "groupby_opt ::= GROUP BY nexprlist", + /* 144 */ "having_opt ::=", + /* 145 */ "having_opt ::= HAVING expr", + /* 146 */ "limit_opt ::=", + /* 147 */ "limit_opt ::= LIMIT expr", + /* 148 */ "limit_opt ::= LIMIT expr OFFSET expr", + /* 149 */ "limit_opt ::= LIMIT expr COMMA expr", + /* 150 */ "cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret", + /* 151 */ "where_opt ::=", + /* 152 */ "where_opt ::= WHERE expr", + /* 153 */ "where_opt_ret ::=", + /* 154 */ "where_opt_ret ::= WHERE expr", + /* 155 */ "where_opt_ret ::= RETURNING selcollist", + /* 156 */ "where_opt_ret ::= WHERE expr RETURNING selcollist", + /* 157 */ "cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret", + /* 158 */ "setlist ::= setlist COMMA nm EQ expr", + /* 159 */ "setlist ::= setlist COMMA LP idlist RP EQ expr", + /* 160 */ "setlist ::= nm EQ expr", + /* 161 */ "setlist ::= LP idlist RP EQ expr", + /* 162 */ "cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert", + /* 163 */ "cmd ::= with insert_cmd INTO xfullname 
idlist_opt DEFAULT VALUES returning", + /* 164 */ "upsert ::=", + /* 165 */ "upsert ::= RETURNING selcollist", + /* 166 */ "upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert", + /* 167 */ "upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert", + /* 168 */ "upsert ::= ON CONFLICT DO NOTHING returning", + /* 169 */ "upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning", + /* 170 */ "returning ::= RETURNING selcollist", + /* 171 */ "insert_cmd ::= INSERT orconf", + /* 172 */ "insert_cmd ::= REPLACE", + /* 173 */ "idlist_opt ::=", + /* 174 */ "idlist_opt ::= LP idlist RP", + /* 175 */ "idlist ::= idlist COMMA nm", + /* 176 */ "idlist ::= nm", + /* 177 */ "expr ::= LP expr RP", + /* 178 */ "expr ::= ID|INDEXED", + /* 179 */ "expr ::= JOIN_KW", + /* 180 */ "expr ::= nm DOT nm", + /* 181 */ "expr ::= nm DOT nm DOT nm", + /* 182 */ "term ::= NULL|FLOAT|BLOB", + /* 183 */ "term ::= STRING", + /* 184 */ "term ::= INTEGER", + /* 185 */ "expr ::= VARIABLE", + /* 186 */ "expr ::= expr COLLATE ID|STRING", + /* 187 */ "expr ::= CAST LP expr AS typetoken RP", + /* 188 */ "expr ::= ID|INDEXED LP distinct exprlist RP", + /* 189 */ "expr ::= ID|INDEXED LP STAR RP", + /* 190 */ "expr ::= ID|INDEXED LP distinct exprlist RP filter_over", + /* 191 */ "expr ::= ID|INDEXED LP STAR RP filter_over", + /* 192 */ "term ::= CTIME_KW", + /* 193 */ "expr ::= LP nexprlist COMMA expr RP", + /* 194 */ "expr ::= expr AND expr", + /* 195 */ "expr ::= expr OR expr", + /* 196 */ "expr ::= expr LT|GT|GE|LE expr", + /* 197 */ "expr ::= expr EQ|NE expr", + /* 198 */ "expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr", + /* 199 */ "expr ::= expr PLUS|MINUS expr", + /* 200 */ "expr ::= expr STAR|SLASH|REM expr", + /* 201 */ "expr ::= expr CONCAT expr", + /* 202 */ "likeop ::= NOT LIKE_KW|MATCH", + /* 203 */ "expr ::= expr likeop expr", + /* 204 */ "expr ::= expr likeop expr ESCAPE expr", + /* 205 */ "expr ::= expr ISNULL|NOTNULL", + /* 206 */ "expr ::= expr NOT NULL", + /* 207 */ "expr ::= expr IS expr", + /* 208 */ "expr ::= expr IS NOT expr", + /* 209 */ "expr ::= NOT expr", + /* 210 */ "expr ::= BITNOT expr", + /* 211 */ "expr ::= PLUS|MINUS expr", + /* 212 */ "between_op ::= BETWEEN", + /* 213 */ "between_op ::= NOT BETWEEN", + /* 214 */ "expr ::= expr between_op expr AND expr", + /* 215 */ "in_op ::= IN", + /* 216 */ "in_op ::= NOT IN", + /* 217 */ "expr ::= expr in_op LP exprlist RP", + /* 218 */ "expr ::= LP select RP", + /* 219 */ "expr ::= expr in_op LP select RP", + /* 220 */ "expr ::= expr in_op nm dbnm paren_exprlist", + /* 221 */ "expr ::= EXISTS LP select RP", + /* 222 */ "expr ::= CASE case_operand case_exprlist case_else END", + /* 223 */ "case_exprlist ::= case_exprlist WHEN expr THEN expr", + /* 224 */ "case_exprlist ::= WHEN expr THEN expr", + /* 225 */ "case_else ::= ELSE expr", + /* 226 */ "case_else ::=", + /* 227 */ "case_operand ::= expr", + /* 228 */ "case_operand ::=", + /* 229 */ "exprlist ::=", + /* 230 */ "nexprlist ::= nexprlist COMMA expr", + /* 231 */ "nexprlist ::= expr", + /* 232 */ "paren_exprlist ::=", + /* 233 */ "paren_exprlist ::= LP exprlist RP", + /* 234 */ "cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt", + /* 235 */ "uniqueflag ::= UNIQUE", + /* 236 */ "uniqueflag ::=", + /* 237 */ "eidlist_opt ::=", + /* 238 */ "eidlist_opt ::= LP eidlist RP", + /* 239 */ "eidlist ::= eidlist COMMA nm collate sortorder", + /* 240 */ "eidlist ::= nm collate sortorder", + /* 241 */ "collate ::=", + /* 242 */ 
"collate ::= COLLATE ID|STRING", + /* 243 */ "cmd ::= DROP INDEX ifexists fullname", + /* 244 */ "cmd ::= VACUUM vinto", + /* 245 */ "cmd ::= VACUUM nm vinto", + /* 246 */ "vinto ::= INTO expr", + /* 247 */ "vinto ::=", + /* 248 */ "cmd ::= PRAGMA nm dbnm", + /* 249 */ "cmd ::= PRAGMA nm dbnm EQ nmnum", + /* 250 */ "cmd ::= PRAGMA nm dbnm LP nmnum RP", + /* 251 */ "cmd ::= PRAGMA nm dbnm EQ minus_num", + /* 252 */ "cmd ::= PRAGMA nm dbnm LP minus_num RP", + /* 253 */ "plus_num ::= PLUS INTEGER|FLOAT", + /* 254 */ "minus_num ::= MINUS INTEGER|FLOAT", + /* 255 */ "cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END", + /* 256 */ "trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause", + /* 257 */ "trigger_time ::= BEFORE|AFTER", + /* 258 */ "trigger_time ::= INSTEAD OF", + /* 259 */ "trigger_time ::=", + /* 260 */ "trigger_event ::= DELETE|INSERT", + /* 261 */ "trigger_event ::= UPDATE", + /* 262 */ "trigger_event ::= UPDATE OF idlist", + /* 263 */ "when_clause ::=", + /* 264 */ "when_clause ::= WHEN expr", + /* 265 */ "trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI", + /* 266 */ "trigger_cmd_list ::= trigger_cmd SEMI", + /* 267 */ "trnm ::= nm DOT nm", + /* 268 */ "tridxby ::= INDEXED BY nm", + /* 269 */ "tridxby ::= NOT INDEXED", + /* 270 */ "trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt", + /* 271 */ "trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt", + /* 272 */ "trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt", + /* 273 */ "trigger_cmd ::= scanpt select scanpt", + /* 274 */ "expr ::= RAISE LP IGNORE RP", + /* 275 */ "expr ::= RAISE LP raisetype COMMA nm RP", + /* 276 */ "raisetype ::= ROLLBACK", + /* 277 */ "raisetype ::= ABORT", + /* 278 */ "raisetype ::= FAIL", + /* 279 */ "cmd ::= DROP TRIGGER ifexists fullname", + /* 280 */ "cmd ::= ATTACH database_kw_opt expr AS expr key_opt", + /* 281 */ "cmd ::= DETACH database_kw_opt expr", + /* 282 */ "key_opt ::=", + /* 283 */ "key_opt ::= KEY expr", + /* 284 */ "cmd ::= REINDEX", + /* 285 */ "cmd ::= REINDEX nm dbnm", + /* 286 */ "cmd ::= ANALYZE", + /* 287 */ "cmd ::= ANALYZE nm dbnm", + /* 288 */ "cmd ::= ALTER TABLE fullname RENAME TO nm", + /* 289 */ "cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist", + /* 290 */ "cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm", + /* 291 */ "add_column_fullname ::= fullname", + /* 292 */ "cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm", + /* 293 */ "cmd ::= create_vtab", + /* 294 */ "cmd ::= create_vtab LP vtabarglist RP", + /* 295 */ "create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm", + /* 296 */ "vtabarg ::=", + /* 297 */ "vtabargtoken ::= ANY", + /* 298 */ "vtabargtoken ::= lp anylist RP", + /* 299 */ "lp ::= LP", + /* 300 */ "with ::= WITH wqlist", + /* 301 */ "with ::= WITH RECURSIVE wqlist", + /* 302 */ "wqas ::= AS", + /* 303 */ "wqas ::= AS MATERIALIZED", + /* 304 */ "wqas ::= AS NOT MATERIALIZED", + /* 305 */ "wqitem ::= nm eidlist_opt wqas LP select RP", + /* 306 */ "wqlist ::= wqitem", + /* 307 */ "wqlist ::= wqlist COMMA wqitem", + /* 308 */ "windowdefn_list ::= windowdefn", + /* 309 */ "windowdefn_list ::= windowdefn_list COMMA windowdefn", + /* 310 */ "windowdefn ::= nm AS LP window RP", + /* 311 */ "window ::= PARTITION BY nexprlist orderby_opt frame_opt", + /* 312 */ "window ::= nm PARTITION BY nexprlist orderby_opt frame_opt", + /* 313 */ "window ::= ORDER BY sortlist frame_opt", + /* 314 
*/ "window ::= nm ORDER BY sortlist frame_opt", + /* 315 */ "window ::= frame_opt", + /* 316 */ "window ::= nm frame_opt", + /* 317 */ "frame_opt ::=", + /* 318 */ "frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt", + /* 319 */ "frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt", + /* 320 */ "range_or_rows ::= RANGE|ROWS|GROUPS", + /* 321 */ "frame_bound_s ::= frame_bound", + /* 322 */ "frame_bound_s ::= UNBOUNDED PRECEDING", + /* 323 */ "frame_bound_e ::= frame_bound", + /* 324 */ "frame_bound_e ::= UNBOUNDED FOLLOWING", + /* 325 */ "frame_bound ::= expr PRECEDING|FOLLOWING", + /* 326 */ "frame_bound ::= CURRENT ROW", + /* 327 */ "frame_exclude_opt ::=", + /* 328 */ "frame_exclude_opt ::= EXCLUDE frame_exclude", + /* 329 */ "frame_exclude ::= NO OTHERS", + /* 330 */ "frame_exclude ::= CURRENT ROW", + /* 331 */ "frame_exclude ::= GROUP|TIES", + /* 332 */ "window_clause ::= WINDOW windowdefn_list", + /* 333 */ "filter_over ::= filter_clause over_clause", + /* 334 */ "filter_over ::= over_clause", + /* 335 */ "filter_over ::= filter_clause", + /* 336 */ "over_clause ::= OVER LP window RP", + /* 337 */ "over_clause ::= OVER nm", + /* 338 */ "filter_clause ::= FILTER LP WHERE expr RP", + /* 339 */ "input ::= cmdlist", + /* 340 */ "cmdlist ::= cmdlist ecmd", + /* 341 */ "cmdlist ::= ecmd", + /* 342 */ "ecmd ::= SEMI", + /* 343 */ "ecmd ::= cmdx SEMI", + /* 344 */ "ecmd ::= explain cmdx SEMI", + /* 345 */ "trans_opt ::=", + /* 346 */ "trans_opt ::= TRANSACTION", + /* 347 */ "trans_opt ::= TRANSACTION nm", + /* 348 */ "savepoint_opt ::= SAVEPOINT", + /* 349 */ "savepoint_opt ::=", + /* 350 */ "cmd ::= create_table create_table_args", + /* 351 */ "table_option_set ::= table_option", + /* 352 */ "columnlist ::= columnlist COMMA columnname carglist", + /* 353 */ "columnlist ::= columnname carglist", + /* 354 */ "nm ::= ID|INDEXED", + /* 355 */ "nm ::= STRING", + /* 356 */ "nm ::= JOIN_KW", + /* 357 */ "typetoken ::= typename", + /* 358 */ "typename ::= ID|STRING", + /* 359 */ "signed ::= plus_num", + /* 360 */ "signed ::= minus_num", + /* 361 */ "carglist ::= carglist ccons", + /* 362 */ "carglist ::=", + /* 363 */ "ccons ::= NULL onconf", + /* 364 */ "ccons ::= GENERATED ALWAYS AS generated", + /* 365 */ "ccons ::= AS generated", + /* 366 */ "conslist_opt ::= COMMA conslist", + /* 367 */ "conslist ::= conslist tconscomma tcons", + /* 368 */ "conslist ::= tcons", + /* 369 */ "tconscomma ::=", + /* 370 */ "defer_subclause_opt ::= defer_subclause", + /* 371 */ "resolvetype ::= raisetype", + /* 372 */ "selectnowith ::= oneselect", + /* 373 */ "oneselect ::= values", + /* 374 */ "sclp ::= selcollist COMMA", + /* 375 */ "as ::= ID|STRING", + /* 376 */ "returning ::=", + /* 377 */ "expr ::= term", + /* 378 */ "likeop ::= LIKE_KW|MATCH", + /* 379 */ "exprlist ::= nexprlist", + /* 380 */ "nmnum ::= plus_num", + /* 381 */ "nmnum ::= nm", + /* 382 */ "nmnum ::= ON", + /* 383 */ "nmnum ::= DELETE", + /* 384 */ "nmnum ::= DEFAULT", + /* 385 */ "plus_num ::= INTEGER|FLOAT", + /* 386 */ "foreach_clause ::=", + /* 387 */ "foreach_clause ::= FOR EACH ROW", + /* 388 */ "trnm ::= nm", + /* 389 */ "tridxby ::=", + /* 390 */ "database_kw_opt ::= DATABASE", + /* 391 */ "database_kw_opt ::=", + /* 392 */ "kwcolumn_opt ::=", + /* 393 */ "kwcolumn_opt ::= COLUMNKW", + /* 394 */ "vtabarglist ::= vtabarg", + /* 395 */ "vtabarglist ::= vtabarglist COMMA vtabarg", + /* 396 */ "vtabarg ::= vtabarg vtabargtoken", + /* 397 */ "anylist ::=", + /* 398 */ "anylist ::= anylist LP anylist 
RP", + /* 399 */ "anylist ::= anylist ANY", + /* 400 */ "with ::=", }; #endif /* NDEBUG */ @@ -160319,98 +161763,98 @@ static void yy_destructor( */ /********* Begin destructor definitions ***************************************/ case 203: /* select */ - case 237: /* selectnowith */ - case 238: /* oneselect */ - case 250: /* values */ + case 238: /* selectnowith */ + case 239: /* oneselect */ + case 251: /* values */ { -sqlite3SelectDelete(pParse->db, (yypminor->yy81)); +sqlite3SelectDelete(pParse->db, (yypminor->yy303)); } break; - case 214: /* term */ - case 215: /* expr */ - case 244: /* where_opt */ - case 246: /* having_opt */ - case 258: /* on_opt */ - case 265: /* where_opt_ret */ - case 276: /* case_operand */ - case 278: /* case_else */ - case 281: /* vinto */ - case 288: /* when_clause */ - case 293: /* key_opt */ - case 309: /* filter_clause */ + case 215: /* term */ + case 216: /* expr */ + case 245: /* where_opt */ + case 247: /* having_opt */ + case 259: /* on_opt */ + case 266: /* where_opt_ret */ + case 277: /* case_operand */ + case 279: /* case_else */ + case 282: /* vinto */ + case 289: /* when_clause */ + case 294: /* key_opt */ + case 310: /* filter_clause */ { -sqlite3ExprDelete(pParse->db, (yypminor->yy404)); +sqlite3ExprDelete(pParse->db, (yypminor->yy626)); } break; - case 219: /* eidlist_opt */ - case 229: /* sortlist */ - case 230: /* eidlist */ - case 242: /* selcollist */ - case 245: /* groupby_opt */ - case 247: /* orderby_opt */ - case 251: /* nexprlist */ - case 252: /* sclp */ - case 260: /* exprlist */ - case 266: /* setlist */ - case 275: /* paren_exprlist */ - case 277: /* case_exprlist */ - case 308: /* part_opt */ + case 220: /* eidlist_opt */ + case 230: /* sortlist */ + case 231: /* eidlist */ + case 243: /* selcollist */ + case 246: /* groupby_opt */ + case 248: /* orderby_opt */ + case 252: /* nexprlist */ + case 253: /* sclp */ + case 261: /* exprlist */ + case 267: /* setlist */ + case 276: /* paren_exprlist */ + case 278: /* case_exprlist */ + case 309: /* part_opt */ { -sqlite3ExprListDelete(pParse->db, (yypminor->yy70)); +sqlite3ExprListDelete(pParse->db, (yypminor->yy562)); } break; - case 236: /* fullname */ - case 243: /* from */ - case 254: /* seltablist */ - case 255: /* stl_prefix */ - case 261: /* xfullname */ + case 237: /* fullname */ + case 244: /* from */ + case 255: /* seltablist */ + case 256: /* stl_prefix */ + case 262: /* xfullname */ { -sqlite3SrcListDelete(pParse->db, (yypminor->yy153)); +sqlite3SrcListDelete(pParse->db, (yypminor->yy607)); } break; - case 239: /* wqlist */ + case 240: /* wqlist */ { -sqlite3WithDelete(pParse->db, (yypminor->yy103)); +sqlite3WithDelete(pParse->db, (yypminor->yy43)); } break; - case 249: /* window_clause */ - case 304: /* windowdefn_list */ + case 250: /* window_clause */ + case 305: /* windowdefn_list */ { -sqlite3WindowListDelete(pParse->db, (yypminor->yy49)); +sqlite3WindowListDelete(pParse->db, (yypminor->yy375)); } break; - case 259: /* using_opt */ - case 262: /* idlist */ - case 268: /* idlist_opt */ + case 260: /* using_opt */ + case 263: /* idlist */ + case 269: /* idlist_opt */ { -sqlite3IdListDelete(pParse->db, (yypminor->yy436)); +sqlite3IdListDelete(pParse->db, (yypminor->yy240)); } break; - case 271: /* filter_over */ - case 305: /* windowdefn */ - case 306: /* window */ - case 307: /* frame_opt */ - case 310: /* over_clause */ + case 272: /* filter_over */ + case 306: /* windowdefn */ + case 307: /* window */ + case 308: /* frame_opt */ + case 311: /* over_clause */ { 
-sqlite3WindowDelete(pParse->db, (yypminor->yy49)); +sqlite3WindowDelete(pParse->db, (yypminor->yy375)); } break; - case 284: /* trigger_cmd_list */ - case 289: /* trigger_cmd */ + case 285: /* trigger_cmd_list */ + case 290: /* trigger_cmd */ { -sqlite3DeleteTriggerStep(pParse->db, (yypminor->yy157)); +sqlite3DeleteTriggerStep(pParse->db, (yypminor->yy95)); } break; - case 286: /* trigger_event */ + case 287: /* trigger_event */ { -sqlite3IdListDelete(pParse->db, (yypminor->yy262).b); +sqlite3IdListDelete(pParse->db, (yypminor->yy570).b); } break; - case 312: /* frame_bound */ - case 313: /* frame_bound_s */ - case 314: /* frame_bound_e */ + case 313: /* frame_bound */ + case 314: /* frame_bound_s */ + case 315: /* frame_bound_e */ { -sqlite3ExprDelete(pParse->db, (yypminor->yy117).pExpr); +sqlite3ExprDelete(pParse->db, (yypminor->yy81).pExpr); } break; /********* End destructor definitions *****************************************/ @@ -160720,385 +162164,388 @@ static const YYCODETYPE yyRuleInfoLhs[] = { 198, /* (16) ifnotexists ::= IF NOT EXISTS */ 197, /* (17) temp ::= TEMP */ 197, /* (18) temp ::= */ - 195, /* (19) create_table_args ::= LP columnlist conslist_opt RP table_options */ + 195, /* (19) create_table_args ::= LP columnlist conslist_opt RP table_option_set */ 195, /* (20) create_table_args ::= AS select */ - 202, /* (21) table_options ::= */ - 202, /* (22) table_options ::= WITHOUT nm */ - 204, /* (23) columnname ::= nm typetoken */ - 206, /* (24) typetoken ::= */ - 206, /* (25) typetoken ::= typename LP signed RP */ - 206, /* (26) typetoken ::= typename LP signed COMMA signed RP */ - 207, /* (27) typename ::= typename ID|STRING */ - 211, /* (28) scanpt ::= */ - 212, /* (29) scantok ::= */ - 213, /* (30) ccons ::= CONSTRAINT nm */ - 213, /* (31) ccons ::= DEFAULT scantok term */ - 213, /* (32) ccons ::= DEFAULT LP expr RP */ - 213, /* (33) ccons ::= DEFAULT PLUS scantok term */ - 213, /* (34) ccons ::= DEFAULT MINUS scantok term */ - 213, /* (35) ccons ::= DEFAULT scantok ID|INDEXED */ - 213, /* (36) ccons ::= NOT NULL onconf */ - 213, /* (37) ccons ::= PRIMARY KEY sortorder onconf autoinc */ - 213, /* (38) ccons ::= UNIQUE onconf */ - 213, /* (39) ccons ::= CHECK LP expr RP */ - 213, /* (40) ccons ::= REFERENCES nm eidlist_opt refargs */ - 213, /* (41) ccons ::= defer_subclause */ - 213, /* (42) ccons ::= COLLATE ID|STRING */ - 222, /* (43) generated ::= LP expr RP */ - 222, /* (44) generated ::= LP expr RP ID */ - 218, /* (45) autoinc ::= */ - 218, /* (46) autoinc ::= AUTOINCR */ - 220, /* (47) refargs ::= */ - 220, /* (48) refargs ::= refargs refarg */ - 223, /* (49) refarg ::= MATCH nm */ - 223, /* (50) refarg ::= ON INSERT refact */ - 223, /* (51) refarg ::= ON DELETE refact */ - 223, /* (52) refarg ::= ON UPDATE refact */ - 224, /* (53) refact ::= SET NULL */ - 224, /* (54) refact ::= SET DEFAULT */ - 224, /* (55) refact ::= CASCADE */ - 224, /* (56) refact ::= RESTRICT */ - 224, /* (57) refact ::= NO ACTION */ - 221, /* (58) defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt */ - 221, /* (59) defer_subclause ::= DEFERRABLE init_deferred_pred_opt */ - 225, /* (60) init_deferred_pred_opt ::= */ - 225, /* (61) init_deferred_pred_opt ::= INITIALLY DEFERRED */ - 225, /* (62) init_deferred_pred_opt ::= INITIALLY IMMEDIATE */ - 201, /* (63) conslist_opt ::= */ - 227, /* (64) tconscomma ::= COMMA */ - 228, /* (65) tcons ::= CONSTRAINT nm */ - 228, /* (66) tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ - 228, /* (67) tcons ::= UNIQUE LP sortlist RP onconf */ - 
228, /* (68) tcons ::= CHECK LP expr RP onconf */ - 228, /* (69) tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt */ - 231, /* (70) defer_subclause_opt ::= */ - 216, /* (71) onconf ::= */ - 216, /* (72) onconf ::= ON CONFLICT resolvetype */ - 232, /* (73) orconf ::= */ - 232, /* (74) orconf ::= OR resolvetype */ - 233, /* (75) resolvetype ::= IGNORE */ - 233, /* (76) resolvetype ::= REPLACE */ - 189, /* (77) cmd ::= DROP TABLE ifexists fullname */ - 235, /* (78) ifexists ::= IF EXISTS */ - 235, /* (79) ifexists ::= */ - 189, /* (80) cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select */ - 189, /* (81) cmd ::= DROP VIEW ifexists fullname */ - 189, /* (82) cmd ::= select */ - 203, /* (83) select ::= WITH wqlist selectnowith */ - 203, /* (84) select ::= WITH RECURSIVE wqlist selectnowith */ - 203, /* (85) select ::= selectnowith */ - 237, /* (86) selectnowith ::= selectnowith multiselect_op oneselect */ - 240, /* (87) multiselect_op ::= UNION */ - 240, /* (88) multiselect_op ::= UNION ALL */ - 240, /* (89) multiselect_op ::= EXCEPT|INTERSECT */ - 238, /* (90) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt */ - 238, /* (91) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt */ - 250, /* (92) values ::= VALUES LP nexprlist RP */ - 250, /* (93) values ::= values COMMA LP nexprlist RP */ - 241, /* (94) distinct ::= DISTINCT */ - 241, /* (95) distinct ::= ALL */ - 241, /* (96) distinct ::= */ - 252, /* (97) sclp ::= */ - 242, /* (98) selcollist ::= sclp scanpt expr scanpt as */ - 242, /* (99) selcollist ::= sclp scanpt STAR */ - 242, /* (100) selcollist ::= sclp scanpt nm DOT STAR */ - 253, /* (101) as ::= AS nm */ - 253, /* (102) as ::= */ - 243, /* (103) from ::= */ - 243, /* (104) from ::= FROM seltablist */ - 255, /* (105) stl_prefix ::= seltablist joinop */ - 255, /* (106) stl_prefix ::= */ - 254, /* (107) seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt */ - 254, /* (108) seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt */ - 254, /* (109) seltablist ::= stl_prefix LP select RP as on_opt using_opt */ - 254, /* (110) seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt */ - 199, /* (111) dbnm ::= */ - 199, /* (112) dbnm ::= DOT nm */ - 236, /* (113) fullname ::= nm */ - 236, /* (114) fullname ::= nm DOT nm */ - 261, /* (115) xfullname ::= nm */ - 261, /* (116) xfullname ::= nm DOT nm */ - 261, /* (117) xfullname ::= nm DOT nm AS nm */ - 261, /* (118) xfullname ::= nm AS nm */ - 256, /* (119) joinop ::= COMMA|JOIN */ - 256, /* (120) joinop ::= JOIN_KW JOIN */ - 256, /* (121) joinop ::= JOIN_KW nm JOIN */ - 256, /* (122) joinop ::= JOIN_KW nm nm JOIN */ - 258, /* (123) on_opt ::= ON expr */ - 258, /* (124) on_opt ::= */ - 257, /* (125) indexed_opt ::= */ - 257, /* (126) indexed_opt ::= INDEXED BY nm */ - 257, /* (127) indexed_opt ::= NOT INDEXED */ - 259, /* (128) using_opt ::= USING LP idlist RP */ - 259, /* (129) using_opt ::= */ - 247, /* (130) orderby_opt ::= */ - 247, /* (131) orderby_opt ::= ORDER BY sortlist */ - 229, /* (132) sortlist ::= sortlist COMMA expr sortorder nulls */ - 229, /* (133) sortlist ::= expr sortorder nulls */ - 217, /* (134) sortorder ::= ASC */ - 217, /* (135) sortorder ::= DESC */ - 217, /* (136) sortorder ::= */ - 263, /* (137) nulls ::= NULLS FIRST */ - 263, /* (138) nulls ::= NULLS LAST */ - 263, /* (139) nulls ::= */ - 245, /* (140) 
groupby_opt ::= */ - 245, /* (141) groupby_opt ::= GROUP BY nexprlist */ - 246, /* (142) having_opt ::= */ - 246, /* (143) having_opt ::= HAVING expr */ - 248, /* (144) limit_opt ::= */ - 248, /* (145) limit_opt ::= LIMIT expr */ - 248, /* (146) limit_opt ::= LIMIT expr OFFSET expr */ - 248, /* (147) limit_opt ::= LIMIT expr COMMA expr */ - 189, /* (148) cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret */ - 244, /* (149) where_opt ::= */ - 244, /* (150) where_opt ::= WHERE expr */ - 265, /* (151) where_opt_ret ::= */ - 265, /* (152) where_opt_ret ::= WHERE expr */ - 265, /* (153) where_opt_ret ::= RETURNING selcollist */ - 265, /* (154) where_opt_ret ::= WHERE expr RETURNING selcollist */ - 189, /* (155) cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret */ - 266, /* (156) setlist ::= setlist COMMA nm EQ expr */ - 266, /* (157) setlist ::= setlist COMMA LP idlist RP EQ expr */ - 266, /* (158) setlist ::= nm EQ expr */ - 266, /* (159) setlist ::= LP idlist RP EQ expr */ - 189, /* (160) cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert */ - 189, /* (161) cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning */ - 269, /* (162) upsert ::= */ - 269, /* (163) upsert ::= RETURNING selcollist */ - 269, /* (164) upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert */ - 269, /* (165) upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert */ - 269, /* (166) upsert ::= ON CONFLICT DO NOTHING returning */ - 269, /* (167) upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning */ - 270, /* (168) returning ::= RETURNING selcollist */ - 267, /* (169) insert_cmd ::= INSERT orconf */ - 267, /* (170) insert_cmd ::= REPLACE */ - 268, /* (171) idlist_opt ::= */ - 268, /* (172) idlist_opt ::= LP idlist RP */ - 262, /* (173) idlist ::= idlist COMMA nm */ - 262, /* (174) idlist ::= nm */ - 215, /* (175) expr ::= LP expr RP */ - 215, /* (176) expr ::= ID|INDEXED */ - 215, /* (177) expr ::= JOIN_KW */ - 215, /* (178) expr ::= nm DOT nm */ - 215, /* (179) expr ::= nm DOT nm DOT nm */ - 214, /* (180) term ::= NULL|FLOAT|BLOB */ - 214, /* (181) term ::= STRING */ - 214, /* (182) term ::= INTEGER */ - 215, /* (183) expr ::= VARIABLE */ - 215, /* (184) expr ::= expr COLLATE ID|STRING */ - 215, /* (185) expr ::= CAST LP expr AS typetoken RP */ - 215, /* (186) expr ::= ID|INDEXED LP distinct exprlist RP */ - 215, /* (187) expr ::= ID|INDEXED LP STAR RP */ - 215, /* (188) expr ::= ID|INDEXED LP distinct exprlist RP filter_over */ - 215, /* (189) expr ::= ID|INDEXED LP STAR RP filter_over */ - 214, /* (190) term ::= CTIME_KW */ - 215, /* (191) expr ::= LP nexprlist COMMA expr RP */ - 215, /* (192) expr ::= expr AND expr */ - 215, /* (193) expr ::= expr OR expr */ - 215, /* (194) expr ::= expr LT|GT|GE|LE expr */ - 215, /* (195) expr ::= expr EQ|NE expr */ - 215, /* (196) expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr */ - 215, /* (197) expr ::= expr PLUS|MINUS expr */ - 215, /* (198) expr ::= expr STAR|SLASH|REM expr */ - 215, /* (199) expr ::= expr CONCAT expr */ - 272, /* (200) likeop ::= NOT LIKE_KW|MATCH */ - 215, /* (201) expr ::= expr likeop expr */ - 215, /* (202) expr ::= expr likeop expr ESCAPE expr */ - 215, /* (203) expr ::= expr ISNULL|NOTNULL */ - 215, /* (204) expr ::= expr NOT NULL */ - 215, /* (205) expr ::= expr IS expr */ - 215, /* (206) expr ::= expr IS NOT expr */ - 215, /* (207) expr ::= NOT expr */ - 215, /* (208) expr ::= BITNOT expr */ - 215, /* (209) expr ::= PLUS|MINUS 
expr */ - 273, /* (210) between_op ::= BETWEEN */ - 273, /* (211) between_op ::= NOT BETWEEN */ - 215, /* (212) expr ::= expr between_op expr AND expr */ - 274, /* (213) in_op ::= IN */ - 274, /* (214) in_op ::= NOT IN */ - 215, /* (215) expr ::= expr in_op LP exprlist RP */ - 215, /* (216) expr ::= LP select RP */ - 215, /* (217) expr ::= expr in_op LP select RP */ - 215, /* (218) expr ::= expr in_op nm dbnm paren_exprlist */ - 215, /* (219) expr ::= EXISTS LP select RP */ - 215, /* (220) expr ::= CASE case_operand case_exprlist case_else END */ - 277, /* (221) case_exprlist ::= case_exprlist WHEN expr THEN expr */ - 277, /* (222) case_exprlist ::= WHEN expr THEN expr */ - 278, /* (223) case_else ::= ELSE expr */ - 278, /* (224) case_else ::= */ - 276, /* (225) case_operand ::= expr */ - 276, /* (226) case_operand ::= */ - 260, /* (227) exprlist ::= */ - 251, /* (228) nexprlist ::= nexprlist COMMA expr */ - 251, /* (229) nexprlist ::= expr */ - 275, /* (230) paren_exprlist ::= */ - 275, /* (231) paren_exprlist ::= LP exprlist RP */ - 189, /* (232) cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt */ - 279, /* (233) uniqueflag ::= UNIQUE */ - 279, /* (234) uniqueflag ::= */ - 219, /* (235) eidlist_opt ::= */ - 219, /* (236) eidlist_opt ::= LP eidlist RP */ - 230, /* (237) eidlist ::= eidlist COMMA nm collate sortorder */ - 230, /* (238) eidlist ::= nm collate sortorder */ - 280, /* (239) collate ::= */ - 280, /* (240) collate ::= COLLATE ID|STRING */ - 189, /* (241) cmd ::= DROP INDEX ifexists fullname */ - 189, /* (242) cmd ::= VACUUM vinto */ - 189, /* (243) cmd ::= VACUUM nm vinto */ - 281, /* (244) vinto ::= INTO expr */ - 281, /* (245) vinto ::= */ - 189, /* (246) cmd ::= PRAGMA nm dbnm */ - 189, /* (247) cmd ::= PRAGMA nm dbnm EQ nmnum */ - 189, /* (248) cmd ::= PRAGMA nm dbnm LP nmnum RP */ - 189, /* (249) cmd ::= PRAGMA nm dbnm EQ minus_num */ - 189, /* (250) cmd ::= PRAGMA nm dbnm LP minus_num RP */ - 209, /* (251) plus_num ::= PLUS INTEGER|FLOAT */ - 210, /* (252) minus_num ::= MINUS INTEGER|FLOAT */ - 189, /* (253) cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END */ - 283, /* (254) trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause */ - 285, /* (255) trigger_time ::= BEFORE|AFTER */ - 285, /* (256) trigger_time ::= INSTEAD OF */ - 285, /* (257) trigger_time ::= */ - 286, /* (258) trigger_event ::= DELETE|INSERT */ - 286, /* (259) trigger_event ::= UPDATE */ - 286, /* (260) trigger_event ::= UPDATE OF idlist */ - 288, /* (261) when_clause ::= */ - 288, /* (262) when_clause ::= WHEN expr */ - 284, /* (263) trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI */ - 284, /* (264) trigger_cmd_list ::= trigger_cmd SEMI */ - 290, /* (265) trnm ::= nm DOT nm */ - 291, /* (266) tridxby ::= INDEXED BY nm */ - 291, /* (267) tridxby ::= NOT INDEXED */ - 289, /* (268) trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt */ - 289, /* (269) trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt */ - 289, /* (270) trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt */ - 289, /* (271) trigger_cmd ::= scanpt select scanpt */ - 215, /* (272) expr ::= RAISE LP IGNORE RP */ - 215, /* (273) expr ::= RAISE LP raisetype COMMA nm RP */ - 234, /* (274) raisetype ::= ROLLBACK */ - 234, /* (275) raisetype ::= ABORT */ - 234, /* (276) raisetype ::= FAIL */ - 189, /* (277) cmd ::= DROP TRIGGER ifexists fullname */ - 189, /* (278) cmd ::= ATTACH 
database_kw_opt expr AS expr key_opt */ - 189, /* (279) cmd ::= DETACH database_kw_opt expr */ - 293, /* (280) key_opt ::= */ - 293, /* (281) key_opt ::= KEY expr */ - 189, /* (282) cmd ::= REINDEX */ - 189, /* (283) cmd ::= REINDEX nm dbnm */ - 189, /* (284) cmd ::= ANALYZE */ - 189, /* (285) cmd ::= ANALYZE nm dbnm */ - 189, /* (286) cmd ::= ALTER TABLE fullname RENAME TO nm */ - 189, /* (287) cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist */ - 189, /* (288) cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm */ - 294, /* (289) add_column_fullname ::= fullname */ - 189, /* (290) cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm */ - 189, /* (291) cmd ::= create_vtab */ - 189, /* (292) cmd ::= create_vtab LP vtabarglist RP */ - 296, /* (293) create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm */ - 298, /* (294) vtabarg ::= */ - 299, /* (295) vtabargtoken ::= ANY */ - 299, /* (296) vtabargtoken ::= lp anylist RP */ - 300, /* (297) lp ::= LP */ - 264, /* (298) with ::= WITH wqlist */ - 264, /* (299) with ::= WITH RECURSIVE wqlist */ - 303, /* (300) wqas ::= AS */ - 303, /* (301) wqas ::= AS MATERIALIZED */ - 303, /* (302) wqas ::= AS NOT MATERIALIZED */ - 302, /* (303) wqitem ::= nm eidlist_opt wqas LP select RP */ - 239, /* (304) wqlist ::= wqitem */ - 239, /* (305) wqlist ::= wqlist COMMA wqitem */ - 304, /* (306) windowdefn_list ::= windowdefn */ - 304, /* (307) windowdefn_list ::= windowdefn_list COMMA windowdefn */ - 305, /* (308) windowdefn ::= nm AS LP window RP */ - 306, /* (309) window ::= PARTITION BY nexprlist orderby_opt frame_opt */ - 306, /* (310) window ::= nm PARTITION BY nexprlist orderby_opt frame_opt */ - 306, /* (311) window ::= ORDER BY sortlist frame_opt */ - 306, /* (312) window ::= nm ORDER BY sortlist frame_opt */ - 306, /* (313) window ::= frame_opt */ - 306, /* (314) window ::= nm frame_opt */ - 307, /* (315) frame_opt ::= */ - 307, /* (316) frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt */ - 307, /* (317) frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt */ - 311, /* (318) range_or_rows ::= RANGE|ROWS|GROUPS */ - 313, /* (319) frame_bound_s ::= frame_bound */ - 313, /* (320) frame_bound_s ::= UNBOUNDED PRECEDING */ - 314, /* (321) frame_bound_e ::= frame_bound */ - 314, /* (322) frame_bound_e ::= UNBOUNDED FOLLOWING */ - 312, /* (323) frame_bound ::= expr PRECEDING|FOLLOWING */ - 312, /* (324) frame_bound ::= CURRENT ROW */ - 315, /* (325) frame_exclude_opt ::= */ - 315, /* (326) frame_exclude_opt ::= EXCLUDE frame_exclude */ - 316, /* (327) frame_exclude ::= NO OTHERS */ - 316, /* (328) frame_exclude ::= CURRENT ROW */ - 316, /* (329) frame_exclude ::= GROUP|TIES */ - 249, /* (330) window_clause ::= WINDOW windowdefn_list */ - 271, /* (331) filter_over ::= filter_clause over_clause */ - 271, /* (332) filter_over ::= over_clause */ - 271, /* (333) filter_over ::= filter_clause */ - 310, /* (334) over_clause ::= OVER LP window RP */ - 310, /* (335) over_clause ::= OVER nm */ - 309, /* (336) filter_clause ::= FILTER LP WHERE expr RP */ - 184, /* (337) input ::= cmdlist */ - 185, /* (338) cmdlist ::= cmdlist ecmd */ - 185, /* (339) cmdlist ::= ecmd */ - 186, /* (340) ecmd ::= SEMI */ - 186, /* (341) ecmd ::= cmdx SEMI */ - 186, /* (342) ecmd ::= explain cmdx SEMI */ - 191, /* (343) trans_opt ::= */ - 191, /* (344) trans_opt ::= TRANSACTION */ - 191, /* (345) trans_opt ::= TRANSACTION nm */ - 193, /* (346) savepoint_opt ::= SAVEPOINT */ - 193, /* (347) savepoint_opt 
::= */ - 189, /* (348) cmd ::= create_table create_table_args */ - 200, /* (349) columnlist ::= columnlist COMMA columnname carglist */ - 200, /* (350) columnlist ::= columnname carglist */ - 192, /* (351) nm ::= ID|INDEXED */ - 192, /* (352) nm ::= STRING */ - 192, /* (353) nm ::= JOIN_KW */ - 206, /* (354) typetoken ::= typename */ - 207, /* (355) typename ::= ID|STRING */ - 208, /* (356) signed ::= plus_num */ - 208, /* (357) signed ::= minus_num */ - 205, /* (358) carglist ::= carglist ccons */ - 205, /* (359) carglist ::= */ - 213, /* (360) ccons ::= NULL onconf */ - 213, /* (361) ccons ::= GENERATED ALWAYS AS generated */ - 213, /* (362) ccons ::= AS generated */ - 201, /* (363) conslist_opt ::= COMMA conslist */ - 226, /* (364) conslist ::= conslist tconscomma tcons */ - 226, /* (365) conslist ::= tcons */ - 227, /* (366) tconscomma ::= */ - 231, /* (367) defer_subclause_opt ::= defer_subclause */ - 233, /* (368) resolvetype ::= raisetype */ - 237, /* (369) selectnowith ::= oneselect */ - 238, /* (370) oneselect ::= values */ - 252, /* (371) sclp ::= selcollist COMMA */ - 253, /* (372) as ::= ID|STRING */ - 270, /* (373) returning ::= */ - 215, /* (374) expr ::= term */ - 272, /* (375) likeop ::= LIKE_KW|MATCH */ - 260, /* (376) exprlist ::= nexprlist */ - 282, /* (377) nmnum ::= plus_num */ - 282, /* (378) nmnum ::= nm */ - 282, /* (379) nmnum ::= ON */ - 282, /* (380) nmnum ::= DELETE */ - 282, /* (381) nmnum ::= DEFAULT */ - 209, /* (382) plus_num ::= INTEGER|FLOAT */ - 287, /* (383) foreach_clause ::= */ - 287, /* (384) foreach_clause ::= FOR EACH ROW */ - 290, /* (385) trnm ::= nm */ - 291, /* (386) tridxby ::= */ - 292, /* (387) database_kw_opt ::= DATABASE */ - 292, /* (388) database_kw_opt ::= */ - 295, /* (389) kwcolumn_opt ::= */ - 295, /* (390) kwcolumn_opt ::= COLUMNKW */ - 297, /* (391) vtabarglist ::= vtabarg */ - 297, /* (392) vtabarglist ::= vtabarglist COMMA vtabarg */ - 298, /* (393) vtabarg ::= vtabarg vtabargtoken */ - 301, /* (394) anylist ::= */ - 301, /* (395) anylist ::= anylist LP anylist RP */ - 301, /* (396) anylist ::= anylist ANY */ - 264, /* (397) with ::= */ + 202, /* (21) table_option_set ::= */ + 202, /* (22) table_option_set ::= table_option_set COMMA table_option */ + 204, /* (23) table_option ::= WITHOUT nm */ + 204, /* (24) table_option ::= nm */ + 205, /* (25) columnname ::= nm typetoken */ + 207, /* (26) typetoken ::= */ + 207, /* (27) typetoken ::= typename LP signed RP */ + 207, /* (28) typetoken ::= typename LP signed COMMA signed RP */ + 208, /* (29) typename ::= typename ID|STRING */ + 212, /* (30) scanpt ::= */ + 213, /* (31) scantok ::= */ + 214, /* (32) ccons ::= CONSTRAINT nm */ + 214, /* (33) ccons ::= DEFAULT scantok term */ + 214, /* (34) ccons ::= DEFAULT LP expr RP */ + 214, /* (35) ccons ::= DEFAULT PLUS scantok term */ + 214, /* (36) ccons ::= DEFAULT MINUS scantok term */ + 214, /* (37) ccons ::= DEFAULT scantok ID|INDEXED */ + 214, /* (38) ccons ::= NOT NULL onconf */ + 214, /* (39) ccons ::= PRIMARY KEY sortorder onconf autoinc */ + 214, /* (40) ccons ::= UNIQUE onconf */ + 214, /* (41) ccons ::= CHECK LP expr RP */ + 214, /* (42) ccons ::= REFERENCES nm eidlist_opt refargs */ + 214, /* (43) ccons ::= defer_subclause */ + 214, /* (44) ccons ::= COLLATE ID|STRING */ + 223, /* (45) generated ::= LP expr RP */ + 223, /* (46) generated ::= LP expr RP ID */ + 219, /* (47) autoinc ::= */ + 219, /* (48) autoinc ::= AUTOINCR */ + 221, /* (49) refargs ::= */ + 221, /* (50) refargs ::= refargs refarg */ + 224, /* (51) refarg ::= MATCH 
nm */ + 224, /* (52) refarg ::= ON INSERT refact */ + 224, /* (53) refarg ::= ON DELETE refact */ + 224, /* (54) refarg ::= ON UPDATE refact */ + 225, /* (55) refact ::= SET NULL */ + 225, /* (56) refact ::= SET DEFAULT */ + 225, /* (57) refact ::= CASCADE */ + 225, /* (58) refact ::= RESTRICT */ + 225, /* (59) refact ::= NO ACTION */ + 222, /* (60) defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt */ + 222, /* (61) defer_subclause ::= DEFERRABLE init_deferred_pred_opt */ + 226, /* (62) init_deferred_pred_opt ::= */ + 226, /* (63) init_deferred_pred_opt ::= INITIALLY DEFERRED */ + 226, /* (64) init_deferred_pred_opt ::= INITIALLY IMMEDIATE */ + 201, /* (65) conslist_opt ::= */ + 228, /* (66) tconscomma ::= COMMA */ + 229, /* (67) tcons ::= CONSTRAINT nm */ + 229, /* (68) tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ + 229, /* (69) tcons ::= UNIQUE LP sortlist RP onconf */ + 229, /* (70) tcons ::= CHECK LP expr RP onconf */ + 229, /* (71) tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt */ + 232, /* (72) defer_subclause_opt ::= */ + 217, /* (73) onconf ::= */ + 217, /* (74) onconf ::= ON CONFLICT resolvetype */ + 233, /* (75) orconf ::= */ + 233, /* (76) orconf ::= OR resolvetype */ + 234, /* (77) resolvetype ::= IGNORE */ + 234, /* (78) resolvetype ::= REPLACE */ + 189, /* (79) cmd ::= DROP TABLE ifexists fullname */ + 236, /* (80) ifexists ::= IF EXISTS */ + 236, /* (81) ifexists ::= */ + 189, /* (82) cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select */ + 189, /* (83) cmd ::= DROP VIEW ifexists fullname */ + 189, /* (84) cmd ::= select */ + 203, /* (85) select ::= WITH wqlist selectnowith */ + 203, /* (86) select ::= WITH RECURSIVE wqlist selectnowith */ + 203, /* (87) select ::= selectnowith */ + 238, /* (88) selectnowith ::= selectnowith multiselect_op oneselect */ + 241, /* (89) multiselect_op ::= UNION */ + 241, /* (90) multiselect_op ::= UNION ALL */ + 241, /* (91) multiselect_op ::= EXCEPT|INTERSECT */ + 239, /* (92) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt */ + 239, /* (93) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt */ + 251, /* (94) values ::= VALUES LP nexprlist RP */ + 251, /* (95) values ::= values COMMA LP nexprlist RP */ + 242, /* (96) distinct ::= DISTINCT */ + 242, /* (97) distinct ::= ALL */ + 242, /* (98) distinct ::= */ + 253, /* (99) sclp ::= */ + 243, /* (100) selcollist ::= sclp scanpt expr scanpt as */ + 243, /* (101) selcollist ::= sclp scanpt STAR */ + 243, /* (102) selcollist ::= sclp scanpt nm DOT STAR */ + 254, /* (103) as ::= AS nm */ + 254, /* (104) as ::= */ + 244, /* (105) from ::= */ + 244, /* (106) from ::= FROM seltablist */ + 256, /* (107) stl_prefix ::= seltablist joinop */ + 256, /* (108) stl_prefix ::= */ + 255, /* (109) seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt */ + 255, /* (110) seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt */ + 255, /* (111) seltablist ::= stl_prefix LP select RP as on_opt using_opt */ + 255, /* (112) seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt */ + 199, /* (113) dbnm ::= */ + 199, /* (114) dbnm ::= DOT nm */ + 237, /* (115) fullname ::= nm */ + 237, /* (116) fullname ::= nm DOT nm */ + 262, /* (117) xfullname ::= nm */ + 262, /* (118) xfullname ::= nm DOT nm */ + 262, /* (119) xfullname ::= nm DOT nm AS nm */ + 262, /* (120) xfullname ::= nm AS nm */ + 257, /* 
(121) joinop ::= COMMA|JOIN */ + 257, /* (122) joinop ::= JOIN_KW JOIN */ + 257, /* (123) joinop ::= JOIN_KW nm JOIN */ + 257, /* (124) joinop ::= JOIN_KW nm nm JOIN */ + 259, /* (125) on_opt ::= ON expr */ + 259, /* (126) on_opt ::= */ + 258, /* (127) indexed_opt ::= */ + 258, /* (128) indexed_opt ::= INDEXED BY nm */ + 258, /* (129) indexed_opt ::= NOT INDEXED */ + 260, /* (130) using_opt ::= USING LP idlist RP */ + 260, /* (131) using_opt ::= */ + 248, /* (132) orderby_opt ::= */ + 248, /* (133) orderby_opt ::= ORDER BY sortlist */ + 230, /* (134) sortlist ::= sortlist COMMA expr sortorder nulls */ + 230, /* (135) sortlist ::= expr sortorder nulls */ + 218, /* (136) sortorder ::= ASC */ + 218, /* (137) sortorder ::= DESC */ + 218, /* (138) sortorder ::= */ + 264, /* (139) nulls ::= NULLS FIRST */ + 264, /* (140) nulls ::= NULLS LAST */ + 264, /* (141) nulls ::= */ + 246, /* (142) groupby_opt ::= */ + 246, /* (143) groupby_opt ::= GROUP BY nexprlist */ + 247, /* (144) having_opt ::= */ + 247, /* (145) having_opt ::= HAVING expr */ + 249, /* (146) limit_opt ::= */ + 249, /* (147) limit_opt ::= LIMIT expr */ + 249, /* (148) limit_opt ::= LIMIT expr OFFSET expr */ + 249, /* (149) limit_opt ::= LIMIT expr COMMA expr */ + 189, /* (150) cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret */ + 245, /* (151) where_opt ::= */ + 245, /* (152) where_opt ::= WHERE expr */ + 266, /* (153) where_opt_ret ::= */ + 266, /* (154) where_opt_ret ::= WHERE expr */ + 266, /* (155) where_opt_ret ::= RETURNING selcollist */ + 266, /* (156) where_opt_ret ::= WHERE expr RETURNING selcollist */ + 189, /* (157) cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret */ + 267, /* (158) setlist ::= setlist COMMA nm EQ expr */ + 267, /* (159) setlist ::= setlist COMMA LP idlist RP EQ expr */ + 267, /* (160) setlist ::= nm EQ expr */ + 267, /* (161) setlist ::= LP idlist RP EQ expr */ + 189, /* (162) cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert */ + 189, /* (163) cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning */ + 270, /* (164) upsert ::= */ + 270, /* (165) upsert ::= RETURNING selcollist */ + 270, /* (166) upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert */ + 270, /* (167) upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert */ + 270, /* (168) upsert ::= ON CONFLICT DO NOTHING returning */ + 270, /* (169) upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning */ + 271, /* (170) returning ::= RETURNING selcollist */ + 268, /* (171) insert_cmd ::= INSERT orconf */ + 268, /* (172) insert_cmd ::= REPLACE */ + 269, /* (173) idlist_opt ::= */ + 269, /* (174) idlist_opt ::= LP idlist RP */ + 263, /* (175) idlist ::= idlist COMMA nm */ + 263, /* (176) idlist ::= nm */ + 216, /* (177) expr ::= LP expr RP */ + 216, /* (178) expr ::= ID|INDEXED */ + 216, /* (179) expr ::= JOIN_KW */ + 216, /* (180) expr ::= nm DOT nm */ + 216, /* (181) expr ::= nm DOT nm DOT nm */ + 215, /* (182) term ::= NULL|FLOAT|BLOB */ + 215, /* (183) term ::= STRING */ + 215, /* (184) term ::= INTEGER */ + 216, /* (185) expr ::= VARIABLE */ + 216, /* (186) expr ::= expr COLLATE ID|STRING */ + 216, /* (187) expr ::= CAST LP expr AS typetoken RP */ + 216, /* (188) expr ::= ID|INDEXED LP distinct exprlist RP */ + 216, /* (189) expr ::= ID|INDEXED LP STAR RP */ + 216, /* (190) expr ::= ID|INDEXED LP distinct exprlist RP filter_over */ + 216, /* (191) expr ::= ID|INDEXED LP STAR RP filter_over */ + 215, /* (192) term ::= 
CTIME_KW */ + 216, /* (193) expr ::= LP nexprlist COMMA expr RP */ + 216, /* (194) expr ::= expr AND expr */ + 216, /* (195) expr ::= expr OR expr */ + 216, /* (196) expr ::= expr LT|GT|GE|LE expr */ + 216, /* (197) expr ::= expr EQ|NE expr */ + 216, /* (198) expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr */ + 216, /* (199) expr ::= expr PLUS|MINUS expr */ + 216, /* (200) expr ::= expr STAR|SLASH|REM expr */ + 216, /* (201) expr ::= expr CONCAT expr */ + 273, /* (202) likeop ::= NOT LIKE_KW|MATCH */ + 216, /* (203) expr ::= expr likeop expr */ + 216, /* (204) expr ::= expr likeop expr ESCAPE expr */ + 216, /* (205) expr ::= expr ISNULL|NOTNULL */ + 216, /* (206) expr ::= expr NOT NULL */ + 216, /* (207) expr ::= expr IS expr */ + 216, /* (208) expr ::= expr IS NOT expr */ + 216, /* (209) expr ::= NOT expr */ + 216, /* (210) expr ::= BITNOT expr */ + 216, /* (211) expr ::= PLUS|MINUS expr */ + 274, /* (212) between_op ::= BETWEEN */ + 274, /* (213) between_op ::= NOT BETWEEN */ + 216, /* (214) expr ::= expr between_op expr AND expr */ + 275, /* (215) in_op ::= IN */ + 275, /* (216) in_op ::= NOT IN */ + 216, /* (217) expr ::= expr in_op LP exprlist RP */ + 216, /* (218) expr ::= LP select RP */ + 216, /* (219) expr ::= expr in_op LP select RP */ + 216, /* (220) expr ::= expr in_op nm dbnm paren_exprlist */ + 216, /* (221) expr ::= EXISTS LP select RP */ + 216, /* (222) expr ::= CASE case_operand case_exprlist case_else END */ + 278, /* (223) case_exprlist ::= case_exprlist WHEN expr THEN expr */ + 278, /* (224) case_exprlist ::= WHEN expr THEN expr */ + 279, /* (225) case_else ::= ELSE expr */ + 279, /* (226) case_else ::= */ + 277, /* (227) case_operand ::= expr */ + 277, /* (228) case_operand ::= */ + 261, /* (229) exprlist ::= */ + 252, /* (230) nexprlist ::= nexprlist COMMA expr */ + 252, /* (231) nexprlist ::= expr */ + 276, /* (232) paren_exprlist ::= */ + 276, /* (233) paren_exprlist ::= LP exprlist RP */ + 189, /* (234) cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt */ + 280, /* (235) uniqueflag ::= UNIQUE */ + 280, /* (236) uniqueflag ::= */ + 220, /* (237) eidlist_opt ::= */ + 220, /* (238) eidlist_opt ::= LP eidlist RP */ + 231, /* (239) eidlist ::= eidlist COMMA nm collate sortorder */ + 231, /* (240) eidlist ::= nm collate sortorder */ + 281, /* (241) collate ::= */ + 281, /* (242) collate ::= COLLATE ID|STRING */ + 189, /* (243) cmd ::= DROP INDEX ifexists fullname */ + 189, /* (244) cmd ::= VACUUM vinto */ + 189, /* (245) cmd ::= VACUUM nm vinto */ + 282, /* (246) vinto ::= INTO expr */ + 282, /* (247) vinto ::= */ + 189, /* (248) cmd ::= PRAGMA nm dbnm */ + 189, /* (249) cmd ::= PRAGMA nm dbnm EQ nmnum */ + 189, /* (250) cmd ::= PRAGMA nm dbnm LP nmnum RP */ + 189, /* (251) cmd ::= PRAGMA nm dbnm EQ minus_num */ + 189, /* (252) cmd ::= PRAGMA nm dbnm LP minus_num RP */ + 210, /* (253) plus_num ::= PLUS INTEGER|FLOAT */ + 211, /* (254) minus_num ::= MINUS INTEGER|FLOAT */ + 189, /* (255) cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END */ + 284, /* (256) trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause */ + 286, /* (257) trigger_time ::= BEFORE|AFTER */ + 286, /* (258) trigger_time ::= INSTEAD OF */ + 286, /* (259) trigger_time ::= */ + 287, /* (260) trigger_event ::= DELETE|INSERT */ + 287, /* (261) trigger_event ::= UPDATE */ + 287, /* (262) trigger_event ::= UPDATE OF idlist */ + 289, /* (263) when_clause ::= */ + 289, /* (264) when_clause ::= WHEN expr */ + 285, 
/* (265) trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI */ + 285, /* (266) trigger_cmd_list ::= trigger_cmd SEMI */ + 291, /* (267) trnm ::= nm DOT nm */ + 292, /* (268) tridxby ::= INDEXED BY nm */ + 292, /* (269) tridxby ::= NOT INDEXED */ + 290, /* (270) trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt */ + 290, /* (271) trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt */ + 290, /* (272) trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt */ + 290, /* (273) trigger_cmd ::= scanpt select scanpt */ + 216, /* (274) expr ::= RAISE LP IGNORE RP */ + 216, /* (275) expr ::= RAISE LP raisetype COMMA nm RP */ + 235, /* (276) raisetype ::= ROLLBACK */ + 235, /* (277) raisetype ::= ABORT */ + 235, /* (278) raisetype ::= FAIL */ + 189, /* (279) cmd ::= DROP TRIGGER ifexists fullname */ + 189, /* (280) cmd ::= ATTACH database_kw_opt expr AS expr key_opt */ + 189, /* (281) cmd ::= DETACH database_kw_opt expr */ + 294, /* (282) key_opt ::= */ + 294, /* (283) key_opt ::= KEY expr */ + 189, /* (284) cmd ::= REINDEX */ + 189, /* (285) cmd ::= REINDEX nm dbnm */ + 189, /* (286) cmd ::= ANALYZE */ + 189, /* (287) cmd ::= ANALYZE nm dbnm */ + 189, /* (288) cmd ::= ALTER TABLE fullname RENAME TO nm */ + 189, /* (289) cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist */ + 189, /* (290) cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm */ + 295, /* (291) add_column_fullname ::= fullname */ + 189, /* (292) cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm */ + 189, /* (293) cmd ::= create_vtab */ + 189, /* (294) cmd ::= create_vtab LP vtabarglist RP */ + 297, /* (295) create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm */ + 299, /* (296) vtabarg ::= */ + 300, /* (297) vtabargtoken ::= ANY */ + 300, /* (298) vtabargtoken ::= lp anylist RP */ + 301, /* (299) lp ::= LP */ + 265, /* (300) with ::= WITH wqlist */ + 265, /* (301) with ::= WITH RECURSIVE wqlist */ + 304, /* (302) wqas ::= AS */ + 304, /* (303) wqas ::= AS MATERIALIZED */ + 304, /* (304) wqas ::= AS NOT MATERIALIZED */ + 303, /* (305) wqitem ::= nm eidlist_opt wqas LP select RP */ + 240, /* (306) wqlist ::= wqitem */ + 240, /* (307) wqlist ::= wqlist COMMA wqitem */ + 305, /* (308) windowdefn_list ::= windowdefn */ + 305, /* (309) windowdefn_list ::= windowdefn_list COMMA windowdefn */ + 306, /* (310) windowdefn ::= nm AS LP window RP */ + 307, /* (311) window ::= PARTITION BY nexprlist orderby_opt frame_opt */ + 307, /* (312) window ::= nm PARTITION BY nexprlist orderby_opt frame_opt */ + 307, /* (313) window ::= ORDER BY sortlist frame_opt */ + 307, /* (314) window ::= nm ORDER BY sortlist frame_opt */ + 307, /* (315) window ::= frame_opt */ + 307, /* (316) window ::= nm frame_opt */ + 308, /* (317) frame_opt ::= */ + 308, /* (318) frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt */ + 308, /* (319) frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt */ + 312, /* (320) range_or_rows ::= RANGE|ROWS|GROUPS */ + 314, /* (321) frame_bound_s ::= frame_bound */ + 314, /* (322) frame_bound_s ::= UNBOUNDED PRECEDING */ + 315, /* (323) frame_bound_e ::= frame_bound */ + 315, /* (324) frame_bound_e ::= UNBOUNDED FOLLOWING */ + 313, /* (325) frame_bound ::= expr PRECEDING|FOLLOWING */ + 313, /* (326) frame_bound ::= CURRENT ROW */ + 316, /* (327) frame_exclude_opt ::= */ + 316, /* (328) frame_exclude_opt ::= EXCLUDE frame_exclude */ + 317, /* (329) frame_exclude ::= NO OTHERS */ + 317, /* (330) 
frame_exclude ::= CURRENT ROW */ + 317, /* (331) frame_exclude ::= GROUP|TIES */ + 250, /* (332) window_clause ::= WINDOW windowdefn_list */ + 272, /* (333) filter_over ::= filter_clause over_clause */ + 272, /* (334) filter_over ::= over_clause */ + 272, /* (335) filter_over ::= filter_clause */ + 311, /* (336) over_clause ::= OVER LP window RP */ + 311, /* (337) over_clause ::= OVER nm */ + 310, /* (338) filter_clause ::= FILTER LP WHERE expr RP */ + 184, /* (339) input ::= cmdlist */ + 185, /* (340) cmdlist ::= cmdlist ecmd */ + 185, /* (341) cmdlist ::= ecmd */ + 186, /* (342) ecmd ::= SEMI */ + 186, /* (343) ecmd ::= cmdx SEMI */ + 186, /* (344) ecmd ::= explain cmdx SEMI */ + 191, /* (345) trans_opt ::= */ + 191, /* (346) trans_opt ::= TRANSACTION */ + 191, /* (347) trans_opt ::= TRANSACTION nm */ + 193, /* (348) savepoint_opt ::= SAVEPOINT */ + 193, /* (349) savepoint_opt ::= */ + 189, /* (350) cmd ::= create_table create_table_args */ + 202, /* (351) table_option_set ::= table_option */ + 200, /* (352) columnlist ::= columnlist COMMA columnname carglist */ + 200, /* (353) columnlist ::= columnname carglist */ + 192, /* (354) nm ::= ID|INDEXED */ + 192, /* (355) nm ::= STRING */ + 192, /* (356) nm ::= JOIN_KW */ + 207, /* (357) typetoken ::= typename */ + 208, /* (358) typename ::= ID|STRING */ + 209, /* (359) signed ::= plus_num */ + 209, /* (360) signed ::= minus_num */ + 206, /* (361) carglist ::= carglist ccons */ + 206, /* (362) carglist ::= */ + 214, /* (363) ccons ::= NULL onconf */ + 214, /* (364) ccons ::= GENERATED ALWAYS AS generated */ + 214, /* (365) ccons ::= AS generated */ + 201, /* (366) conslist_opt ::= COMMA conslist */ + 227, /* (367) conslist ::= conslist tconscomma tcons */ + 227, /* (368) conslist ::= tcons */ + 228, /* (369) tconscomma ::= */ + 232, /* (370) defer_subclause_opt ::= defer_subclause */ + 234, /* (371) resolvetype ::= raisetype */ + 238, /* (372) selectnowith ::= oneselect */ + 239, /* (373) oneselect ::= values */ + 253, /* (374) sclp ::= selcollist COMMA */ + 254, /* (375) as ::= ID|STRING */ + 271, /* (376) returning ::= */ + 216, /* (377) expr ::= term */ + 273, /* (378) likeop ::= LIKE_KW|MATCH */ + 261, /* (379) exprlist ::= nexprlist */ + 283, /* (380) nmnum ::= plus_num */ + 283, /* (381) nmnum ::= nm */ + 283, /* (382) nmnum ::= ON */ + 283, /* (383) nmnum ::= DELETE */ + 283, /* (384) nmnum ::= DEFAULT */ + 210, /* (385) plus_num ::= INTEGER|FLOAT */ + 288, /* (386) foreach_clause ::= */ + 288, /* (387) foreach_clause ::= FOR EACH ROW */ + 291, /* (388) trnm ::= nm */ + 292, /* (389) tridxby ::= */ + 293, /* (390) database_kw_opt ::= DATABASE */ + 293, /* (391) database_kw_opt ::= */ + 296, /* (392) kwcolumn_opt ::= */ + 296, /* (393) kwcolumn_opt ::= COLUMNKW */ + 298, /* (394) vtabarglist ::= vtabarg */ + 298, /* (395) vtabarglist ::= vtabarglist COMMA vtabarg */ + 299, /* (396) vtabarg ::= vtabarg vtabargtoken */ + 302, /* (397) anylist ::= */ + 302, /* (398) anylist ::= anylist LP anylist RP */ + 302, /* (399) anylist ::= anylist ANY */ + 265, /* (400) with ::= */ }; /* For rule J, yyRuleInfoNRhs[J] contains the negative of the number @@ -161123,385 +162570,388 @@ static const signed char yyRuleInfoNRhs[] = { -3, /* (16) ifnotexists ::= IF NOT EXISTS */ -1, /* (17) temp ::= TEMP */ 0, /* (18) temp ::= */ - -5, /* (19) create_table_args ::= LP columnlist conslist_opt RP table_options */ + -5, /* (19) create_table_args ::= LP columnlist conslist_opt RP table_option_set */ -2, /* (20) create_table_args ::= AS select */ - 0, /* (21) 
table_options ::= */ - -2, /* (22) table_options ::= WITHOUT nm */ - -2, /* (23) columnname ::= nm typetoken */ - 0, /* (24) typetoken ::= */ - -4, /* (25) typetoken ::= typename LP signed RP */ - -6, /* (26) typetoken ::= typename LP signed COMMA signed RP */ - -2, /* (27) typename ::= typename ID|STRING */ - 0, /* (28) scanpt ::= */ - 0, /* (29) scantok ::= */ - -2, /* (30) ccons ::= CONSTRAINT nm */ - -3, /* (31) ccons ::= DEFAULT scantok term */ - -4, /* (32) ccons ::= DEFAULT LP expr RP */ - -4, /* (33) ccons ::= DEFAULT PLUS scantok term */ - -4, /* (34) ccons ::= DEFAULT MINUS scantok term */ - -3, /* (35) ccons ::= DEFAULT scantok ID|INDEXED */ - -3, /* (36) ccons ::= NOT NULL onconf */ - -5, /* (37) ccons ::= PRIMARY KEY sortorder onconf autoinc */ - -2, /* (38) ccons ::= UNIQUE onconf */ - -4, /* (39) ccons ::= CHECK LP expr RP */ - -4, /* (40) ccons ::= REFERENCES nm eidlist_opt refargs */ - -1, /* (41) ccons ::= defer_subclause */ - -2, /* (42) ccons ::= COLLATE ID|STRING */ - -3, /* (43) generated ::= LP expr RP */ - -4, /* (44) generated ::= LP expr RP ID */ - 0, /* (45) autoinc ::= */ - -1, /* (46) autoinc ::= AUTOINCR */ - 0, /* (47) refargs ::= */ - -2, /* (48) refargs ::= refargs refarg */ - -2, /* (49) refarg ::= MATCH nm */ - -3, /* (50) refarg ::= ON INSERT refact */ - -3, /* (51) refarg ::= ON DELETE refact */ - -3, /* (52) refarg ::= ON UPDATE refact */ - -2, /* (53) refact ::= SET NULL */ - -2, /* (54) refact ::= SET DEFAULT */ - -1, /* (55) refact ::= CASCADE */ - -1, /* (56) refact ::= RESTRICT */ - -2, /* (57) refact ::= NO ACTION */ - -3, /* (58) defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt */ - -2, /* (59) defer_subclause ::= DEFERRABLE init_deferred_pred_opt */ - 0, /* (60) init_deferred_pred_opt ::= */ - -2, /* (61) init_deferred_pred_opt ::= INITIALLY DEFERRED */ - -2, /* (62) init_deferred_pred_opt ::= INITIALLY IMMEDIATE */ - 0, /* (63) conslist_opt ::= */ - -1, /* (64) tconscomma ::= COMMA */ - -2, /* (65) tcons ::= CONSTRAINT nm */ - -7, /* (66) tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ - -5, /* (67) tcons ::= UNIQUE LP sortlist RP onconf */ - -5, /* (68) tcons ::= CHECK LP expr RP onconf */ - -10, /* (69) tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt */ - 0, /* (70) defer_subclause_opt ::= */ - 0, /* (71) onconf ::= */ - -3, /* (72) onconf ::= ON CONFLICT resolvetype */ - 0, /* (73) orconf ::= */ - -2, /* (74) orconf ::= OR resolvetype */ - -1, /* (75) resolvetype ::= IGNORE */ - -1, /* (76) resolvetype ::= REPLACE */ - -4, /* (77) cmd ::= DROP TABLE ifexists fullname */ - -2, /* (78) ifexists ::= IF EXISTS */ - 0, /* (79) ifexists ::= */ - -9, /* (80) cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select */ - -4, /* (81) cmd ::= DROP VIEW ifexists fullname */ - -1, /* (82) cmd ::= select */ - -3, /* (83) select ::= WITH wqlist selectnowith */ - -4, /* (84) select ::= WITH RECURSIVE wqlist selectnowith */ - -1, /* (85) select ::= selectnowith */ - -3, /* (86) selectnowith ::= selectnowith multiselect_op oneselect */ - -1, /* (87) multiselect_op ::= UNION */ - -2, /* (88) multiselect_op ::= UNION ALL */ - -1, /* (89) multiselect_op ::= EXCEPT|INTERSECT */ - -9, /* (90) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt */ - -10, /* (91) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt */ - -4, /* (92) values ::= VALUES LP nexprlist RP */ - -5, /* (93) 
values ::= values COMMA LP nexprlist RP */ - -1, /* (94) distinct ::= DISTINCT */ - -1, /* (95) distinct ::= ALL */ - 0, /* (96) distinct ::= */ - 0, /* (97) sclp ::= */ - -5, /* (98) selcollist ::= sclp scanpt expr scanpt as */ - -3, /* (99) selcollist ::= sclp scanpt STAR */ - -5, /* (100) selcollist ::= sclp scanpt nm DOT STAR */ - -2, /* (101) as ::= AS nm */ - 0, /* (102) as ::= */ - 0, /* (103) from ::= */ - -2, /* (104) from ::= FROM seltablist */ - -2, /* (105) stl_prefix ::= seltablist joinop */ - 0, /* (106) stl_prefix ::= */ - -7, /* (107) seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt */ - -9, /* (108) seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt */ - -7, /* (109) seltablist ::= stl_prefix LP select RP as on_opt using_opt */ - -7, /* (110) seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt */ - 0, /* (111) dbnm ::= */ - -2, /* (112) dbnm ::= DOT nm */ - -1, /* (113) fullname ::= nm */ - -3, /* (114) fullname ::= nm DOT nm */ - -1, /* (115) xfullname ::= nm */ - -3, /* (116) xfullname ::= nm DOT nm */ - -5, /* (117) xfullname ::= nm DOT nm AS nm */ - -3, /* (118) xfullname ::= nm AS nm */ - -1, /* (119) joinop ::= COMMA|JOIN */ - -2, /* (120) joinop ::= JOIN_KW JOIN */ - -3, /* (121) joinop ::= JOIN_KW nm JOIN */ - -4, /* (122) joinop ::= JOIN_KW nm nm JOIN */ - -2, /* (123) on_opt ::= ON expr */ - 0, /* (124) on_opt ::= */ - 0, /* (125) indexed_opt ::= */ - -3, /* (126) indexed_opt ::= INDEXED BY nm */ - -2, /* (127) indexed_opt ::= NOT INDEXED */ - -4, /* (128) using_opt ::= USING LP idlist RP */ - 0, /* (129) using_opt ::= */ - 0, /* (130) orderby_opt ::= */ - -3, /* (131) orderby_opt ::= ORDER BY sortlist */ - -5, /* (132) sortlist ::= sortlist COMMA expr sortorder nulls */ - -3, /* (133) sortlist ::= expr sortorder nulls */ - -1, /* (134) sortorder ::= ASC */ - -1, /* (135) sortorder ::= DESC */ - 0, /* (136) sortorder ::= */ - -2, /* (137) nulls ::= NULLS FIRST */ - -2, /* (138) nulls ::= NULLS LAST */ - 0, /* (139) nulls ::= */ - 0, /* (140) groupby_opt ::= */ - -3, /* (141) groupby_opt ::= GROUP BY nexprlist */ - 0, /* (142) having_opt ::= */ - -2, /* (143) having_opt ::= HAVING expr */ - 0, /* (144) limit_opt ::= */ - -2, /* (145) limit_opt ::= LIMIT expr */ - -4, /* (146) limit_opt ::= LIMIT expr OFFSET expr */ - -4, /* (147) limit_opt ::= LIMIT expr COMMA expr */ - -6, /* (148) cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret */ - 0, /* (149) where_opt ::= */ - -2, /* (150) where_opt ::= WHERE expr */ - 0, /* (151) where_opt_ret ::= */ - -2, /* (152) where_opt_ret ::= WHERE expr */ - -2, /* (153) where_opt_ret ::= RETURNING selcollist */ - -4, /* (154) where_opt_ret ::= WHERE expr RETURNING selcollist */ - -9, /* (155) cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret */ - -5, /* (156) setlist ::= setlist COMMA nm EQ expr */ - -7, /* (157) setlist ::= setlist COMMA LP idlist RP EQ expr */ - -3, /* (158) setlist ::= nm EQ expr */ - -5, /* (159) setlist ::= LP idlist RP EQ expr */ - -7, /* (160) cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert */ - -8, /* (161) cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning */ - 0, /* (162) upsert ::= */ - -2, /* (163) upsert ::= RETURNING selcollist */ - -12, /* (164) upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert */ - -9, /* (165) upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert */ - -5, /* (166) upsert ::= ON CONFLICT DO NOTHING returning 
*/ - -8, /* (167) upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning */ - -2, /* (168) returning ::= RETURNING selcollist */ - -2, /* (169) insert_cmd ::= INSERT orconf */ - -1, /* (170) insert_cmd ::= REPLACE */ - 0, /* (171) idlist_opt ::= */ - -3, /* (172) idlist_opt ::= LP idlist RP */ - -3, /* (173) idlist ::= idlist COMMA nm */ - -1, /* (174) idlist ::= nm */ - -3, /* (175) expr ::= LP expr RP */ - -1, /* (176) expr ::= ID|INDEXED */ - -1, /* (177) expr ::= JOIN_KW */ - -3, /* (178) expr ::= nm DOT nm */ - -5, /* (179) expr ::= nm DOT nm DOT nm */ - -1, /* (180) term ::= NULL|FLOAT|BLOB */ - -1, /* (181) term ::= STRING */ - -1, /* (182) term ::= INTEGER */ - -1, /* (183) expr ::= VARIABLE */ - -3, /* (184) expr ::= expr COLLATE ID|STRING */ - -6, /* (185) expr ::= CAST LP expr AS typetoken RP */ - -5, /* (186) expr ::= ID|INDEXED LP distinct exprlist RP */ - -4, /* (187) expr ::= ID|INDEXED LP STAR RP */ - -6, /* (188) expr ::= ID|INDEXED LP distinct exprlist RP filter_over */ - -5, /* (189) expr ::= ID|INDEXED LP STAR RP filter_over */ - -1, /* (190) term ::= CTIME_KW */ - -5, /* (191) expr ::= LP nexprlist COMMA expr RP */ - -3, /* (192) expr ::= expr AND expr */ - -3, /* (193) expr ::= expr OR expr */ - -3, /* (194) expr ::= expr LT|GT|GE|LE expr */ - -3, /* (195) expr ::= expr EQ|NE expr */ - -3, /* (196) expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr */ - -3, /* (197) expr ::= expr PLUS|MINUS expr */ - -3, /* (198) expr ::= expr STAR|SLASH|REM expr */ - -3, /* (199) expr ::= expr CONCAT expr */ - -2, /* (200) likeop ::= NOT LIKE_KW|MATCH */ - -3, /* (201) expr ::= expr likeop expr */ - -5, /* (202) expr ::= expr likeop expr ESCAPE expr */ - -2, /* (203) expr ::= expr ISNULL|NOTNULL */ - -3, /* (204) expr ::= expr NOT NULL */ - -3, /* (205) expr ::= expr IS expr */ - -4, /* (206) expr ::= expr IS NOT expr */ - -2, /* (207) expr ::= NOT expr */ - -2, /* (208) expr ::= BITNOT expr */ - -2, /* (209) expr ::= PLUS|MINUS expr */ - -1, /* (210) between_op ::= BETWEEN */ - -2, /* (211) between_op ::= NOT BETWEEN */ - -5, /* (212) expr ::= expr between_op expr AND expr */ - -1, /* (213) in_op ::= IN */ - -2, /* (214) in_op ::= NOT IN */ - -5, /* (215) expr ::= expr in_op LP exprlist RP */ - -3, /* (216) expr ::= LP select RP */ - -5, /* (217) expr ::= expr in_op LP select RP */ - -5, /* (218) expr ::= expr in_op nm dbnm paren_exprlist */ - -4, /* (219) expr ::= EXISTS LP select RP */ - -5, /* (220) expr ::= CASE case_operand case_exprlist case_else END */ - -5, /* (221) case_exprlist ::= case_exprlist WHEN expr THEN expr */ - -4, /* (222) case_exprlist ::= WHEN expr THEN expr */ - -2, /* (223) case_else ::= ELSE expr */ - 0, /* (224) case_else ::= */ - -1, /* (225) case_operand ::= expr */ - 0, /* (226) case_operand ::= */ - 0, /* (227) exprlist ::= */ - -3, /* (228) nexprlist ::= nexprlist COMMA expr */ - -1, /* (229) nexprlist ::= expr */ - 0, /* (230) paren_exprlist ::= */ - -3, /* (231) paren_exprlist ::= LP exprlist RP */ - -12, /* (232) cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt */ - -1, /* (233) uniqueflag ::= UNIQUE */ - 0, /* (234) uniqueflag ::= */ - 0, /* (235) eidlist_opt ::= */ - -3, /* (236) eidlist_opt ::= LP eidlist RP */ - -5, /* (237) eidlist ::= eidlist COMMA nm collate sortorder */ - -3, /* (238) eidlist ::= nm collate sortorder */ - 0, /* (239) collate ::= */ - -2, /* (240) collate ::= COLLATE ID|STRING */ - -4, /* (241) cmd ::= DROP INDEX ifexists fullname */ - -2, /* (242) cmd ::= VACUUM vinto */ - -3, /* 
(243) cmd ::= VACUUM nm vinto */ - -2, /* (244) vinto ::= INTO expr */ - 0, /* (245) vinto ::= */ - -3, /* (246) cmd ::= PRAGMA nm dbnm */ - -5, /* (247) cmd ::= PRAGMA nm dbnm EQ nmnum */ - -6, /* (248) cmd ::= PRAGMA nm dbnm LP nmnum RP */ - -5, /* (249) cmd ::= PRAGMA nm dbnm EQ minus_num */ - -6, /* (250) cmd ::= PRAGMA nm dbnm LP minus_num RP */ - -2, /* (251) plus_num ::= PLUS INTEGER|FLOAT */ - -2, /* (252) minus_num ::= MINUS INTEGER|FLOAT */ - -5, /* (253) cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END */ - -11, /* (254) trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause */ - -1, /* (255) trigger_time ::= BEFORE|AFTER */ - -2, /* (256) trigger_time ::= INSTEAD OF */ - 0, /* (257) trigger_time ::= */ - -1, /* (258) trigger_event ::= DELETE|INSERT */ - -1, /* (259) trigger_event ::= UPDATE */ - -3, /* (260) trigger_event ::= UPDATE OF idlist */ - 0, /* (261) when_clause ::= */ - -2, /* (262) when_clause ::= WHEN expr */ - -3, /* (263) trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI */ - -2, /* (264) trigger_cmd_list ::= trigger_cmd SEMI */ - -3, /* (265) trnm ::= nm DOT nm */ - -3, /* (266) tridxby ::= INDEXED BY nm */ - -2, /* (267) tridxby ::= NOT INDEXED */ - -9, /* (268) trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt */ - -8, /* (269) trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt */ - -6, /* (270) trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt */ - -3, /* (271) trigger_cmd ::= scanpt select scanpt */ - -4, /* (272) expr ::= RAISE LP IGNORE RP */ - -6, /* (273) expr ::= RAISE LP raisetype COMMA nm RP */ - -1, /* (274) raisetype ::= ROLLBACK */ - -1, /* (275) raisetype ::= ABORT */ - -1, /* (276) raisetype ::= FAIL */ - -4, /* (277) cmd ::= DROP TRIGGER ifexists fullname */ - -6, /* (278) cmd ::= ATTACH database_kw_opt expr AS expr key_opt */ - -3, /* (279) cmd ::= DETACH database_kw_opt expr */ - 0, /* (280) key_opt ::= */ - -2, /* (281) key_opt ::= KEY expr */ - -1, /* (282) cmd ::= REINDEX */ - -3, /* (283) cmd ::= REINDEX nm dbnm */ - -1, /* (284) cmd ::= ANALYZE */ - -3, /* (285) cmd ::= ANALYZE nm dbnm */ - -6, /* (286) cmd ::= ALTER TABLE fullname RENAME TO nm */ - -7, /* (287) cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist */ - -6, /* (288) cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm */ - -1, /* (289) add_column_fullname ::= fullname */ - -8, /* (290) cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm */ - -1, /* (291) cmd ::= create_vtab */ - -4, /* (292) cmd ::= create_vtab LP vtabarglist RP */ - -8, /* (293) create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm */ - 0, /* (294) vtabarg ::= */ - -1, /* (295) vtabargtoken ::= ANY */ - -3, /* (296) vtabargtoken ::= lp anylist RP */ - -1, /* (297) lp ::= LP */ - -2, /* (298) with ::= WITH wqlist */ - -3, /* (299) with ::= WITH RECURSIVE wqlist */ - -1, /* (300) wqas ::= AS */ - -2, /* (301) wqas ::= AS MATERIALIZED */ - -3, /* (302) wqas ::= AS NOT MATERIALIZED */ - -6, /* (303) wqitem ::= nm eidlist_opt wqas LP select RP */ - -1, /* (304) wqlist ::= wqitem */ - -3, /* (305) wqlist ::= wqlist COMMA wqitem */ - -1, /* (306) windowdefn_list ::= windowdefn */ - -3, /* (307) windowdefn_list ::= windowdefn_list COMMA windowdefn */ - -5, /* (308) windowdefn ::= nm AS LP window RP */ - -5, /* (309) window ::= PARTITION BY nexprlist orderby_opt frame_opt */ - -6, /* (310) window ::= nm PARTITION BY nexprlist orderby_opt 
frame_opt */ - -4, /* (311) window ::= ORDER BY sortlist frame_opt */ - -5, /* (312) window ::= nm ORDER BY sortlist frame_opt */ - -1, /* (313) window ::= frame_opt */ - -2, /* (314) window ::= nm frame_opt */ - 0, /* (315) frame_opt ::= */ - -3, /* (316) frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt */ - -6, /* (317) frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt */ - -1, /* (318) range_or_rows ::= RANGE|ROWS|GROUPS */ - -1, /* (319) frame_bound_s ::= frame_bound */ - -2, /* (320) frame_bound_s ::= UNBOUNDED PRECEDING */ - -1, /* (321) frame_bound_e ::= frame_bound */ - -2, /* (322) frame_bound_e ::= UNBOUNDED FOLLOWING */ - -2, /* (323) frame_bound ::= expr PRECEDING|FOLLOWING */ - -2, /* (324) frame_bound ::= CURRENT ROW */ - 0, /* (325) frame_exclude_opt ::= */ - -2, /* (326) frame_exclude_opt ::= EXCLUDE frame_exclude */ - -2, /* (327) frame_exclude ::= NO OTHERS */ - -2, /* (328) frame_exclude ::= CURRENT ROW */ - -1, /* (329) frame_exclude ::= GROUP|TIES */ - -2, /* (330) window_clause ::= WINDOW windowdefn_list */ - -2, /* (331) filter_over ::= filter_clause over_clause */ - -1, /* (332) filter_over ::= over_clause */ - -1, /* (333) filter_over ::= filter_clause */ - -4, /* (334) over_clause ::= OVER LP window RP */ - -2, /* (335) over_clause ::= OVER nm */ - -5, /* (336) filter_clause ::= FILTER LP WHERE expr RP */ - -1, /* (337) input ::= cmdlist */ - -2, /* (338) cmdlist ::= cmdlist ecmd */ - -1, /* (339) cmdlist ::= ecmd */ - -1, /* (340) ecmd ::= SEMI */ - -2, /* (341) ecmd ::= cmdx SEMI */ - -3, /* (342) ecmd ::= explain cmdx SEMI */ - 0, /* (343) trans_opt ::= */ - -1, /* (344) trans_opt ::= TRANSACTION */ - -2, /* (345) trans_opt ::= TRANSACTION nm */ - -1, /* (346) savepoint_opt ::= SAVEPOINT */ - 0, /* (347) savepoint_opt ::= */ - -2, /* (348) cmd ::= create_table create_table_args */ - -4, /* (349) columnlist ::= columnlist COMMA columnname carglist */ - -2, /* (350) columnlist ::= columnname carglist */ - -1, /* (351) nm ::= ID|INDEXED */ - -1, /* (352) nm ::= STRING */ - -1, /* (353) nm ::= JOIN_KW */ - -1, /* (354) typetoken ::= typename */ - -1, /* (355) typename ::= ID|STRING */ - -1, /* (356) signed ::= plus_num */ - -1, /* (357) signed ::= minus_num */ - -2, /* (358) carglist ::= carglist ccons */ - 0, /* (359) carglist ::= */ - -2, /* (360) ccons ::= NULL onconf */ - -4, /* (361) ccons ::= GENERATED ALWAYS AS generated */ - -2, /* (362) ccons ::= AS generated */ - -2, /* (363) conslist_opt ::= COMMA conslist */ - -3, /* (364) conslist ::= conslist tconscomma tcons */ - -1, /* (365) conslist ::= tcons */ - 0, /* (366) tconscomma ::= */ - -1, /* (367) defer_subclause_opt ::= defer_subclause */ - -1, /* (368) resolvetype ::= raisetype */ - -1, /* (369) selectnowith ::= oneselect */ - -1, /* (370) oneselect ::= values */ - -2, /* (371) sclp ::= selcollist COMMA */ - -1, /* (372) as ::= ID|STRING */ - 0, /* (373) returning ::= */ - -1, /* (374) expr ::= term */ - -1, /* (375) likeop ::= LIKE_KW|MATCH */ - -1, /* (376) exprlist ::= nexprlist */ - -1, /* (377) nmnum ::= plus_num */ - -1, /* (378) nmnum ::= nm */ - -1, /* (379) nmnum ::= ON */ - -1, /* (380) nmnum ::= DELETE */ - -1, /* (381) nmnum ::= DEFAULT */ - -1, /* (382) plus_num ::= INTEGER|FLOAT */ - 0, /* (383) foreach_clause ::= */ - -3, /* (384) foreach_clause ::= FOR EACH ROW */ - -1, /* (385) trnm ::= nm */ - 0, /* (386) tridxby ::= */ - -1, /* (387) database_kw_opt ::= DATABASE */ - 0, /* (388) database_kw_opt ::= */ - 0, /* (389) kwcolumn_opt ::= */ - 
-1, /* (390) kwcolumn_opt ::= COLUMNKW */ - -1, /* (391) vtabarglist ::= vtabarg */ - -3, /* (392) vtabarglist ::= vtabarglist COMMA vtabarg */ - -2, /* (393) vtabarg ::= vtabarg vtabargtoken */ - 0, /* (394) anylist ::= */ - -4, /* (395) anylist ::= anylist LP anylist RP */ - -2, /* (396) anylist ::= anylist ANY */ - 0, /* (397) with ::= */ + 0, /* (21) table_option_set ::= */ + -3, /* (22) table_option_set ::= table_option_set COMMA table_option */ + -2, /* (23) table_option ::= WITHOUT nm */ + -1, /* (24) table_option ::= nm */ + -2, /* (25) columnname ::= nm typetoken */ + 0, /* (26) typetoken ::= */ + -4, /* (27) typetoken ::= typename LP signed RP */ + -6, /* (28) typetoken ::= typename LP signed COMMA signed RP */ + -2, /* (29) typename ::= typename ID|STRING */ + 0, /* (30) scanpt ::= */ + 0, /* (31) scantok ::= */ + -2, /* (32) ccons ::= CONSTRAINT nm */ + -3, /* (33) ccons ::= DEFAULT scantok term */ + -4, /* (34) ccons ::= DEFAULT LP expr RP */ + -4, /* (35) ccons ::= DEFAULT PLUS scantok term */ + -4, /* (36) ccons ::= DEFAULT MINUS scantok term */ + -3, /* (37) ccons ::= DEFAULT scantok ID|INDEXED */ + -3, /* (38) ccons ::= NOT NULL onconf */ + -5, /* (39) ccons ::= PRIMARY KEY sortorder onconf autoinc */ + -2, /* (40) ccons ::= UNIQUE onconf */ + -4, /* (41) ccons ::= CHECK LP expr RP */ + -4, /* (42) ccons ::= REFERENCES nm eidlist_opt refargs */ + -1, /* (43) ccons ::= defer_subclause */ + -2, /* (44) ccons ::= COLLATE ID|STRING */ + -3, /* (45) generated ::= LP expr RP */ + -4, /* (46) generated ::= LP expr RP ID */ + 0, /* (47) autoinc ::= */ + -1, /* (48) autoinc ::= AUTOINCR */ + 0, /* (49) refargs ::= */ + -2, /* (50) refargs ::= refargs refarg */ + -2, /* (51) refarg ::= MATCH nm */ + -3, /* (52) refarg ::= ON INSERT refact */ + -3, /* (53) refarg ::= ON DELETE refact */ + -3, /* (54) refarg ::= ON UPDATE refact */ + -2, /* (55) refact ::= SET NULL */ + -2, /* (56) refact ::= SET DEFAULT */ + -1, /* (57) refact ::= CASCADE */ + -1, /* (58) refact ::= RESTRICT */ + -2, /* (59) refact ::= NO ACTION */ + -3, /* (60) defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt */ + -2, /* (61) defer_subclause ::= DEFERRABLE init_deferred_pred_opt */ + 0, /* (62) init_deferred_pred_opt ::= */ + -2, /* (63) init_deferred_pred_opt ::= INITIALLY DEFERRED */ + -2, /* (64) init_deferred_pred_opt ::= INITIALLY IMMEDIATE */ + 0, /* (65) conslist_opt ::= */ + -1, /* (66) tconscomma ::= COMMA */ + -2, /* (67) tcons ::= CONSTRAINT nm */ + -7, /* (68) tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ + -5, /* (69) tcons ::= UNIQUE LP sortlist RP onconf */ + -5, /* (70) tcons ::= CHECK LP expr RP onconf */ + -10, /* (71) tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt */ + 0, /* (72) defer_subclause_opt ::= */ + 0, /* (73) onconf ::= */ + -3, /* (74) onconf ::= ON CONFLICT resolvetype */ + 0, /* (75) orconf ::= */ + -2, /* (76) orconf ::= OR resolvetype */ + -1, /* (77) resolvetype ::= IGNORE */ + -1, /* (78) resolvetype ::= REPLACE */ + -4, /* (79) cmd ::= DROP TABLE ifexists fullname */ + -2, /* (80) ifexists ::= IF EXISTS */ + 0, /* (81) ifexists ::= */ + -9, /* (82) cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select */ + -4, /* (83) cmd ::= DROP VIEW ifexists fullname */ + -1, /* (84) cmd ::= select */ + -3, /* (85) select ::= WITH wqlist selectnowith */ + -4, /* (86) select ::= WITH RECURSIVE wqlist selectnowith */ + -1, /* (87) select ::= selectnowith */ + -3, /* (88) selectnowith ::= selectnowith multiselect_op 
oneselect */ + -1, /* (89) multiselect_op ::= UNION */ + -2, /* (90) multiselect_op ::= UNION ALL */ + -1, /* (91) multiselect_op ::= EXCEPT|INTERSECT */ + -9, /* (92) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt */ + -10, /* (93) oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt */ + -4, /* (94) values ::= VALUES LP nexprlist RP */ + -5, /* (95) values ::= values COMMA LP nexprlist RP */ + -1, /* (96) distinct ::= DISTINCT */ + -1, /* (97) distinct ::= ALL */ + 0, /* (98) distinct ::= */ + 0, /* (99) sclp ::= */ + -5, /* (100) selcollist ::= sclp scanpt expr scanpt as */ + -3, /* (101) selcollist ::= sclp scanpt STAR */ + -5, /* (102) selcollist ::= sclp scanpt nm DOT STAR */ + -2, /* (103) as ::= AS nm */ + 0, /* (104) as ::= */ + 0, /* (105) from ::= */ + -2, /* (106) from ::= FROM seltablist */ + -2, /* (107) stl_prefix ::= seltablist joinop */ + 0, /* (108) stl_prefix ::= */ + -7, /* (109) seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt */ + -9, /* (110) seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt */ + -7, /* (111) seltablist ::= stl_prefix LP select RP as on_opt using_opt */ + -7, /* (112) seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt */ + 0, /* (113) dbnm ::= */ + -2, /* (114) dbnm ::= DOT nm */ + -1, /* (115) fullname ::= nm */ + -3, /* (116) fullname ::= nm DOT nm */ + -1, /* (117) xfullname ::= nm */ + -3, /* (118) xfullname ::= nm DOT nm */ + -5, /* (119) xfullname ::= nm DOT nm AS nm */ + -3, /* (120) xfullname ::= nm AS nm */ + -1, /* (121) joinop ::= COMMA|JOIN */ + -2, /* (122) joinop ::= JOIN_KW JOIN */ + -3, /* (123) joinop ::= JOIN_KW nm JOIN */ + -4, /* (124) joinop ::= JOIN_KW nm nm JOIN */ + -2, /* (125) on_opt ::= ON expr */ + 0, /* (126) on_opt ::= */ + 0, /* (127) indexed_opt ::= */ + -3, /* (128) indexed_opt ::= INDEXED BY nm */ + -2, /* (129) indexed_opt ::= NOT INDEXED */ + -4, /* (130) using_opt ::= USING LP idlist RP */ + 0, /* (131) using_opt ::= */ + 0, /* (132) orderby_opt ::= */ + -3, /* (133) orderby_opt ::= ORDER BY sortlist */ + -5, /* (134) sortlist ::= sortlist COMMA expr sortorder nulls */ + -3, /* (135) sortlist ::= expr sortorder nulls */ + -1, /* (136) sortorder ::= ASC */ + -1, /* (137) sortorder ::= DESC */ + 0, /* (138) sortorder ::= */ + -2, /* (139) nulls ::= NULLS FIRST */ + -2, /* (140) nulls ::= NULLS LAST */ + 0, /* (141) nulls ::= */ + 0, /* (142) groupby_opt ::= */ + -3, /* (143) groupby_opt ::= GROUP BY nexprlist */ + 0, /* (144) having_opt ::= */ + -2, /* (145) having_opt ::= HAVING expr */ + 0, /* (146) limit_opt ::= */ + -2, /* (147) limit_opt ::= LIMIT expr */ + -4, /* (148) limit_opt ::= LIMIT expr OFFSET expr */ + -4, /* (149) limit_opt ::= LIMIT expr COMMA expr */ + -6, /* (150) cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret */ + 0, /* (151) where_opt ::= */ + -2, /* (152) where_opt ::= WHERE expr */ + 0, /* (153) where_opt_ret ::= */ + -2, /* (154) where_opt_ret ::= WHERE expr */ + -2, /* (155) where_opt_ret ::= RETURNING selcollist */ + -4, /* (156) where_opt_ret ::= WHERE expr RETURNING selcollist */ + -9, /* (157) cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret */ + -5, /* (158) setlist ::= setlist COMMA nm EQ expr */ + -7, /* (159) setlist ::= setlist COMMA LP idlist RP EQ expr */ + -3, /* (160) setlist ::= nm EQ expr */ + -5, /* (161) setlist ::= LP idlist RP EQ expr */ + -7, /* (162) cmd ::= with 
insert_cmd INTO xfullname idlist_opt select upsert */ + -8, /* (163) cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning */ + 0, /* (164) upsert ::= */ + -2, /* (165) upsert ::= RETURNING selcollist */ + -12, /* (166) upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert */ + -9, /* (167) upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert */ + -5, /* (168) upsert ::= ON CONFLICT DO NOTHING returning */ + -8, /* (169) upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning */ + -2, /* (170) returning ::= RETURNING selcollist */ + -2, /* (171) insert_cmd ::= INSERT orconf */ + -1, /* (172) insert_cmd ::= REPLACE */ + 0, /* (173) idlist_opt ::= */ + -3, /* (174) idlist_opt ::= LP idlist RP */ + -3, /* (175) idlist ::= idlist COMMA nm */ + -1, /* (176) idlist ::= nm */ + -3, /* (177) expr ::= LP expr RP */ + -1, /* (178) expr ::= ID|INDEXED */ + -1, /* (179) expr ::= JOIN_KW */ + -3, /* (180) expr ::= nm DOT nm */ + -5, /* (181) expr ::= nm DOT nm DOT nm */ + -1, /* (182) term ::= NULL|FLOAT|BLOB */ + -1, /* (183) term ::= STRING */ + -1, /* (184) term ::= INTEGER */ + -1, /* (185) expr ::= VARIABLE */ + -3, /* (186) expr ::= expr COLLATE ID|STRING */ + -6, /* (187) expr ::= CAST LP expr AS typetoken RP */ + -5, /* (188) expr ::= ID|INDEXED LP distinct exprlist RP */ + -4, /* (189) expr ::= ID|INDEXED LP STAR RP */ + -6, /* (190) expr ::= ID|INDEXED LP distinct exprlist RP filter_over */ + -5, /* (191) expr ::= ID|INDEXED LP STAR RP filter_over */ + -1, /* (192) term ::= CTIME_KW */ + -5, /* (193) expr ::= LP nexprlist COMMA expr RP */ + -3, /* (194) expr ::= expr AND expr */ + -3, /* (195) expr ::= expr OR expr */ + -3, /* (196) expr ::= expr LT|GT|GE|LE expr */ + -3, /* (197) expr ::= expr EQ|NE expr */ + -3, /* (198) expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr */ + -3, /* (199) expr ::= expr PLUS|MINUS expr */ + -3, /* (200) expr ::= expr STAR|SLASH|REM expr */ + -3, /* (201) expr ::= expr CONCAT expr */ + -2, /* (202) likeop ::= NOT LIKE_KW|MATCH */ + -3, /* (203) expr ::= expr likeop expr */ + -5, /* (204) expr ::= expr likeop expr ESCAPE expr */ + -2, /* (205) expr ::= expr ISNULL|NOTNULL */ + -3, /* (206) expr ::= expr NOT NULL */ + -3, /* (207) expr ::= expr IS expr */ + -4, /* (208) expr ::= expr IS NOT expr */ + -2, /* (209) expr ::= NOT expr */ + -2, /* (210) expr ::= BITNOT expr */ + -2, /* (211) expr ::= PLUS|MINUS expr */ + -1, /* (212) between_op ::= BETWEEN */ + -2, /* (213) between_op ::= NOT BETWEEN */ + -5, /* (214) expr ::= expr between_op expr AND expr */ + -1, /* (215) in_op ::= IN */ + -2, /* (216) in_op ::= NOT IN */ + -5, /* (217) expr ::= expr in_op LP exprlist RP */ + -3, /* (218) expr ::= LP select RP */ + -5, /* (219) expr ::= expr in_op LP select RP */ + -5, /* (220) expr ::= expr in_op nm dbnm paren_exprlist */ + -4, /* (221) expr ::= EXISTS LP select RP */ + -5, /* (222) expr ::= CASE case_operand case_exprlist case_else END */ + -5, /* (223) case_exprlist ::= case_exprlist WHEN expr THEN expr */ + -4, /* (224) case_exprlist ::= WHEN expr THEN expr */ + -2, /* (225) case_else ::= ELSE expr */ + 0, /* (226) case_else ::= */ + -1, /* (227) case_operand ::= expr */ + 0, /* (228) case_operand ::= */ + 0, /* (229) exprlist ::= */ + -3, /* (230) nexprlist ::= nexprlist COMMA expr */ + -1, /* (231) nexprlist ::= expr */ + 0, /* (232) paren_exprlist ::= */ + -3, /* (233) paren_exprlist ::= LP exprlist RP */ + -12, /* (234) cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP 
sortlist RP where_opt */ + -1, /* (235) uniqueflag ::= UNIQUE */ + 0, /* (236) uniqueflag ::= */ + 0, /* (237) eidlist_opt ::= */ + -3, /* (238) eidlist_opt ::= LP eidlist RP */ + -5, /* (239) eidlist ::= eidlist COMMA nm collate sortorder */ + -3, /* (240) eidlist ::= nm collate sortorder */ + 0, /* (241) collate ::= */ + -2, /* (242) collate ::= COLLATE ID|STRING */ + -4, /* (243) cmd ::= DROP INDEX ifexists fullname */ + -2, /* (244) cmd ::= VACUUM vinto */ + -3, /* (245) cmd ::= VACUUM nm vinto */ + -2, /* (246) vinto ::= INTO expr */ + 0, /* (247) vinto ::= */ + -3, /* (248) cmd ::= PRAGMA nm dbnm */ + -5, /* (249) cmd ::= PRAGMA nm dbnm EQ nmnum */ + -6, /* (250) cmd ::= PRAGMA nm dbnm LP nmnum RP */ + -5, /* (251) cmd ::= PRAGMA nm dbnm EQ minus_num */ + -6, /* (252) cmd ::= PRAGMA nm dbnm LP minus_num RP */ + -2, /* (253) plus_num ::= PLUS INTEGER|FLOAT */ + -2, /* (254) minus_num ::= MINUS INTEGER|FLOAT */ + -5, /* (255) cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END */ + -11, /* (256) trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause */ + -1, /* (257) trigger_time ::= BEFORE|AFTER */ + -2, /* (258) trigger_time ::= INSTEAD OF */ + 0, /* (259) trigger_time ::= */ + -1, /* (260) trigger_event ::= DELETE|INSERT */ + -1, /* (261) trigger_event ::= UPDATE */ + -3, /* (262) trigger_event ::= UPDATE OF idlist */ + 0, /* (263) when_clause ::= */ + -2, /* (264) when_clause ::= WHEN expr */ + -3, /* (265) trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI */ + -2, /* (266) trigger_cmd_list ::= trigger_cmd SEMI */ + -3, /* (267) trnm ::= nm DOT nm */ + -3, /* (268) tridxby ::= INDEXED BY nm */ + -2, /* (269) tridxby ::= NOT INDEXED */ + -9, /* (270) trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt */ + -8, /* (271) trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt */ + -6, /* (272) trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt */ + -3, /* (273) trigger_cmd ::= scanpt select scanpt */ + -4, /* (274) expr ::= RAISE LP IGNORE RP */ + -6, /* (275) expr ::= RAISE LP raisetype COMMA nm RP */ + -1, /* (276) raisetype ::= ROLLBACK */ + -1, /* (277) raisetype ::= ABORT */ + -1, /* (278) raisetype ::= FAIL */ + -4, /* (279) cmd ::= DROP TRIGGER ifexists fullname */ + -6, /* (280) cmd ::= ATTACH database_kw_opt expr AS expr key_opt */ + -3, /* (281) cmd ::= DETACH database_kw_opt expr */ + 0, /* (282) key_opt ::= */ + -2, /* (283) key_opt ::= KEY expr */ + -1, /* (284) cmd ::= REINDEX */ + -3, /* (285) cmd ::= REINDEX nm dbnm */ + -1, /* (286) cmd ::= ANALYZE */ + -3, /* (287) cmd ::= ANALYZE nm dbnm */ + -6, /* (288) cmd ::= ALTER TABLE fullname RENAME TO nm */ + -7, /* (289) cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist */ + -6, /* (290) cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm */ + -1, /* (291) add_column_fullname ::= fullname */ + -8, /* (292) cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm */ + -1, /* (293) cmd ::= create_vtab */ + -4, /* (294) cmd ::= create_vtab LP vtabarglist RP */ + -8, /* (295) create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm */ + 0, /* (296) vtabarg ::= */ + -1, /* (297) vtabargtoken ::= ANY */ + -3, /* (298) vtabargtoken ::= lp anylist RP */ + -1, /* (299) lp ::= LP */ + -2, /* (300) with ::= WITH wqlist */ + -3, /* (301) with ::= WITH RECURSIVE wqlist */ + -1, /* (302) wqas ::= AS */ + -2, /* (303) wqas ::= AS MATERIALIZED */ + -3, /* (304) wqas ::= AS NOT 
MATERIALIZED */ + -6, /* (305) wqitem ::= nm eidlist_opt wqas LP select RP */ + -1, /* (306) wqlist ::= wqitem */ + -3, /* (307) wqlist ::= wqlist COMMA wqitem */ + -1, /* (308) windowdefn_list ::= windowdefn */ + -3, /* (309) windowdefn_list ::= windowdefn_list COMMA windowdefn */ + -5, /* (310) windowdefn ::= nm AS LP window RP */ + -5, /* (311) window ::= PARTITION BY nexprlist orderby_opt frame_opt */ + -6, /* (312) window ::= nm PARTITION BY nexprlist orderby_opt frame_opt */ + -4, /* (313) window ::= ORDER BY sortlist frame_opt */ + -5, /* (314) window ::= nm ORDER BY sortlist frame_opt */ + -1, /* (315) window ::= frame_opt */ + -2, /* (316) window ::= nm frame_opt */ + 0, /* (317) frame_opt ::= */ + -3, /* (318) frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt */ + -6, /* (319) frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt */ + -1, /* (320) range_or_rows ::= RANGE|ROWS|GROUPS */ + -1, /* (321) frame_bound_s ::= frame_bound */ + -2, /* (322) frame_bound_s ::= UNBOUNDED PRECEDING */ + -1, /* (323) frame_bound_e ::= frame_bound */ + -2, /* (324) frame_bound_e ::= UNBOUNDED FOLLOWING */ + -2, /* (325) frame_bound ::= expr PRECEDING|FOLLOWING */ + -2, /* (326) frame_bound ::= CURRENT ROW */ + 0, /* (327) frame_exclude_opt ::= */ + -2, /* (328) frame_exclude_opt ::= EXCLUDE frame_exclude */ + -2, /* (329) frame_exclude ::= NO OTHERS */ + -2, /* (330) frame_exclude ::= CURRENT ROW */ + -1, /* (331) frame_exclude ::= GROUP|TIES */ + -2, /* (332) window_clause ::= WINDOW windowdefn_list */ + -2, /* (333) filter_over ::= filter_clause over_clause */ + -1, /* (334) filter_over ::= over_clause */ + -1, /* (335) filter_over ::= filter_clause */ + -4, /* (336) over_clause ::= OVER LP window RP */ + -2, /* (337) over_clause ::= OVER nm */ + -5, /* (338) filter_clause ::= FILTER LP WHERE expr RP */ + -1, /* (339) input ::= cmdlist */ + -2, /* (340) cmdlist ::= cmdlist ecmd */ + -1, /* (341) cmdlist ::= ecmd */ + -1, /* (342) ecmd ::= SEMI */ + -2, /* (343) ecmd ::= cmdx SEMI */ + -3, /* (344) ecmd ::= explain cmdx SEMI */ + 0, /* (345) trans_opt ::= */ + -1, /* (346) trans_opt ::= TRANSACTION */ + -2, /* (347) trans_opt ::= TRANSACTION nm */ + -1, /* (348) savepoint_opt ::= SAVEPOINT */ + 0, /* (349) savepoint_opt ::= */ + -2, /* (350) cmd ::= create_table create_table_args */ + -1, /* (351) table_option_set ::= table_option */ + -4, /* (352) columnlist ::= columnlist COMMA columnname carglist */ + -2, /* (353) columnlist ::= columnname carglist */ + -1, /* (354) nm ::= ID|INDEXED */ + -1, /* (355) nm ::= STRING */ + -1, /* (356) nm ::= JOIN_KW */ + -1, /* (357) typetoken ::= typename */ + -1, /* (358) typename ::= ID|STRING */ + -1, /* (359) signed ::= plus_num */ + -1, /* (360) signed ::= minus_num */ + -2, /* (361) carglist ::= carglist ccons */ + 0, /* (362) carglist ::= */ + -2, /* (363) ccons ::= NULL onconf */ + -4, /* (364) ccons ::= GENERATED ALWAYS AS generated */ + -2, /* (365) ccons ::= AS generated */ + -2, /* (366) conslist_opt ::= COMMA conslist */ + -3, /* (367) conslist ::= conslist tconscomma tcons */ + -1, /* (368) conslist ::= tcons */ + 0, /* (369) tconscomma ::= */ + -1, /* (370) defer_subclause_opt ::= defer_subclause */ + -1, /* (371) resolvetype ::= raisetype */ + -1, /* (372) selectnowith ::= oneselect */ + -1, /* (373) oneselect ::= values */ + -2, /* (374) sclp ::= selcollist COMMA */ + -1, /* (375) as ::= ID|STRING */ + 0, /* (376) returning ::= */ + -1, /* (377) expr ::= term */ + -1, /* (378) likeop ::= LIKE_KW|MATCH 
*/ + -1, /* (379) exprlist ::= nexprlist */ + -1, /* (380) nmnum ::= plus_num */ + -1, /* (381) nmnum ::= nm */ + -1, /* (382) nmnum ::= ON */ + -1, /* (383) nmnum ::= DELETE */ + -1, /* (384) nmnum ::= DEFAULT */ + -1, /* (385) plus_num ::= INTEGER|FLOAT */ + 0, /* (386) foreach_clause ::= */ + -3, /* (387) foreach_clause ::= FOR EACH ROW */ + -1, /* (388) trnm ::= nm */ + 0, /* (389) tridxby ::= */ + -1, /* (390) database_kw_opt ::= DATABASE */ + 0, /* (391) database_kw_opt ::= */ + 0, /* (392) kwcolumn_opt ::= */ + -1, /* (393) kwcolumn_opt ::= COLUMNKW */ + -1, /* (394) vtabarglist ::= vtabarg */ + -3, /* (395) vtabarglist ::= vtabarglist COMMA vtabarg */ + -2, /* (396) vtabarg ::= vtabarg vtabargtoken */ + 0, /* (397) anylist ::= */ + -4, /* (398) anylist ::= anylist LP anylist RP */ + -2, /* (399) anylist ::= anylist ANY */ + 0, /* (400) with ::= */ }; static void yy_accept(yyParser*); /* Forward Declaration */ @@ -161553,16 +163003,16 @@ static YYACTIONTYPE yy_reduce( { sqlite3FinishCoding(pParse); } break; case 3: /* cmd ::= BEGIN transtype trans_opt */ -{sqlite3BeginTransaction(pParse, yymsp[-1].minor.yy376);} +{sqlite3BeginTransaction(pParse, yymsp[-1].minor.yy64);} break; case 4: /* transtype ::= */ -{yymsp[1].minor.yy376 = TK_DEFERRED;} +{yymsp[1].minor.yy64 = TK_DEFERRED;} break; case 5: /* transtype ::= DEFERRED */ case 6: /* transtype ::= IMMEDIATE */ yytestcase(yyruleno==6); case 7: /* transtype ::= EXCLUSIVE */ yytestcase(yyruleno==7); - case 318: /* range_or_rows ::= RANGE|ROWS|GROUPS */ yytestcase(yyruleno==318); -{yymsp[0].minor.yy376 = yymsp[0].major; /*A-overwrites-X*/} + case 320: /* range_or_rows ::= RANGE|ROWS|GROUPS */ yytestcase(yyruleno==320); +{yymsp[0].minor.yy64 = yymsp[0].major; /*A-overwrites-X*/} break; case 8: /* cmd ::= COMMIT|END trans_opt */ case 9: /* cmd ::= ROLLBACK trans_opt */ yytestcase(yyruleno==9); @@ -161585,7 +163035,7 @@ static YYACTIONTYPE yy_reduce( break; case 13: /* create_table ::= createkw temp TABLE ifnotexists nm dbnm */ { - sqlite3StartTable(pParse,&yymsp[-1].minor.yy0,&yymsp[0].minor.yy0,yymsp[-4].minor.yy376,0,0,yymsp[-2].minor.yy376); + sqlite3StartTable(pParse,&yymsp[-1].minor.yy0,&yymsp[0].minor.yy0,yymsp[-4].minor.yy64,0,0,yymsp[-2].minor.yy64); } break; case 14: /* createkw ::= CREATE */ @@ -161593,95 +163043,112 @@ static YYACTIONTYPE yy_reduce( break; case 15: /* ifnotexists ::= */ case 18: /* temp ::= */ yytestcase(yyruleno==18); - case 21: /* table_options ::= */ yytestcase(yyruleno==21); - case 45: /* autoinc ::= */ yytestcase(yyruleno==45); - case 60: /* init_deferred_pred_opt ::= */ yytestcase(yyruleno==60); - case 70: /* defer_subclause_opt ::= */ yytestcase(yyruleno==70); - case 79: /* ifexists ::= */ yytestcase(yyruleno==79); - case 96: /* distinct ::= */ yytestcase(yyruleno==96); - case 239: /* collate ::= */ yytestcase(yyruleno==239); -{yymsp[1].minor.yy376 = 0;} + case 47: /* autoinc ::= */ yytestcase(yyruleno==47); + case 62: /* init_deferred_pred_opt ::= */ yytestcase(yyruleno==62); + case 72: /* defer_subclause_opt ::= */ yytestcase(yyruleno==72); + case 81: /* ifexists ::= */ yytestcase(yyruleno==81); + case 98: /* distinct ::= */ yytestcase(yyruleno==98); + case 241: /* collate ::= */ yytestcase(yyruleno==241); +{yymsp[1].minor.yy64 = 0;} break; case 16: /* ifnotexists ::= IF NOT EXISTS */ -{yymsp[-2].minor.yy376 = 1;} +{yymsp[-2].minor.yy64 = 1;} break; case 17: /* temp ::= TEMP */ -{yymsp[0].minor.yy376 = pParse->db->init.busy==0;} +{yymsp[0].minor.yy64 = pParse->db->init.busy==0;} break; - case 19: /* 
create_table_args ::= LP columnlist conslist_opt RP table_options */ + case 19: /* create_table_args ::= LP columnlist conslist_opt RP table_option_set */ { - sqlite3EndTable(pParse,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0,yymsp[0].minor.yy376,0); + sqlite3EndTable(pParse,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0,yymsp[0].minor.yy51,0); } break; case 20: /* create_table_args ::= AS select */ { - sqlite3EndTable(pParse,0,0,0,yymsp[0].minor.yy81); - sqlite3SelectDelete(pParse->db, yymsp[0].minor.yy81); + sqlite3EndTable(pParse,0,0,0,yymsp[0].minor.yy303); + sqlite3SelectDelete(pParse->db, yymsp[0].minor.yy303); } break; - case 22: /* table_options ::= WITHOUT nm */ + case 21: /* table_option_set ::= */ +{yymsp[1].minor.yy51 = 0;} + break; + case 22: /* table_option_set ::= table_option_set COMMA table_option */ +{yylhsminor.yy51 = yymsp[-2].minor.yy51|yymsp[0].minor.yy51;} + yymsp[-2].minor.yy51 = yylhsminor.yy51; + break; + case 23: /* table_option ::= WITHOUT nm */ { if( yymsp[0].minor.yy0.n==5 && sqlite3_strnicmp(yymsp[0].minor.yy0.z,"rowid",5)==0 ){ - yymsp[-1].minor.yy376 = TF_WithoutRowid | TF_NoVisibleRowid; + yymsp[-1].minor.yy51 = TF_WithoutRowid | TF_NoVisibleRowid; }else{ - yymsp[-1].minor.yy376 = 0; + yymsp[-1].minor.yy51 = 0; sqlite3ErrorMsg(pParse, "unknown table option: %.*s", yymsp[0].minor.yy0.n, yymsp[0].minor.yy0.z); } } break; - case 23: /* columnname ::= nm typetoken */ -{sqlite3AddColumn(pParse,&yymsp[-1].minor.yy0,&yymsp[0].minor.yy0);} + case 24: /* table_option ::= nm */ +{ + if( yymsp[0].minor.yy0.n==6 && sqlite3_strnicmp(yymsp[0].minor.yy0.z,"strict",6)==0 ){ + yylhsminor.yy51 = TF_Strict; + }else{ + yylhsminor.yy51 = 0; + sqlite3ErrorMsg(pParse, "unknown table option: %.*s", yymsp[0].minor.yy0.n, yymsp[0].minor.yy0.z); + } +} + yymsp[0].minor.yy51 = yylhsminor.yy51; break; - case 24: /* typetoken ::= */ - case 63: /* conslist_opt ::= */ yytestcase(yyruleno==63); - case 102: /* as ::= */ yytestcase(yyruleno==102); + case 25: /* columnname ::= nm typetoken */ +{sqlite3AddColumn(pParse,yymsp[-1].minor.yy0,yymsp[0].minor.yy0);} + break; + case 26: /* typetoken ::= */ + case 65: /* conslist_opt ::= */ yytestcase(yyruleno==65); + case 104: /* as ::= */ yytestcase(yyruleno==104); {yymsp[1].minor.yy0.n = 0; yymsp[1].minor.yy0.z = 0;} break; - case 25: /* typetoken ::= typename LP signed RP */ + case 27: /* typetoken ::= typename LP signed RP */ { yymsp[-3].minor.yy0.n = (int)(&yymsp[0].minor.yy0.z[yymsp[0].minor.yy0.n] - yymsp[-3].minor.yy0.z); } break; - case 26: /* typetoken ::= typename LP signed COMMA signed RP */ + case 28: /* typetoken ::= typename LP signed COMMA signed RP */ { yymsp[-5].minor.yy0.n = (int)(&yymsp[0].minor.yy0.z[yymsp[0].minor.yy0.n] - yymsp[-5].minor.yy0.z); } break; - case 27: /* typename ::= typename ID|STRING */ + case 29: /* typename ::= typename ID|STRING */ {yymsp[-1].minor.yy0.n=yymsp[0].minor.yy0.n+(int)(yymsp[0].minor.yy0.z-yymsp[-1].minor.yy0.z);} break; - case 28: /* scanpt ::= */ + case 30: /* scanpt ::= */ { assert( yyLookahead!=YYNOCODE ); - yymsp[1].minor.yy504 = yyLookaheadToken.z; + yymsp[1].minor.yy600 = yyLookaheadToken.z; } break; - case 29: /* scantok ::= */ + case 31: /* scantok ::= */ { assert( yyLookahead!=YYNOCODE ); yymsp[1].minor.yy0 = yyLookaheadToken; } break; - case 30: /* ccons ::= CONSTRAINT nm */ - case 65: /* tcons ::= CONSTRAINT nm */ yytestcase(yyruleno==65); + case 32: /* ccons ::= CONSTRAINT nm */ + case 67: /* tcons ::= CONSTRAINT nm */ yytestcase(yyruleno==67); {pParse->constraintName = yymsp[0].minor.yy0;} 
break; - case 31: /* ccons ::= DEFAULT scantok term */ -{sqlite3AddDefaultValue(pParse,yymsp[0].minor.yy404,yymsp[-1].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]);} + case 33: /* ccons ::= DEFAULT scantok term */ +{sqlite3AddDefaultValue(pParse,yymsp[0].minor.yy626,yymsp[-1].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]);} break; - case 32: /* ccons ::= DEFAULT LP expr RP */ -{sqlite3AddDefaultValue(pParse,yymsp[-1].minor.yy404,yymsp[-2].minor.yy0.z+1,yymsp[0].minor.yy0.z);} + case 34: /* ccons ::= DEFAULT LP expr RP */ +{sqlite3AddDefaultValue(pParse,yymsp[-1].minor.yy626,yymsp[-2].minor.yy0.z+1,yymsp[0].minor.yy0.z);} break; - case 33: /* ccons ::= DEFAULT PLUS scantok term */ -{sqlite3AddDefaultValue(pParse,yymsp[0].minor.yy404,yymsp[-2].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]);} + case 35: /* ccons ::= DEFAULT PLUS scantok term */ +{sqlite3AddDefaultValue(pParse,yymsp[0].minor.yy626,yymsp[-2].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]);} break; - case 34: /* ccons ::= DEFAULT MINUS scantok term */ + case 36: /* ccons ::= DEFAULT MINUS scantok term */ { - Expr *p = sqlite3PExpr(pParse, TK_UMINUS, yymsp[0].minor.yy404, 0); + Expr *p = sqlite3PExpr(pParse, TK_UMINUS, yymsp[0].minor.yy626, 0); sqlite3AddDefaultValue(pParse,p,yymsp[-2].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]); } break; - case 35: /* ccons ::= DEFAULT scantok ID|INDEXED */ + case 37: /* ccons ::= DEFAULT scantok ID|INDEXED */ { Expr *p = tokenExpr(pParse, TK_STRING, yymsp[0].minor.yy0); if( p ){ @@ -161691,162 +163158,162 @@ static YYACTIONTYPE yy_reduce( sqlite3AddDefaultValue(pParse,p,yymsp[0].minor.yy0.z,yymsp[0].minor.yy0.z+yymsp[0].minor.yy0.n); } break; - case 36: /* ccons ::= NOT NULL onconf */ -{sqlite3AddNotNull(pParse, yymsp[0].minor.yy376);} + case 38: /* ccons ::= NOT NULL onconf */ +{sqlite3AddNotNull(pParse, yymsp[0].minor.yy64);} break; - case 37: /* ccons ::= PRIMARY KEY sortorder onconf autoinc */ -{sqlite3AddPrimaryKey(pParse,0,yymsp[-1].minor.yy376,yymsp[0].minor.yy376,yymsp[-2].minor.yy376);} + case 39: /* ccons ::= PRIMARY KEY sortorder onconf autoinc */ +{sqlite3AddPrimaryKey(pParse,0,yymsp[-1].minor.yy64,yymsp[0].minor.yy64,yymsp[-2].minor.yy64);} break; - case 38: /* ccons ::= UNIQUE onconf */ -{sqlite3CreateIndex(pParse,0,0,0,0,yymsp[0].minor.yy376,0,0,0,0, + case 40: /* ccons ::= UNIQUE onconf */ +{sqlite3CreateIndex(pParse,0,0,0,0,yymsp[0].minor.yy64,0,0,0,0, SQLITE_IDXTYPE_UNIQUE);} break; - case 39: /* ccons ::= CHECK LP expr RP */ -{sqlite3AddCheckConstraint(pParse,yymsp[-1].minor.yy404,yymsp[-2].minor.yy0.z,yymsp[0].minor.yy0.z);} + case 41: /* ccons ::= CHECK LP expr RP */ +{sqlite3AddCheckConstraint(pParse,yymsp[-1].minor.yy626,yymsp[-2].minor.yy0.z,yymsp[0].minor.yy0.z);} break; - case 40: /* ccons ::= REFERENCES nm eidlist_opt refargs */ -{sqlite3CreateForeignKey(pParse,0,&yymsp[-2].minor.yy0,yymsp[-1].minor.yy70,yymsp[0].minor.yy376);} + case 42: /* ccons ::= REFERENCES nm eidlist_opt refargs */ +{sqlite3CreateForeignKey(pParse,0,&yymsp[-2].minor.yy0,yymsp[-1].minor.yy562,yymsp[0].minor.yy64);} break; - case 41: /* ccons ::= defer_subclause */ -{sqlite3DeferForeignKey(pParse,yymsp[0].minor.yy376);} + case 43: /* ccons ::= defer_subclause */ +{sqlite3DeferForeignKey(pParse,yymsp[0].minor.yy64);} break; - case 42: /* ccons ::= COLLATE ID|STRING */ + case 44: /* ccons ::= COLLATE ID|STRING */ {sqlite3AddCollateType(pParse, &yymsp[0].minor.yy0);} break; - case 43: /* generated ::= LP expr RP */ 
-{sqlite3AddGenerated(pParse,yymsp[-1].minor.yy404,0);} + case 45: /* generated ::= LP expr RP */ +{sqlite3AddGenerated(pParse,yymsp[-1].minor.yy626,0);} break; - case 44: /* generated ::= LP expr RP ID */ -{sqlite3AddGenerated(pParse,yymsp[-2].minor.yy404,&yymsp[0].minor.yy0);} + case 46: /* generated ::= LP expr RP ID */ +{sqlite3AddGenerated(pParse,yymsp[-2].minor.yy626,&yymsp[0].minor.yy0);} break; - case 46: /* autoinc ::= AUTOINCR */ -{yymsp[0].minor.yy376 = 1;} + case 48: /* autoinc ::= AUTOINCR */ +{yymsp[0].minor.yy64 = 1;} break; - case 47: /* refargs ::= */ -{ yymsp[1].minor.yy376 = OE_None*0x0101; /* EV: R-19803-45884 */} + case 49: /* refargs ::= */ +{ yymsp[1].minor.yy64 = OE_None*0x0101; /* EV: R-19803-45884 */} break; - case 48: /* refargs ::= refargs refarg */ -{ yymsp[-1].minor.yy376 = (yymsp[-1].minor.yy376 & ~yymsp[0].minor.yy139.mask) | yymsp[0].minor.yy139.value; } + case 50: /* refargs ::= refargs refarg */ +{ yymsp[-1].minor.yy64 = (yymsp[-1].minor.yy64 & ~yymsp[0].minor.yy83.mask) | yymsp[0].minor.yy83.value; } break; - case 49: /* refarg ::= MATCH nm */ -{ yymsp[-1].minor.yy139.value = 0; yymsp[-1].minor.yy139.mask = 0x000000; } + case 51: /* refarg ::= MATCH nm */ +{ yymsp[-1].minor.yy83.value = 0; yymsp[-1].minor.yy83.mask = 0x000000; } break; - case 50: /* refarg ::= ON INSERT refact */ -{ yymsp[-2].minor.yy139.value = 0; yymsp[-2].minor.yy139.mask = 0x000000; } + case 52: /* refarg ::= ON INSERT refact */ +{ yymsp[-2].minor.yy83.value = 0; yymsp[-2].minor.yy83.mask = 0x000000; } break; - case 51: /* refarg ::= ON DELETE refact */ -{ yymsp[-2].minor.yy139.value = yymsp[0].minor.yy376; yymsp[-2].minor.yy139.mask = 0x0000ff; } + case 53: /* refarg ::= ON DELETE refact */ +{ yymsp[-2].minor.yy83.value = yymsp[0].minor.yy64; yymsp[-2].minor.yy83.mask = 0x0000ff; } break; - case 52: /* refarg ::= ON UPDATE refact */ -{ yymsp[-2].minor.yy139.value = yymsp[0].minor.yy376<<8; yymsp[-2].minor.yy139.mask = 0x00ff00; } + case 54: /* refarg ::= ON UPDATE refact */ +{ yymsp[-2].minor.yy83.value = yymsp[0].minor.yy64<<8; yymsp[-2].minor.yy83.mask = 0x00ff00; } break; - case 53: /* refact ::= SET NULL */ -{ yymsp[-1].minor.yy376 = OE_SetNull; /* EV: R-33326-45252 */} + case 55: /* refact ::= SET NULL */ +{ yymsp[-1].minor.yy64 = OE_SetNull; /* EV: R-33326-45252 */} break; - case 54: /* refact ::= SET DEFAULT */ -{ yymsp[-1].minor.yy376 = OE_SetDflt; /* EV: R-33326-45252 */} + case 56: /* refact ::= SET DEFAULT */ +{ yymsp[-1].minor.yy64 = OE_SetDflt; /* EV: R-33326-45252 */} break; - case 55: /* refact ::= CASCADE */ -{ yymsp[0].minor.yy376 = OE_Cascade; /* EV: R-33326-45252 */} + case 57: /* refact ::= CASCADE */ +{ yymsp[0].minor.yy64 = OE_Cascade; /* EV: R-33326-45252 */} break; - case 56: /* refact ::= RESTRICT */ -{ yymsp[0].minor.yy376 = OE_Restrict; /* EV: R-33326-45252 */} + case 58: /* refact ::= RESTRICT */ +{ yymsp[0].minor.yy64 = OE_Restrict; /* EV: R-33326-45252 */} break; - case 57: /* refact ::= NO ACTION */ -{ yymsp[-1].minor.yy376 = OE_None; /* EV: R-33326-45252 */} + case 59: /* refact ::= NO ACTION */ +{ yymsp[-1].minor.yy64 = OE_None; /* EV: R-33326-45252 */} break; - case 58: /* defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt */ -{yymsp[-2].minor.yy376 = 0;} + case 60: /* defer_subclause ::= NOT DEFERRABLE init_deferred_pred_opt */ +{yymsp[-2].minor.yy64 = 0;} break; - case 59: /* defer_subclause ::= DEFERRABLE init_deferred_pred_opt */ - case 74: /* orconf ::= OR resolvetype */ yytestcase(yyruleno==74); - case 169: /* insert_cmd ::= INSERT 
orconf */ yytestcase(yyruleno==169); -{yymsp[-1].minor.yy376 = yymsp[0].minor.yy376;} + case 61: /* defer_subclause ::= DEFERRABLE init_deferred_pred_opt */ + case 76: /* orconf ::= OR resolvetype */ yytestcase(yyruleno==76); + case 171: /* insert_cmd ::= INSERT orconf */ yytestcase(yyruleno==171); +{yymsp[-1].minor.yy64 = yymsp[0].minor.yy64;} break; - case 61: /* init_deferred_pred_opt ::= INITIALLY DEFERRED */ - case 78: /* ifexists ::= IF EXISTS */ yytestcase(yyruleno==78); - case 211: /* between_op ::= NOT BETWEEN */ yytestcase(yyruleno==211); - case 214: /* in_op ::= NOT IN */ yytestcase(yyruleno==214); - case 240: /* collate ::= COLLATE ID|STRING */ yytestcase(yyruleno==240); -{yymsp[-1].minor.yy376 = 1;} + case 63: /* init_deferred_pred_opt ::= INITIALLY DEFERRED */ + case 80: /* ifexists ::= IF EXISTS */ yytestcase(yyruleno==80); + case 213: /* between_op ::= NOT BETWEEN */ yytestcase(yyruleno==213); + case 216: /* in_op ::= NOT IN */ yytestcase(yyruleno==216); + case 242: /* collate ::= COLLATE ID|STRING */ yytestcase(yyruleno==242); +{yymsp[-1].minor.yy64 = 1;} break; - case 62: /* init_deferred_pred_opt ::= INITIALLY IMMEDIATE */ -{yymsp[-1].minor.yy376 = 0;} + case 64: /* init_deferred_pred_opt ::= INITIALLY IMMEDIATE */ +{yymsp[-1].minor.yy64 = 0;} break; - case 64: /* tconscomma ::= COMMA */ + case 66: /* tconscomma ::= COMMA */ {pParse->constraintName.n = 0;} break; - case 66: /* tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ -{sqlite3AddPrimaryKey(pParse,yymsp[-3].minor.yy70,yymsp[0].minor.yy376,yymsp[-2].minor.yy376,0);} + case 68: /* tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ +{sqlite3AddPrimaryKey(pParse,yymsp[-3].minor.yy562,yymsp[0].minor.yy64,yymsp[-2].minor.yy64,0);} break; - case 67: /* tcons ::= UNIQUE LP sortlist RP onconf */ -{sqlite3CreateIndex(pParse,0,0,0,yymsp[-2].minor.yy70,yymsp[0].minor.yy376,0,0,0,0, + case 69: /* tcons ::= UNIQUE LP sortlist RP onconf */ +{sqlite3CreateIndex(pParse,0,0,0,yymsp[-2].minor.yy562,yymsp[0].minor.yy64,0,0,0,0, SQLITE_IDXTYPE_UNIQUE);} break; - case 68: /* tcons ::= CHECK LP expr RP onconf */ -{sqlite3AddCheckConstraint(pParse,yymsp[-2].minor.yy404,yymsp[-3].minor.yy0.z,yymsp[-1].minor.yy0.z);} + case 70: /* tcons ::= CHECK LP expr RP onconf */ +{sqlite3AddCheckConstraint(pParse,yymsp[-2].minor.yy626,yymsp[-3].minor.yy0.z,yymsp[-1].minor.yy0.z);} break; - case 69: /* tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt */ + case 71: /* tcons ::= FOREIGN KEY LP eidlist RP REFERENCES nm eidlist_opt refargs defer_subclause_opt */ { - sqlite3CreateForeignKey(pParse, yymsp[-6].minor.yy70, &yymsp[-3].minor.yy0, yymsp[-2].minor.yy70, yymsp[-1].minor.yy376); - sqlite3DeferForeignKey(pParse, yymsp[0].minor.yy376); + sqlite3CreateForeignKey(pParse, yymsp[-6].minor.yy562, &yymsp[-3].minor.yy0, yymsp[-2].minor.yy562, yymsp[-1].minor.yy64); + sqlite3DeferForeignKey(pParse, yymsp[0].minor.yy64); } break; - case 71: /* onconf ::= */ - case 73: /* orconf ::= */ yytestcase(yyruleno==73); -{yymsp[1].minor.yy376 = OE_Default;} + case 73: /* onconf ::= */ + case 75: /* orconf ::= */ yytestcase(yyruleno==75); +{yymsp[1].minor.yy64 = OE_Default;} break; - case 72: /* onconf ::= ON CONFLICT resolvetype */ -{yymsp[-2].minor.yy376 = yymsp[0].minor.yy376;} + case 74: /* onconf ::= ON CONFLICT resolvetype */ +{yymsp[-2].minor.yy64 = yymsp[0].minor.yy64;} break; - case 75: /* resolvetype ::= IGNORE */ -{yymsp[0].minor.yy376 = OE_Ignore;} + case 77: /* resolvetype ::= IGNORE */ +{yymsp[0].minor.yy64 
= OE_Ignore;} break; - case 76: /* resolvetype ::= REPLACE */ - case 170: /* insert_cmd ::= REPLACE */ yytestcase(yyruleno==170); -{yymsp[0].minor.yy376 = OE_Replace;} + case 78: /* resolvetype ::= REPLACE */ + case 172: /* insert_cmd ::= REPLACE */ yytestcase(yyruleno==172); +{yymsp[0].minor.yy64 = OE_Replace;} break; - case 77: /* cmd ::= DROP TABLE ifexists fullname */ + case 79: /* cmd ::= DROP TABLE ifexists fullname */ { - sqlite3DropTable(pParse, yymsp[0].minor.yy153, 0, yymsp[-1].minor.yy376); + sqlite3DropTable(pParse, yymsp[0].minor.yy607, 0, yymsp[-1].minor.yy64); } break; - case 80: /* cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select */ + case 82: /* cmd ::= createkw temp VIEW ifnotexists nm dbnm eidlist_opt AS select */ { - sqlite3CreateView(pParse, &yymsp[-8].minor.yy0, &yymsp[-4].minor.yy0, &yymsp[-3].minor.yy0, yymsp[-2].minor.yy70, yymsp[0].minor.yy81, yymsp[-7].minor.yy376, yymsp[-5].minor.yy376); + sqlite3CreateView(pParse, &yymsp[-8].minor.yy0, &yymsp[-4].minor.yy0, &yymsp[-3].minor.yy0, yymsp[-2].minor.yy562, yymsp[0].minor.yy303, yymsp[-7].minor.yy64, yymsp[-5].minor.yy64); } break; - case 81: /* cmd ::= DROP VIEW ifexists fullname */ + case 83: /* cmd ::= DROP VIEW ifexists fullname */ { - sqlite3DropTable(pParse, yymsp[0].minor.yy153, 1, yymsp[-1].minor.yy376); + sqlite3DropTable(pParse, yymsp[0].minor.yy607, 1, yymsp[-1].minor.yy64); } break; - case 82: /* cmd ::= select */ + case 84: /* cmd ::= select */ { SelectDest dest = {SRT_Output, 0, 0, 0, 0, 0, 0}; - sqlite3Select(pParse, yymsp[0].minor.yy81, &dest); - sqlite3SelectDelete(pParse->db, yymsp[0].minor.yy81); + sqlite3Select(pParse, yymsp[0].minor.yy303, &dest); + sqlite3SelectDelete(pParse->db, yymsp[0].minor.yy303); } break; - case 83: /* select ::= WITH wqlist selectnowith */ -{yymsp[-2].minor.yy81 = attachWithToSelect(pParse,yymsp[0].minor.yy81,yymsp[-1].minor.yy103);} + case 85: /* select ::= WITH wqlist selectnowith */ +{yymsp[-2].minor.yy303 = attachWithToSelect(pParse,yymsp[0].minor.yy303,yymsp[-1].minor.yy43);} break; - case 84: /* select ::= WITH RECURSIVE wqlist selectnowith */ -{yymsp[-3].minor.yy81 = attachWithToSelect(pParse,yymsp[0].minor.yy81,yymsp[-1].minor.yy103);} + case 86: /* select ::= WITH RECURSIVE wqlist selectnowith */ +{yymsp[-3].minor.yy303 = attachWithToSelect(pParse,yymsp[0].minor.yy303,yymsp[-1].minor.yy43);} break; - case 85: /* select ::= selectnowith */ + case 87: /* select ::= selectnowith */ { - Select *p = yymsp[0].minor.yy81; + Select *p = yymsp[0].minor.yy303; if( p ){ parserDoubleLinkSelect(pParse, p); } - yymsp[0].minor.yy81 = p; /*A-overwrites-X*/ + yymsp[0].minor.yy303 = p; /*A-overwrites-X*/ } break; - case 86: /* selectnowith ::= selectnowith multiselect_op oneselect */ + case 88: /* selectnowith ::= selectnowith multiselect_op oneselect */ { - Select *pRhs = yymsp[0].minor.yy81; - Select *pLhs = yymsp[-2].minor.yy81; + Select *pRhs = yymsp[0].minor.yy303; + Select *pLhs = yymsp[-2].minor.yy303; if( pRhs && pRhs->pPrior ){ SrcList *pFrom; Token x; @@ -161856,140 +163323,140 @@ static YYACTIONTYPE yy_reduce( pRhs = sqlite3SelectNew(pParse,0,pFrom,0,0,0,0,0,0); } if( pRhs ){ - pRhs->op = (u8)yymsp[-1].minor.yy376; + pRhs->op = (u8)yymsp[-1].minor.yy64; pRhs->pPrior = pLhs; if( ALWAYS(pLhs) ) pLhs->selFlags &= ~SF_MultiValue; pRhs->selFlags &= ~SF_MultiValue; - if( yymsp[-1].minor.yy376!=TK_ALL ) pParse->hasCompound = 1; + if( yymsp[-1].minor.yy64!=TK_ALL ) pParse->hasCompound = 1; }else{ sqlite3SelectDelete(pParse->db, pLhs); } - 
yymsp[-2].minor.yy81 = pRhs; + yymsp[-2].minor.yy303 = pRhs; } break; - case 87: /* multiselect_op ::= UNION */ - case 89: /* multiselect_op ::= EXCEPT|INTERSECT */ yytestcase(yyruleno==89); -{yymsp[0].minor.yy376 = yymsp[0].major; /*A-overwrites-OP*/} + case 89: /* multiselect_op ::= UNION */ + case 91: /* multiselect_op ::= EXCEPT|INTERSECT */ yytestcase(yyruleno==91); +{yymsp[0].minor.yy64 = yymsp[0].major; /*A-overwrites-OP*/} break; - case 88: /* multiselect_op ::= UNION ALL */ -{yymsp[-1].minor.yy376 = TK_ALL;} + case 90: /* multiselect_op ::= UNION ALL */ +{yymsp[-1].minor.yy64 = TK_ALL;} break; - case 90: /* oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt */ + case 92: /* oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt orderby_opt limit_opt */ { - yymsp[-8].minor.yy81 = sqlite3SelectNew(pParse,yymsp[-6].minor.yy70,yymsp[-5].minor.yy153,yymsp[-4].minor.yy404,yymsp[-3].minor.yy70,yymsp[-2].minor.yy404,yymsp[-1].minor.yy70,yymsp[-7].minor.yy376,yymsp[0].minor.yy404); + yymsp[-8].minor.yy303 = sqlite3SelectNew(pParse,yymsp[-6].minor.yy562,yymsp[-5].minor.yy607,yymsp[-4].minor.yy626,yymsp[-3].minor.yy562,yymsp[-2].minor.yy626,yymsp[-1].minor.yy562,yymsp[-7].minor.yy64,yymsp[0].minor.yy626); } break; - case 91: /* oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt */ + case 93: /* oneselect ::= SELECT distinct selcollist from where_opt groupby_opt having_opt window_clause orderby_opt limit_opt */ { - yymsp[-9].minor.yy81 = sqlite3SelectNew(pParse,yymsp[-7].minor.yy70,yymsp[-6].minor.yy153,yymsp[-5].minor.yy404,yymsp[-4].minor.yy70,yymsp[-3].minor.yy404,yymsp[-1].minor.yy70,yymsp[-8].minor.yy376,yymsp[0].minor.yy404); - if( yymsp[-9].minor.yy81 ){ - yymsp[-9].minor.yy81->pWinDefn = yymsp[-2].minor.yy49; + yymsp[-9].minor.yy303 = sqlite3SelectNew(pParse,yymsp[-7].minor.yy562,yymsp[-6].minor.yy607,yymsp[-5].minor.yy626,yymsp[-4].minor.yy562,yymsp[-3].minor.yy626,yymsp[-1].minor.yy562,yymsp[-8].minor.yy64,yymsp[0].minor.yy626); + if( yymsp[-9].minor.yy303 ){ + yymsp[-9].minor.yy303->pWinDefn = yymsp[-2].minor.yy375; }else{ - sqlite3WindowListDelete(pParse->db, yymsp[-2].minor.yy49); + sqlite3WindowListDelete(pParse->db, yymsp[-2].minor.yy375); } } break; - case 92: /* values ::= VALUES LP nexprlist RP */ + case 94: /* values ::= VALUES LP nexprlist RP */ { - yymsp[-3].minor.yy81 = sqlite3SelectNew(pParse,yymsp[-1].minor.yy70,0,0,0,0,0,SF_Values,0); + yymsp[-3].minor.yy303 = sqlite3SelectNew(pParse,yymsp[-1].minor.yy562,0,0,0,0,0,SF_Values,0); } break; - case 93: /* values ::= values COMMA LP nexprlist RP */ + case 95: /* values ::= values COMMA LP nexprlist RP */ { - Select *pRight, *pLeft = yymsp[-4].minor.yy81; - pRight = sqlite3SelectNew(pParse,yymsp[-1].minor.yy70,0,0,0,0,0,SF_Values|SF_MultiValue,0); + Select *pRight, *pLeft = yymsp[-4].minor.yy303; + pRight = sqlite3SelectNew(pParse,yymsp[-1].minor.yy562,0,0,0,0,0,SF_Values|SF_MultiValue,0); if( ALWAYS(pLeft) ) pLeft->selFlags &= ~SF_MultiValue; if( pRight ){ pRight->op = TK_ALL; pRight->pPrior = pLeft; - yymsp[-4].minor.yy81 = pRight; + yymsp[-4].minor.yy303 = pRight; }else{ - yymsp[-4].minor.yy81 = pLeft; + yymsp[-4].minor.yy303 = pLeft; } } break; - case 94: /* distinct ::= DISTINCT */ -{yymsp[0].minor.yy376 = SF_Distinct;} + case 96: /* distinct ::= DISTINCT */ +{yymsp[0].minor.yy64 = SF_Distinct;} break; - case 95: /* distinct ::= ALL */ -{yymsp[0].minor.yy376 = SF_All;} + case 97: /* 
distinct ::= ALL */ +{yymsp[0].minor.yy64 = SF_All;} break; - case 97: /* sclp ::= */ - case 130: /* orderby_opt ::= */ yytestcase(yyruleno==130); - case 140: /* groupby_opt ::= */ yytestcase(yyruleno==140); - case 227: /* exprlist ::= */ yytestcase(yyruleno==227); - case 230: /* paren_exprlist ::= */ yytestcase(yyruleno==230); - case 235: /* eidlist_opt ::= */ yytestcase(yyruleno==235); -{yymsp[1].minor.yy70 = 0;} + case 99: /* sclp ::= */ + case 132: /* orderby_opt ::= */ yytestcase(yyruleno==132); + case 142: /* groupby_opt ::= */ yytestcase(yyruleno==142); + case 229: /* exprlist ::= */ yytestcase(yyruleno==229); + case 232: /* paren_exprlist ::= */ yytestcase(yyruleno==232); + case 237: /* eidlist_opt ::= */ yytestcase(yyruleno==237); +{yymsp[1].minor.yy562 = 0;} break; - case 98: /* selcollist ::= sclp scanpt expr scanpt as */ + case 100: /* selcollist ::= sclp scanpt expr scanpt as */ { - yymsp[-4].minor.yy70 = sqlite3ExprListAppend(pParse, yymsp[-4].minor.yy70, yymsp[-2].minor.yy404); - if( yymsp[0].minor.yy0.n>0 ) sqlite3ExprListSetName(pParse, yymsp[-4].minor.yy70, &yymsp[0].minor.yy0, 1); - sqlite3ExprListSetSpan(pParse,yymsp[-4].minor.yy70,yymsp[-3].minor.yy504,yymsp[-1].minor.yy504); + yymsp[-4].minor.yy562 = sqlite3ExprListAppend(pParse, yymsp[-4].minor.yy562, yymsp[-2].minor.yy626); + if( yymsp[0].minor.yy0.n>0 ) sqlite3ExprListSetName(pParse, yymsp[-4].minor.yy562, &yymsp[0].minor.yy0, 1); + sqlite3ExprListSetSpan(pParse,yymsp[-4].minor.yy562,yymsp[-3].minor.yy600,yymsp[-1].minor.yy600); } break; - case 99: /* selcollist ::= sclp scanpt STAR */ + case 101: /* selcollist ::= sclp scanpt STAR */ { Expr *p = sqlite3Expr(pParse->db, TK_ASTERISK, 0); - yymsp[-2].minor.yy70 = sqlite3ExprListAppend(pParse, yymsp[-2].minor.yy70, p); + yymsp[-2].minor.yy562 = sqlite3ExprListAppend(pParse, yymsp[-2].minor.yy562, p); } break; - case 100: /* selcollist ::= sclp scanpt nm DOT STAR */ + case 102: /* selcollist ::= sclp scanpt nm DOT STAR */ { Expr *pRight = sqlite3PExpr(pParse, TK_ASTERISK, 0, 0); Expr *pLeft = sqlite3ExprAlloc(pParse->db, TK_ID, &yymsp[-2].minor.yy0, 1); Expr *pDot = sqlite3PExpr(pParse, TK_DOT, pLeft, pRight); - yymsp[-4].minor.yy70 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy70, pDot); + yymsp[-4].minor.yy562 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy562, pDot); } break; - case 101: /* as ::= AS nm */ - case 112: /* dbnm ::= DOT nm */ yytestcase(yyruleno==112); - case 251: /* plus_num ::= PLUS INTEGER|FLOAT */ yytestcase(yyruleno==251); - case 252: /* minus_num ::= MINUS INTEGER|FLOAT */ yytestcase(yyruleno==252); + case 103: /* as ::= AS nm */ + case 114: /* dbnm ::= DOT nm */ yytestcase(yyruleno==114); + case 253: /* plus_num ::= PLUS INTEGER|FLOAT */ yytestcase(yyruleno==253); + case 254: /* minus_num ::= MINUS INTEGER|FLOAT */ yytestcase(yyruleno==254); {yymsp[-1].minor.yy0 = yymsp[0].minor.yy0;} break; - case 103: /* from ::= */ - case 106: /* stl_prefix ::= */ yytestcase(yyruleno==106); -{yymsp[1].minor.yy153 = 0;} + case 105: /* from ::= */ + case 108: /* stl_prefix ::= */ yytestcase(yyruleno==108); +{yymsp[1].minor.yy607 = 0;} break; - case 104: /* from ::= FROM seltablist */ + case 106: /* from ::= FROM seltablist */ { - yymsp[-1].minor.yy153 = yymsp[0].minor.yy153; - sqlite3SrcListShiftJoinType(yymsp[-1].minor.yy153); + yymsp[-1].minor.yy607 = yymsp[0].minor.yy607; + sqlite3SrcListShiftJoinType(yymsp[-1].minor.yy607); } break; - case 105: /* stl_prefix ::= seltablist joinop */ + case 107: /* stl_prefix ::= seltablist joinop */ { - if( 
ALWAYS(yymsp[-1].minor.yy153 && yymsp[-1].minor.yy153->nSrc>0) ) yymsp[-1].minor.yy153->a[yymsp[-1].minor.yy153->nSrc-1].fg.jointype = (u8)yymsp[0].minor.yy376; + if( ALWAYS(yymsp[-1].minor.yy607 && yymsp[-1].minor.yy607->nSrc>0) ) yymsp[-1].minor.yy607->a[yymsp[-1].minor.yy607->nSrc-1].fg.jointype = (u8)yymsp[0].minor.yy64; } break; - case 107: /* seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt */ + case 109: /* seltablist ::= stl_prefix nm dbnm as indexed_opt on_opt using_opt */ { - yymsp[-6].minor.yy153 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy153,&yymsp[-5].minor.yy0,&yymsp[-4].minor.yy0,&yymsp[-3].minor.yy0,0,yymsp[-1].minor.yy404,yymsp[0].minor.yy436); - sqlite3SrcListIndexedBy(pParse, yymsp[-6].minor.yy153, &yymsp[-2].minor.yy0); + yymsp[-6].minor.yy607 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy607,&yymsp[-5].minor.yy0,&yymsp[-4].minor.yy0,&yymsp[-3].minor.yy0,0,yymsp[-1].minor.yy626,yymsp[0].minor.yy240); + sqlite3SrcListIndexedBy(pParse, yymsp[-6].minor.yy607, &yymsp[-2].minor.yy0); } break; - case 108: /* seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt */ + case 110: /* seltablist ::= stl_prefix nm dbnm LP exprlist RP as on_opt using_opt */ { - yymsp[-8].minor.yy153 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-8].minor.yy153,&yymsp[-7].minor.yy0,&yymsp[-6].minor.yy0,&yymsp[-2].minor.yy0,0,yymsp[-1].minor.yy404,yymsp[0].minor.yy436); - sqlite3SrcListFuncArgs(pParse, yymsp[-8].minor.yy153, yymsp[-4].minor.yy70); + yymsp[-8].minor.yy607 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-8].minor.yy607,&yymsp[-7].minor.yy0,&yymsp[-6].minor.yy0,&yymsp[-2].minor.yy0,0,yymsp[-1].minor.yy626,yymsp[0].minor.yy240); + sqlite3SrcListFuncArgs(pParse, yymsp[-8].minor.yy607, yymsp[-4].minor.yy562); } break; - case 109: /* seltablist ::= stl_prefix LP select RP as on_opt using_opt */ + case 111: /* seltablist ::= stl_prefix LP select RP as on_opt using_opt */ { - yymsp[-6].minor.yy153 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy153,0,0,&yymsp[-2].minor.yy0,yymsp[-4].minor.yy81,yymsp[-1].minor.yy404,yymsp[0].minor.yy436); + yymsp[-6].minor.yy607 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy607,0,0,&yymsp[-2].minor.yy0,yymsp[-4].minor.yy303,yymsp[-1].minor.yy626,yymsp[0].minor.yy240); } break; - case 110: /* seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt */ + case 112: /* seltablist ::= stl_prefix LP seltablist RP as on_opt using_opt */ { - if( yymsp[-6].minor.yy153==0 && yymsp[-2].minor.yy0.n==0 && yymsp[-1].minor.yy404==0 && yymsp[0].minor.yy436==0 ){ - yymsp[-6].minor.yy153 = yymsp[-4].minor.yy153; - }else if( yymsp[-4].minor.yy153->nSrc==1 ){ - yymsp[-6].minor.yy153 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy153,0,0,&yymsp[-2].minor.yy0,0,yymsp[-1].minor.yy404,yymsp[0].minor.yy436); - if( yymsp[-6].minor.yy153 ){ - SrcItem *pNew = &yymsp[-6].minor.yy153->a[yymsp[-6].minor.yy153->nSrc-1]; - SrcItem *pOld = yymsp[-4].minor.yy153->a; + if( yymsp[-6].minor.yy607==0 && yymsp[-2].minor.yy0.n==0 && yymsp[-1].minor.yy626==0 && yymsp[0].minor.yy240==0 ){ + yymsp[-6].minor.yy607 = yymsp[-4].minor.yy607; + }else if( yymsp[-4].minor.yy607->nSrc==1 ){ + yymsp[-6].minor.yy607 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy607,0,0,&yymsp[-2].minor.yy0,0,yymsp[-1].minor.yy626,yymsp[0].minor.yy240); + if( yymsp[-6].minor.yy607 ){ + SrcItem *pNew = &yymsp[-6].minor.yy607->a[yymsp[-6].minor.yy607->nSrc-1]; + SrcItem *pOld = yymsp[-4].minor.yy607->a; pNew->zName = pOld->zName; 
pNew->zDatabase = pOld->zDatabase; pNew->pSelect = pOld->pSelect; @@ -162002,226 +163469,226 @@ static YYACTIONTYPE yy_reduce( pOld->zName = pOld->zDatabase = 0; pOld->pSelect = 0; } - sqlite3SrcListDelete(pParse->db, yymsp[-4].minor.yy153); + sqlite3SrcListDelete(pParse->db, yymsp[-4].minor.yy607); }else{ Select *pSubquery; - sqlite3SrcListShiftJoinType(yymsp[-4].minor.yy153); - pSubquery = sqlite3SelectNew(pParse,0,yymsp[-4].minor.yy153,0,0,0,0,SF_NestedFrom,0); - yymsp[-6].minor.yy153 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy153,0,0,&yymsp[-2].minor.yy0,pSubquery,yymsp[-1].minor.yy404,yymsp[0].minor.yy436); + sqlite3SrcListShiftJoinType(yymsp[-4].minor.yy607); + pSubquery = sqlite3SelectNew(pParse,0,yymsp[-4].minor.yy607,0,0,0,0,SF_NestedFrom,0); + yymsp[-6].minor.yy607 = sqlite3SrcListAppendFromTerm(pParse,yymsp[-6].minor.yy607,0,0,&yymsp[-2].minor.yy0,pSubquery,yymsp[-1].minor.yy626,yymsp[0].minor.yy240); } } break; - case 111: /* dbnm ::= */ - case 125: /* indexed_opt ::= */ yytestcase(yyruleno==125); + case 113: /* dbnm ::= */ + case 127: /* indexed_opt ::= */ yytestcase(yyruleno==127); {yymsp[1].minor.yy0.z=0; yymsp[1].minor.yy0.n=0;} break; - case 113: /* fullname ::= nm */ + case 115: /* fullname ::= nm */ { - yylhsminor.yy153 = sqlite3SrcListAppend(pParse,0,&yymsp[0].minor.yy0,0); - if( IN_RENAME_OBJECT && yylhsminor.yy153 ) sqlite3RenameTokenMap(pParse, yylhsminor.yy153->a[0].zName, &yymsp[0].minor.yy0); + yylhsminor.yy607 = sqlite3SrcListAppend(pParse,0,&yymsp[0].minor.yy0,0); + if( IN_RENAME_OBJECT && yylhsminor.yy607 ) sqlite3RenameTokenMap(pParse, yylhsminor.yy607->a[0].zName, &yymsp[0].minor.yy0); } - yymsp[0].minor.yy153 = yylhsminor.yy153; + yymsp[0].minor.yy607 = yylhsminor.yy607; break; - case 114: /* fullname ::= nm DOT nm */ + case 116: /* fullname ::= nm DOT nm */ { - yylhsminor.yy153 = sqlite3SrcListAppend(pParse,0,&yymsp[-2].minor.yy0,&yymsp[0].minor.yy0); - if( IN_RENAME_OBJECT && yylhsminor.yy153 ) sqlite3RenameTokenMap(pParse, yylhsminor.yy153->a[0].zName, &yymsp[0].minor.yy0); + yylhsminor.yy607 = sqlite3SrcListAppend(pParse,0,&yymsp[-2].minor.yy0,&yymsp[0].minor.yy0); + if( IN_RENAME_OBJECT && yylhsminor.yy607 ) sqlite3RenameTokenMap(pParse, yylhsminor.yy607->a[0].zName, &yymsp[0].minor.yy0); } - yymsp[-2].minor.yy153 = yylhsminor.yy153; + yymsp[-2].minor.yy607 = yylhsminor.yy607; break; - case 115: /* xfullname ::= nm */ -{yymsp[0].minor.yy153 = sqlite3SrcListAppend(pParse,0,&yymsp[0].minor.yy0,0); /*A-overwrites-X*/} + case 117: /* xfullname ::= nm */ +{yymsp[0].minor.yy607 = sqlite3SrcListAppend(pParse,0,&yymsp[0].minor.yy0,0); /*A-overwrites-X*/} break; - case 116: /* xfullname ::= nm DOT nm */ -{yymsp[-2].minor.yy153 = sqlite3SrcListAppend(pParse,0,&yymsp[-2].minor.yy0,&yymsp[0].minor.yy0); /*A-overwrites-X*/} + case 118: /* xfullname ::= nm DOT nm */ +{yymsp[-2].minor.yy607 = sqlite3SrcListAppend(pParse,0,&yymsp[-2].minor.yy0,&yymsp[0].minor.yy0); /*A-overwrites-X*/} break; - case 117: /* xfullname ::= nm DOT nm AS nm */ + case 119: /* xfullname ::= nm DOT nm AS nm */ { - yymsp[-4].minor.yy153 = sqlite3SrcListAppend(pParse,0,&yymsp[-4].minor.yy0,&yymsp[-2].minor.yy0); /*A-overwrites-X*/ - if( yymsp[-4].minor.yy153 ) yymsp[-4].minor.yy153->a[0].zAlias = sqlite3NameFromToken(pParse->db, &yymsp[0].minor.yy0); + yymsp[-4].minor.yy607 = sqlite3SrcListAppend(pParse,0,&yymsp[-4].minor.yy0,&yymsp[-2].minor.yy0); /*A-overwrites-X*/ + if( yymsp[-4].minor.yy607 ) yymsp[-4].minor.yy607->a[0].zAlias = sqlite3NameFromToken(pParse->db, 
&yymsp[0].minor.yy0); } break; - case 118: /* xfullname ::= nm AS nm */ + case 120: /* xfullname ::= nm AS nm */ { - yymsp[-2].minor.yy153 = sqlite3SrcListAppend(pParse,0,&yymsp[-2].minor.yy0,0); /*A-overwrites-X*/ - if( yymsp[-2].minor.yy153 ) yymsp[-2].minor.yy153->a[0].zAlias = sqlite3NameFromToken(pParse->db, &yymsp[0].minor.yy0); + yymsp[-2].minor.yy607 = sqlite3SrcListAppend(pParse,0,&yymsp[-2].minor.yy0,0); /*A-overwrites-X*/ + if( yymsp[-2].minor.yy607 ) yymsp[-2].minor.yy607->a[0].zAlias = sqlite3NameFromToken(pParse->db, &yymsp[0].minor.yy0); } break; - case 119: /* joinop ::= COMMA|JOIN */ -{ yymsp[0].minor.yy376 = JT_INNER; } + case 121: /* joinop ::= COMMA|JOIN */ +{ yymsp[0].minor.yy64 = JT_INNER; } break; - case 120: /* joinop ::= JOIN_KW JOIN */ -{yymsp[-1].minor.yy376 = sqlite3JoinType(pParse,&yymsp[-1].minor.yy0,0,0); /*X-overwrites-A*/} + case 122: /* joinop ::= JOIN_KW JOIN */ +{yymsp[-1].minor.yy64 = sqlite3JoinType(pParse,&yymsp[-1].minor.yy0,0,0); /*X-overwrites-A*/} break; - case 121: /* joinop ::= JOIN_KW nm JOIN */ -{yymsp[-2].minor.yy376 = sqlite3JoinType(pParse,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0,0); /*X-overwrites-A*/} + case 123: /* joinop ::= JOIN_KW nm JOIN */ +{yymsp[-2].minor.yy64 = sqlite3JoinType(pParse,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0,0); /*X-overwrites-A*/} break; - case 122: /* joinop ::= JOIN_KW nm nm JOIN */ -{yymsp[-3].minor.yy376 = sqlite3JoinType(pParse,&yymsp[-3].minor.yy0,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0);/*X-overwrites-A*/} + case 124: /* joinop ::= JOIN_KW nm nm JOIN */ +{yymsp[-3].minor.yy64 = sqlite3JoinType(pParse,&yymsp[-3].minor.yy0,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0);/*X-overwrites-A*/} break; - case 123: /* on_opt ::= ON expr */ - case 143: /* having_opt ::= HAVING expr */ yytestcase(yyruleno==143); - case 150: /* where_opt ::= WHERE expr */ yytestcase(yyruleno==150); - case 152: /* where_opt_ret ::= WHERE expr */ yytestcase(yyruleno==152); - case 223: /* case_else ::= ELSE expr */ yytestcase(yyruleno==223); - case 244: /* vinto ::= INTO expr */ yytestcase(yyruleno==244); -{yymsp[-1].minor.yy404 = yymsp[0].minor.yy404;} + case 125: /* on_opt ::= ON expr */ + case 145: /* having_opt ::= HAVING expr */ yytestcase(yyruleno==145); + case 152: /* where_opt ::= WHERE expr */ yytestcase(yyruleno==152); + case 154: /* where_opt_ret ::= WHERE expr */ yytestcase(yyruleno==154); + case 225: /* case_else ::= ELSE expr */ yytestcase(yyruleno==225); + case 246: /* vinto ::= INTO expr */ yytestcase(yyruleno==246); +{yymsp[-1].minor.yy626 = yymsp[0].minor.yy626;} break; - case 124: /* on_opt ::= */ - case 142: /* having_opt ::= */ yytestcase(yyruleno==142); - case 144: /* limit_opt ::= */ yytestcase(yyruleno==144); - case 149: /* where_opt ::= */ yytestcase(yyruleno==149); - case 151: /* where_opt_ret ::= */ yytestcase(yyruleno==151); - case 224: /* case_else ::= */ yytestcase(yyruleno==224); - case 226: /* case_operand ::= */ yytestcase(yyruleno==226); - case 245: /* vinto ::= */ yytestcase(yyruleno==245); -{yymsp[1].minor.yy404 = 0;} + case 126: /* on_opt ::= */ + case 144: /* having_opt ::= */ yytestcase(yyruleno==144); + case 146: /* limit_opt ::= */ yytestcase(yyruleno==146); + case 151: /* where_opt ::= */ yytestcase(yyruleno==151); + case 153: /* where_opt_ret ::= */ yytestcase(yyruleno==153); + case 226: /* case_else ::= */ yytestcase(yyruleno==226); + case 228: /* case_operand ::= */ yytestcase(yyruleno==228); + case 247: /* vinto ::= */ yytestcase(yyruleno==247); +{yymsp[1].minor.yy626 = 0;} break; - case 126: 
/* indexed_opt ::= INDEXED BY nm */ + case 128: /* indexed_opt ::= INDEXED BY nm */ {yymsp[-2].minor.yy0 = yymsp[0].minor.yy0;} break; - case 127: /* indexed_opt ::= NOT INDEXED */ + case 129: /* indexed_opt ::= NOT INDEXED */ {yymsp[-1].minor.yy0.z=0; yymsp[-1].minor.yy0.n=1;} break; - case 128: /* using_opt ::= USING LP idlist RP */ -{yymsp[-3].minor.yy436 = yymsp[-1].minor.yy436;} + case 130: /* using_opt ::= USING LP idlist RP */ +{yymsp[-3].minor.yy240 = yymsp[-1].minor.yy240;} break; - case 129: /* using_opt ::= */ - case 171: /* idlist_opt ::= */ yytestcase(yyruleno==171); -{yymsp[1].minor.yy436 = 0;} + case 131: /* using_opt ::= */ + case 173: /* idlist_opt ::= */ yytestcase(yyruleno==173); +{yymsp[1].minor.yy240 = 0;} break; - case 131: /* orderby_opt ::= ORDER BY sortlist */ - case 141: /* groupby_opt ::= GROUP BY nexprlist */ yytestcase(yyruleno==141); -{yymsp[-2].minor.yy70 = yymsp[0].minor.yy70;} + case 133: /* orderby_opt ::= ORDER BY sortlist */ + case 143: /* groupby_opt ::= GROUP BY nexprlist */ yytestcase(yyruleno==143); +{yymsp[-2].minor.yy562 = yymsp[0].minor.yy562;} break; - case 132: /* sortlist ::= sortlist COMMA expr sortorder nulls */ + case 134: /* sortlist ::= sortlist COMMA expr sortorder nulls */ { - yymsp[-4].minor.yy70 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy70,yymsp[-2].minor.yy404); - sqlite3ExprListSetSortOrder(yymsp[-4].minor.yy70,yymsp[-1].minor.yy376,yymsp[0].minor.yy376); + yymsp[-4].minor.yy562 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy562,yymsp[-2].minor.yy626); + sqlite3ExprListSetSortOrder(yymsp[-4].minor.yy562,yymsp[-1].minor.yy64,yymsp[0].minor.yy64); } break; - case 133: /* sortlist ::= expr sortorder nulls */ + case 135: /* sortlist ::= expr sortorder nulls */ { - yymsp[-2].minor.yy70 = sqlite3ExprListAppend(pParse,0,yymsp[-2].minor.yy404); /*A-overwrites-Y*/ - sqlite3ExprListSetSortOrder(yymsp[-2].minor.yy70,yymsp[-1].minor.yy376,yymsp[0].minor.yy376); + yymsp[-2].minor.yy562 = sqlite3ExprListAppend(pParse,0,yymsp[-2].minor.yy626); /*A-overwrites-Y*/ + sqlite3ExprListSetSortOrder(yymsp[-2].minor.yy562,yymsp[-1].minor.yy64,yymsp[0].minor.yy64); } break; - case 134: /* sortorder ::= ASC */ -{yymsp[0].minor.yy376 = SQLITE_SO_ASC;} + case 136: /* sortorder ::= ASC */ +{yymsp[0].minor.yy64 = SQLITE_SO_ASC;} break; - case 135: /* sortorder ::= DESC */ -{yymsp[0].minor.yy376 = SQLITE_SO_DESC;} + case 137: /* sortorder ::= DESC */ +{yymsp[0].minor.yy64 = SQLITE_SO_DESC;} break; - case 136: /* sortorder ::= */ - case 139: /* nulls ::= */ yytestcase(yyruleno==139); -{yymsp[1].minor.yy376 = SQLITE_SO_UNDEFINED;} + case 138: /* sortorder ::= */ + case 141: /* nulls ::= */ yytestcase(yyruleno==141); +{yymsp[1].minor.yy64 = SQLITE_SO_UNDEFINED;} break; - case 137: /* nulls ::= NULLS FIRST */ -{yymsp[-1].minor.yy376 = SQLITE_SO_ASC;} + case 139: /* nulls ::= NULLS FIRST */ +{yymsp[-1].minor.yy64 = SQLITE_SO_ASC;} break; - case 138: /* nulls ::= NULLS LAST */ -{yymsp[-1].minor.yy376 = SQLITE_SO_DESC;} + case 140: /* nulls ::= NULLS LAST */ +{yymsp[-1].minor.yy64 = SQLITE_SO_DESC;} break; - case 145: /* limit_opt ::= LIMIT expr */ -{yymsp[-1].minor.yy404 = sqlite3PExpr(pParse,TK_LIMIT,yymsp[0].minor.yy404,0);} + case 147: /* limit_opt ::= LIMIT expr */ +{yymsp[-1].minor.yy626 = sqlite3PExpr(pParse,TK_LIMIT,yymsp[0].minor.yy626,0);} break; - case 146: /* limit_opt ::= LIMIT expr OFFSET expr */ -{yymsp[-3].minor.yy404 = sqlite3PExpr(pParse,TK_LIMIT,yymsp[-2].minor.yy404,yymsp[0].minor.yy404);} + case 148: /* limit_opt ::= LIMIT expr OFFSET expr 
*/ +{yymsp[-3].minor.yy626 = sqlite3PExpr(pParse,TK_LIMIT,yymsp[-2].minor.yy626,yymsp[0].minor.yy626);} break; - case 147: /* limit_opt ::= LIMIT expr COMMA expr */ -{yymsp[-3].minor.yy404 = sqlite3PExpr(pParse,TK_LIMIT,yymsp[0].minor.yy404,yymsp[-2].minor.yy404);} + case 149: /* limit_opt ::= LIMIT expr COMMA expr */ +{yymsp[-3].minor.yy626 = sqlite3PExpr(pParse,TK_LIMIT,yymsp[0].minor.yy626,yymsp[-2].minor.yy626);} break; - case 148: /* cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret */ + case 150: /* cmd ::= with DELETE FROM xfullname indexed_opt where_opt_ret */ { - sqlite3SrcListIndexedBy(pParse, yymsp[-2].minor.yy153, &yymsp[-1].minor.yy0); - sqlite3DeleteFrom(pParse,yymsp[-2].minor.yy153,yymsp[0].minor.yy404,0,0); + sqlite3SrcListIndexedBy(pParse, yymsp[-2].minor.yy607, &yymsp[-1].minor.yy0); + sqlite3DeleteFrom(pParse,yymsp[-2].minor.yy607,yymsp[0].minor.yy626,0,0); } break; - case 153: /* where_opt_ret ::= RETURNING selcollist */ -{sqlite3AddReturning(pParse,yymsp[0].minor.yy70); yymsp[-1].minor.yy404 = 0;} + case 155: /* where_opt_ret ::= RETURNING selcollist */ +{sqlite3AddReturning(pParse,yymsp[0].minor.yy562); yymsp[-1].minor.yy626 = 0;} break; - case 154: /* where_opt_ret ::= WHERE expr RETURNING selcollist */ -{sqlite3AddReturning(pParse,yymsp[0].minor.yy70); yymsp[-3].minor.yy404 = yymsp[-2].minor.yy404;} + case 156: /* where_opt_ret ::= WHERE expr RETURNING selcollist */ +{sqlite3AddReturning(pParse,yymsp[0].minor.yy562); yymsp[-3].minor.yy626 = yymsp[-2].minor.yy626;} break; - case 155: /* cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret */ + case 157: /* cmd ::= with UPDATE orconf xfullname indexed_opt SET setlist from where_opt_ret */ { - sqlite3SrcListIndexedBy(pParse, yymsp[-5].minor.yy153, &yymsp[-4].minor.yy0); - sqlite3ExprListCheckLength(pParse,yymsp[-2].minor.yy70,"set list"); - yymsp[-5].minor.yy153 = sqlite3SrcListAppendList(pParse, yymsp[-5].minor.yy153, yymsp[-1].minor.yy153); - sqlite3Update(pParse,yymsp[-5].minor.yy153,yymsp[-2].minor.yy70,yymsp[0].minor.yy404,yymsp[-6].minor.yy376,0,0,0); + sqlite3SrcListIndexedBy(pParse, yymsp[-5].minor.yy607, &yymsp[-4].minor.yy0); + sqlite3ExprListCheckLength(pParse,yymsp[-2].minor.yy562,"set list"); + yymsp[-5].minor.yy607 = sqlite3SrcListAppendList(pParse, yymsp[-5].minor.yy607, yymsp[-1].minor.yy607); + sqlite3Update(pParse,yymsp[-5].minor.yy607,yymsp[-2].minor.yy562,yymsp[0].minor.yy626,yymsp[-6].minor.yy64,0,0,0); } break; - case 156: /* setlist ::= setlist COMMA nm EQ expr */ + case 158: /* setlist ::= setlist COMMA nm EQ expr */ { - yymsp[-4].minor.yy70 = sqlite3ExprListAppend(pParse, yymsp[-4].minor.yy70, yymsp[0].minor.yy404); - sqlite3ExprListSetName(pParse, yymsp[-4].minor.yy70, &yymsp[-2].minor.yy0, 1); + yymsp[-4].minor.yy562 = sqlite3ExprListAppend(pParse, yymsp[-4].minor.yy562, yymsp[0].minor.yy626); + sqlite3ExprListSetName(pParse, yymsp[-4].minor.yy562, &yymsp[-2].minor.yy0, 1); } break; - case 157: /* setlist ::= setlist COMMA LP idlist RP EQ expr */ + case 159: /* setlist ::= setlist COMMA LP idlist RP EQ expr */ { - yymsp[-6].minor.yy70 = sqlite3ExprListAppendVector(pParse, yymsp[-6].minor.yy70, yymsp[-3].minor.yy436, yymsp[0].minor.yy404); + yymsp[-6].minor.yy562 = sqlite3ExprListAppendVector(pParse, yymsp[-6].minor.yy562, yymsp[-3].minor.yy240, yymsp[0].minor.yy626); } break; - case 158: /* setlist ::= nm EQ expr */ + case 160: /* setlist ::= nm EQ expr */ { - yylhsminor.yy70 = sqlite3ExprListAppend(pParse, 0, yymsp[0].minor.yy404); - 
sqlite3ExprListSetName(pParse, yylhsminor.yy70, &yymsp[-2].minor.yy0, 1); + yylhsminor.yy562 = sqlite3ExprListAppend(pParse, 0, yymsp[0].minor.yy626); + sqlite3ExprListSetName(pParse, yylhsminor.yy562, &yymsp[-2].minor.yy0, 1); } - yymsp[-2].minor.yy70 = yylhsminor.yy70; + yymsp[-2].minor.yy562 = yylhsminor.yy562; break; - case 159: /* setlist ::= LP idlist RP EQ expr */ + case 161: /* setlist ::= LP idlist RP EQ expr */ { - yymsp[-4].minor.yy70 = sqlite3ExprListAppendVector(pParse, 0, yymsp[-3].minor.yy436, yymsp[0].minor.yy404); + yymsp[-4].minor.yy562 = sqlite3ExprListAppendVector(pParse, 0, yymsp[-3].minor.yy240, yymsp[0].minor.yy626); } break; - case 160: /* cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert */ + case 162: /* cmd ::= with insert_cmd INTO xfullname idlist_opt select upsert */ { - sqlite3Insert(pParse, yymsp[-3].minor.yy153, yymsp[-1].minor.yy81, yymsp[-2].minor.yy436, yymsp[-5].minor.yy376, yymsp[0].minor.yy190); + sqlite3Insert(pParse, yymsp[-3].minor.yy607, yymsp[-1].minor.yy303, yymsp[-2].minor.yy240, yymsp[-5].minor.yy64, yymsp[0].minor.yy138); } break; - case 161: /* cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning */ + case 163: /* cmd ::= with insert_cmd INTO xfullname idlist_opt DEFAULT VALUES returning */ { - sqlite3Insert(pParse, yymsp[-4].minor.yy153, 0, yymsp[-3].minor.yy436, yymsp[-6].minor.yy376, 0); + sqlite3Insert(pParse, yymsp[-4].minor.yy607, 0, yymsp[-3].minor.yy240, yymsp[-6].minor.yy64, 0); } break; - case 162: /* upsert ::= */ -{ yymsp[1].minor.yy190 = 0; } + case 164: /* upsert ::= */ +{ yymsp[1].minor.yy138 = 0; } break; - case 163: /* upsert ::= RETURNING selcollist */ -{ yymsp[-1].minor.yy190 = 0; sqlite3AddReturning(pParse,yymsp[0].minor.yy70); } + case 165: /* upsert ::= RETURNING selcollist */ +{ yymsp[-1].minor.yy138 = 0; sqlite3AddReturning(pParse,yymsp[0].minor.yy562); } break; - case 164: /* upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert */ -{ yymsp[-11].minor.yy190 = sqlite3UpsertNew(pParse->db,yymsp[-8].minor.yy70,yymsp[-6].minor.yy404,yymsp[-2].minor.yy70,yymsp[-1].minor.yy404,yymsp[0].minor.yy190);} + case 166: /* upsert ::= ON CONFLICT LP sortlist RP where_opt DO UPDATE SET setlist where_opt upsert */ +{ yymsp[-11].minor.yy138 = sqlite3UpsertNew(pParse->db,yymsp[-8].minor.yy562,yymsp[-6].minor.yy626,yymsp[-2].minor.yy562,yymsp[-1].minor.yy626,yymsp[0].minor.yy138);} break; - case 165: /* upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert */ -{ yymsp[-8].minor.yy190 = sqlite3UpsertNew(pParse->db,yymsp[-5].minor.yy70,yymsp[-3].minor.yy404,0,0,yymsp[0].minor.yy190); } + case 167: /* upsert ::= ON CONFLICT LP sortlist RP where_opt DO NOTHING upsert */ +{ yymsp[-8].minor.yy138 = sqlite3UpsertNew(pParse->db,yymsp[-5].minor.yy562,yymsp[-3].minor.yy626,0,0,yymsp[0].minor.yy138); } break; - case 166: /* upsert ::= ON CONFLICT DO NOTHING returning */ -{ yymsp[-4].minor.yy190 = sqlite3UpsertNew(pParse->db,0,0,0,0,0); } + case 168: /* upsert ::= ON CONFLICT DO NOTHING returning */ +{ yymsp[-4].minor.yy138 = sqlite3UpsertNew(pParse->db,0,0,0,0,0); } break; - case 167: /* upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning */ -{ yymsp[-7].minor.yy190 = sqlite3UpsertNew(pParse->db,0,0,yymsp[-2].minor.yy70,yymsp[-1].minor.yy404,0);} + case 169: /* upsert ::= ON CONFLICT DO UPDATE SET setlist where_opt returning */ +{ yymsp[-7].minor.yy138 = sqlite3UpsertNew(pParse->db,0,0,yymsp[-2].minor.yy562,yymsp[-1].minor.yy626,0);} break; - case 168: /* 
returning ::= RETURNING selcollist */ -{sqlite3AddReturning(pParse,yymsp[0].minor.yy70);} + case 170: /* returning ::= RETURNING selcollist */ +{sqlite3AddReturning(pParse,yymsp[0].minor.yy562);} break; - case 172: /* idlist_opt ::= LP idlist RP */ -{yymsp[-2].minor.yy436 = yymsp[-1].minor.yy436;} + case 174: /* idlist_opt ::= LP idlist RP */ +{yymsp[-2].minor.yy240 = yymsp[-1].minor.yy240;} break; - case 173: /* idlist ::= idlist COMMA nm */ -{yymsp[-2].minor.yy436 = sqlite3IdListAppend(pParse,yymsp[-2].minor.yy436,&yymsp[0].minor.yy0);} + case 175: /* idlist ::= idlist COMMA nm */ +{yymsp[-2].minor.yy240 = sqlite3IdListAppend(pParse,yymsp[-2].minor.yy240,&yymsp[0].minor.yy0);} break; - case 174: /* idlist ::= nm */ -{yymsp[0].minor.yy436 = sqlite3IdListAppend(pParse,0,&yymsp[0].minor.yy0); /*A-overwrites-Y*/} + case 176: /* idlist ::= nm */ +{yymsp[0].minor.yy240 = sqlite3IdListAppend(pParse,0,&yymsp[0].minor.yy0); /*A-overwrites-Y*/} break; - case 175: /* expr ::= LP expr RP */ -{yymsp[-2].minor.yy404 = yymsp[-1].minor.yy404;} + case 177: /* expr ::= LP expr RP */ +{yymsp[-2].minor.yy626 = yymsp[-1].minor.yy626;} break; - case 176: /* expr ::= ID|INDEXED */ - case 177: /* expr ::= JOIN_KW */ yytestcase(yyruleno==177); -{yymsp[0].minor.yy404=tokenExpr(pParse,TK_ID,yymsp[0].minor.yy0); /*A-overwrites-X*/} + case 178: /* expr ::= ID|INDEXED */ + case 179: /* expr ::= JOIN_KW */ yytestcase(yyruleno==179); +{yymsp[0].minor.yy626=tokenExpr(pParse,TK_ID,yymsp[0].minor.yy0); /*A-overwrites-X*/} break; - case 178: /* expr ::= nm DOT nm */ + case 180: /* expr ::= nm DOT nm */ { Expr *temp1 = sqlite3ExprAlloc(pParse->db, TK_ID, &yymsp[-2].minor.yy0, 1); Expr *temp2 = sqlite3ExprAlloc(pParse->db, TK_ID, &yymsp[0].minor.yy0, 1); @@ -162229,11 +163696,11 @@ static YYACTIONTYPE yy_reduce( sqlite3RenameTokenMap(pParse, (void*)temp2, &yymsp[0].minor.yy0); sqlite3RenameTokenMap(pParse, (void*)temp1, &yymsp[-2].minor.yy0); } - yylhsminor.yy404 = sqlite3PExpr(pParse, TK_DOT, temp1, temp2); + yylhsminor.yy626 = sqlite3PExpr(pParse, TK_DOT, temp1, temp2); } - yymsp[-2].minor.yy404 = yylhsminor.yy404; + yymsp[-2].minor.yy626 = yylhsminor.yy626; break; - case 179: /* expr ::= nm DOT nm DOT nm */ + case 181: /* expr ::= nm DOT nm DOT nm */ { Expr *temp1 = sqlite3ExprAlloc(pParse->db, TK_ID, &yymsp[-4].minor.yy0, 1); Expr *temp2 = sqlite3ExprAlloc(pParse->db, TK_ID, &yymsp[-2].minor.yy0, 1); @@ -162243,26 +163710,26 @@ static YYACTIONTYPE yy_reduce( sqlite3RenameTokenMap(pParse, (void*)temp3, &yymsp[0].minor.yy0); sqlite3RenameTokenMap(pParse, (void*)temp2, &yymsp[-2].minor.yy0); } - yylhsminor.yy404 = sqlite3PExpr(pParse, TK_DOT, temp1, temp4); + yylhsminor.yy626 = sqlite3PExpr(pParse, TK_DOT, temp1, temp4); } - yymsp[-4].minor.yy404 = yylhsminor.yy404; + yymsp[-4].minor.yy626 = yylhsminor.yy626; break; - case 180: /* term ::= NULL|FLOAT|BLOB */ - case 181: /* term ::= STRING */ yytestcase(yyruleno==181); -{yymsp[0].minor.yy404=tokenExpr(pParse,yymsp[0].major,yymsp[0].minor.yy0); /*A-overwrites-X*/} + case 182: /* term ::= NULL|FLOAT|BLOB */ + case 183: /* term ::= STRING */ yytestcase(yyruleno==183); +{yymsp[0].minor.yy626=tokenExpr(pParse,yymsp[0].major,yymsp[0].minor.yy0); /*A-overwrites-X*/} break; - case 182: /* term ::= INTEGER */ + case 184: /* term ::= INTEGER */ { - yylhsminor.yy404 = sqlite3ExprAlloc(pParse->db, TK_INTEGER, &yymsp[0].minor.yy0, 1); + yylhsminor.yy626 = sqlite3ExprAlloc(pParse->db, TK_INTEGER, &yymsp[0].minor.yy0, 1); } - yymsp[0].minor.yy404 = yylhsminor.yy404; + yymsp[0].minor.yy626 
= yylhsminor.yy626; break; - case 183: /* expr ::= VARIABLE */ + case 185: /* expr ::= VARIABLE */ { if( !(yymsp[0].minor.yy0.z[0]=='#' && sqlite3Isdigit(yymsp[0].minor.yy0.z[1])) ){ u32 n = yymsp[0].minor.yy0.n; - yymsp[0].minor.yy404 = tokenExpr(pParse, TK_VARIABLE, yymsp[0].minor.yy0); - sqlite3ExprAssignVarNumber(pParse, yymsp[0].minor.yy404, n); + yymsp[0].minor.yy626 = tokenExpr(pParse, TK_VARIABLE, yymsp[0].minor.yy0); + sqlite3ExprAssignVarNumber(pParse, yymsp[0].minor.yy626, n); }else{ /* When doing a nested parse, one can include terms in an expression ** that look like this: #1 #2 ... These terms refer to registers @@ -162271,159 +163738,159 @@ static YYACTIONTYPE yy_reduce( assert( t.n>=2 ); if( pParse->nested==0 ){ sqlite3ErrorMsg(pParse, "near \"%T\": syntax error", &t); - yymsp[0].minor.yy404 = 0; + yymsp[0].minor.yy626 = 0; }else{ - yymsp[0].minor.yy404 = sqlite3PExpr(pParse, TK_REGISTER, 0, 0); - if( yymsp[0].minor.yy404 ) sqlite3GetInt32(&t.z[1], &yymsp[0].minor.yy404->iTable); + yymsp[0].minor.yy626 = sqlite3PExpr(pParse, TK_REGISTER, 0, 0); + if( yymsp[0].minor.yy626 ) sqlite3GetInt32(&t.z[1], &yymsp[0].minor.yy626->iTable); } } } break; - case 184: /* expr ::= expr COLLATE ID|STRING */ + case 186: /* expr ::= expr COLLATE ID|STRING */ { - yymsp[-2].minor.yy404 = sqlite3ExprAddCollateToken(pParse, yymsp[-2].minor.yy404, &yymsp[0].minor.yy0, 1); + yymsp[-2].minor.yy626 = sqlite3ExprAddCollateToken(pParse, yymsp[-2].minor.yy626, &yymsp[0].minor.yy0, 1); } break; - case 185: /* expr ::= CAST LP expr AS typetoken RP */ + case 187: /* expr ::= CAST LP expr AS typetoken RP */ { - yymsp[-5].minor.yy404 = sqlite3ExprAlloc(pParse->db, TK_CAST, &yymsp[-1].minor.yy0, 1); - sqlite3ExprAttachSubtrees(pParse->db, yymsp[-5].minor.yy404, yymsp[-3].minor.yy404, 0); + yymsp[-5].minor.yy626 = sqlite3ExprAlloc(pParse->db, TK_CAST, &yymsp[-1].minor.yy0, 1); + sqlite3ExprAttachSubtrees(pParse->db, yymsp[-5].minor.yy626, yymsp[-3].minor.yy626, 0); } break; - case 186: /* expr ::= ID|INDEXED LP distinct exprlist RP */ + case 188: /* expr ::= ID|INDEXED LP distinct exprlist RP */ { - yylhsminor.yy404 = sqlite3ExprFunction(pParse, yymsp[-1].minor.yy70, &yymsp[-4].minor.yy0, yymsp[-2].minor.yy376); + yylhsminor.yy626 = sqlite3ExprFunction(pParse, yymsp[-1].minor.yy562, &yymsp[-4].minor.yy0, yymsp[-2].minor.yy64); } - yymsp[-4].minor.yy404 = yylhsminor.yy404; + yymsp[-4].minor.yy626 = yylhsminor.yy626; break; - case 187: /* expr ::= ID|INDEXED LP STAR RP */ + case 189: /* expr ::= ID|INDEXED LP STAR RP */ { - yylhsminor.yy404 = sqlite3ExprFunction(pParse, 0, &yymsp[-3].minor.yy0, 0); + yylhsminor.yy626 = sqlite3ExprFunction(pParse, 0, &yymsp[-3].minor.yy0, 0); } - yymsp[-3].minor.yy404 = yylhsminor.yy404; + yymsp[-3].minor.yy626 = yylhsminor.yy626; break; - case 188: /* expr ::= ID|INDEXED LP distinct exprlist RP filter_over */ + case 190: /* expr ::= ID|INDEXED LP distinct exprlist RP filter_over */ { - yylhsminor.yy404 = sqlite3ExprFunction(pParse, yymsp[-2].minor.yy70, &yymsp[-5].minor.yy0, yymsp[-3].minor.yy376); - sqlite3WindowAttach(pParse, yylhsminor.yy404, yymsp[0].minor.yy49); + yylhsminor.yy626 = sqlite3ExprFunction(pParse, yymsp[-2].minor.yy562, &yymsp[-5].minor.yy0, yymsp[-3].minor.yy64); + sqlite3WindowAttach(pParse, yylhsminor.yy626, yymsp[0].minor.yy375); } - yymsp[-5].minor.yy404 = yylhsminor.yy404; + yymsp[-5].minor.yy626 = yylhsminor.yy626; break; - case 189: /* expr ::= ID|INDEXED LP STAR RP filter_over */ + case 191: /* expr ::= ID|INDEXED LP STAR RP filter_over */ { - 
yylhsminor.yy404 = sqlite3ExprFunction(pParse, 0, &yymsp[-4].minor.yy0, 0); - sqlite3WindowAttach(pParse, yylhsminor.yy404, yymsp[0].minor.yy49); + yylhsminor.yy626 = sqlite3ExprFunction(pParse, 0, &yymsp[-4].minor.yy0, 0); + sqlite3WindowAttach(pParse, yylhsminor.yy626, yymsp[0].minor.yy375); } - yymsp[-4].minor.yy404 = yylhsminor.yy404; + yymsp[-4].minor.yy626 = yylhsminor.yy626; break; - case 190: /* term ::= CTIME_KW */ + case 192: /* term ::= CTIME_KW */ { - yylhsminor.yy404 = sqlite3ExprFunction(pParse, 0, &yymsp[0].minor.yy0, 0); + yylhsminor.yy626 = sqlite3ExprFunction(pParse, 0, &yymsp[0].minor.yy0, 0); } - yymsp[0].minor.yy404 = yylhsminor.yy404; + yymsp[0].minor.yy626 = yylhsminor.yy626; break; - case 191: /* expr ::= LP nexprlist COMMA expr RP */ + case 193: /* expr ::= LP nexprlist COMMA expr RP */ { - ExprList *pList = sqlite3ExprListAppend(pParse, yymsp[-3].minor.yy70, yymsp[-1].minor.yy404); - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_VECTOR, 0, 0); - if( yymsp[-4].minor.yy404 ){ - yymsp[-4].minor.yy404->x.pList = pList; + ExprList *pList = sqlite3ExprListAppend(pParse, yymsp[-3].minor.yy562, yymsp[-1].minor.yy626); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_VECTOR, 0, 0); + if( yymsp[-4].minor.yy626 ){ + yymsp[-4].minor.yy626->x.pList = pList; if( ALWAYS(pList->nExpr) ){ - yymsp[-4].minor.yy404->flags |= pList->a[0].pExpr->flags & EP_Propagate; + yymsp[-4].minor.yy626->flags |= pList->a[0].pExpr->flags & EP_Propagate; } }else{ sqlite3ExprListDelete(pParse->db, pList); } } break; - case 192: /* expr ::= expr AND expr */ -{yymsp[-2].minor.yy404=sqlite3ExprAnd(pParse,yymsp[-2].minor.yy404,yymsp[0].minor.yy404);} + case 194: /* expr ::= expr AND expr */ +{yymsp[-2].minor.yy626=sqlite3ExprAnd(pParse,yymsp[-2].minor.yy626,yymsp[0].minor.yy626);} break; - case 193: /* expr ::= expr OR expr */ - case 194: /* expr ::= expr LT|GT|GE|LE expr */ yytestcase(yyruleno==194); - case 195: /* expr ::= expr EQ|NE expr */ yytestcase(yyruleno==195); - case 196: /* expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr */ yytestcase(yyruleno==196); - case 197: /* expr ::= expr PLUS|MINUS expr */ yytestcase(yyruleno==197); - case 198: /* expr ::= expr STAR|SLASH|REM expr */ yytestcase(yyruleno==198); - case 199: /* expr ::= expr CONCAT expr */ yytestcase(yyruleno==199); -{yymsp[-2].minor.yy404=sqlite3PExpr(pParse,yymsp[-1].major,yymsp[-2].minor.yy404,yymsp[0].minor.yy404);} + case 195: /* expr ::= expr OR expr */ + case 196: /* expr ::= expr LT|GT|GE|LE expr */ yytestcase(yyruleno==196); + case 197: /* expr ::= expr EQ|NE expr */ yytestcase(yyruleno==197); + case 198: /* expr ::= expr BITAND|BITOR|LSHIFT|RSHIFT expr */ yytestcase(yyruleno==198); + case 199: /* expr ::= expr PLUS|MINUS expr */ yytestcase(yyruleno==199); + case 200: /* expr ::= expr STAR|SLASH|REM expr */ yytestcase(yyruleno==200); + case 201: /* expr ::= expr CONCAT expr */ yytestcase(yyruleno==201); +{yymsp[-2].minor.yy626=sqlite3PExpr(pParse,yymsp[-1].major,yymsp[-2].minor.yy626,yymsp[0].minor.yy626);} break; - case 200: /* likeop ::= NOT LIKE_KW|MATCH */ + case 202: /* likeop ::= NOT LIKE_KW|MATCH */ {yymsp[-1].minor.yy0=yymsp[0].minor.yy0; yymsp[-1].minor.yy0.n|=0x80000000; /*yymsp[-1].minor.yy0-overwrite-yymsp[0].minor.yy0*/} break; - case 201: /* expr ::= expr likeop expr */ + case 203: /* expr ::= expr likeop expr */ { ExprList *pList; int bNot = yymsp[-1].minor.yy0.n & 0x80000000; yymsp[-1].minor.yy0.n &= 0x7fffffff; - pList = sqlite3ExprListAppend(pParse,0, yymsp[0].minor.yy404); - pList = 
sqlite3ExprListAppend(pParse,pList, yymsp[-2].minor.yy404); - yymsp[-2].minor.yy404 = sqlite3ExprFunction(pParse, pList, &yymsp[-1].minor.yy0, 0); - if( bNot ) yymsp[-2].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-2].minor.yy404, 0); - if( yymsp[-2].minor.yy404 ) yymsp[-2].minor.yy404->flags |= EP_InfixFunc; + pList = sqlite3ExprListAppend(pParse,0, yymsp[0].minor.yy626); + pList = sqlite3ExprListAppend(pParse,pList, yymsp[-2].minor.yy626); + yymsp[-2].minor.yy626 = sqlite3ExprFunction(pParse, pList, &yymsp[-1].minor.yy0, 0); + if( bNot ) yymsp[-2].minor.yy626 = sqlite3PExpr(pParse, TK_NOT, yymsp[-2].minor.yy626, 0); + if( yymsp[-2].minor.yy626 ) yymsp[-2].minor.yy626->flags |= EP_InfixFunc; } break; - case 202: /* expr ::= expr likeop expr ESCAPE expr */ + case 204: /* expr ::= expr likeop expr ESCAPE expr */ { ExprList *pList; int bNot = yymsp[-3].minor.yy0.n & 0x80000000; yymsp[-3].minor.yy0.n &= 0x7fffffff; - pList = sqlite3ExprListAppend(pParse,0, yymsp[-2].minor.yy404); - pList = sqlite3ExprListAppend(pParse,pList, yymsp[-4].minor.yy404); - pList = sqlite3ExprListAppend(pParse,pList, yymsp[0].minor.yy404); - yymsp[-4].minor.yy404 = sqlite3ExprFunction(pParse, pList, &yymsp[-3].minor.yy0, 0); - if( bNot ) yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy404, 0); - if( yymsp[-4].minor.yy404 ) yymsp[-4].minor.yy404->flags |= EP_InfixFunc; + pList = sqlite3ExprListAppend(pParse,0, yymsp[-2].minor.yy626); + pList = sqlite3ExprListAppend(pParse,pList, yymsp[-4].minor.yy626); + pList = sqlite3ExprListAppend(pParse,pList, yymsp[0].minor.yy626); + yymsp[-4].minor.yy626 = sqlite3ExprFunction(pParse, pList, &yymsp[-3].minor.yy0, 0); + if( bNot ) yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy626, 0); + if( yymsp[-4].minor.yy626 ) yymsp[-4].minor.yy626->flags |= EP_InfixFunc; } break; - case 203: /* expr ::= expr ISNULL|NOTNULL */ -{yymsp[-1].minor.yy404 = sqlite3PExpr(pParse,yymsp[0].major,yymsp[-1].minor.yy404,0);} + case 205: /* expr ::= expr ISNULL|NOTNULL */ +{yymsp[-1].minor.yy626 = sqlite3PExpr(pParse,yymsp[0].major,yymsp[-1].minor.yy626,0);} break; - case 204: /* expr ::= expr NOT NULL */ -{yymsp[-2].minor.yy404 = sqlite3PExpr(pParse,TK_NOTNULL,yymsp[-2].minor.yy404,0);} + case 206: /* expr ::= expr NOT NULL */ +{yymsp[-2].minor.yy626 = sqlite3PExpr(pParse,TK_NOTNULL,yymsp[-2].minor.yy626,0);} break; - case 205: /* expr ::= expr IS expr */ + case 207: /* expr ::= expr IS expr */ { - yymsp[-2].minor.yy404 = sqlite3PExpr(pParse,TK_IS,yymsp[-2].minor.yy404,yymsp[0].minor.yy404); - binaryToUnaryIfNull(pParse, yymsp[0].minor.yy404, yymsp[-2].minor.yy404, TK_ISNULL); + yymsp[-2].minor.yy626 = sqlite3PExpr(pParse,TK_IS,yymsp[-2].minor.yy626,yymsp[0].minor.yy626); + binaryToUnaryIfNull(pParse, yymsp[0].minor.yy626, yymsp[-2].minor.yy626, TK_ISNULL); } break; - case 206: /* expr ::= expr IS NOT expr */ + case 208: /* expr ::= expr IS NOT expr */ { - yymsp[-3].minor.yy404 = sqlite3PExpr(pParse,TK_ISNOT,yymsp[-3].minor.yy404,yymsp[0].minor.yy404); - binaryToUnaryIfNull(pParse, yymsp[0].minor.yy404, yymsp[-3].minor.yy404, TK_NOTNULL); + yymsp[-3].minor.yy626 = sqlite3PExpr(pParse,TK_ISNOT,yymsp[-3].minor.yy626,yymsp[0].minor.yy626); + binaryToUnaryIfNull(pParse, yymsp[0].minor.yy626, yymsp[-3].minor.yy626, TK_NOTNULL); } break; - case 207: /* expr ::= NOT expr */ - case 208: /* expr ::= BITNOT expr */ yytestcase(yyruleno==208); -{yymsp[-1].minor.yy404 = sqlite3PExpr(pParse, yymsp[-1].major, yymsp[0].minor.yy404, 0);/*A-overwrites-B*/} + case 209: 
/* expr ::= NOT expr */ + case 210: /* expr ::= BITNOT expr */ yytestcase(yyruleno==210); +{yymsp[-1].minor.yy626 = sqlite3PExpr(pParse, yymsp[-1].major, yymsp[0].minor.yy626, 0);/*A-overwrites-B*/} break; - case 209: /* expr ::= PLUS|MINUS expr */ + case 211: /* expr ::= PLUS|MINUS expr */ { - yymsp[-1].minor.yy404 = sqlite3PExpr(pParse, yymsp[-1].major==TK_PLUS ? TK_UPLUS : TK_UMINUS, yymsp[0].minor.yy404, 0); + yymsp[-1].minor.yy626 = sqlite3PExpr(pParse, yymsp[-1].major==TK_PLUS ? TK_UPLUS : TK_UMINUS, yymsp[0].minor.yy626, 0); /*A-overwrites-B*/ } break; - case 210: /* between_op ::= BETWEEN */ - case 213: /* in_op ::= IN */ yytestcase(yyruleno==213); -{yymsp[0].minor.yy376 = 0;} + case 212: /* between_op ::= BETWEEN */ + case 215: /* in_op ::= IN */ yytestcase(yyruleno==215); +{yymsp[0].minor.yy64 = 0;} break; - case 212: /* expr ::= expr between_op expr AND expr */ + case 214: /* expr ::= expr between_op expr AND expr */ { - ExprList *pList = sqlite3ExprListAppend(pParse,0, yymsp[-2].minor.yy404); - pList = sqlite3ExprListAppend(pParse,pList, yymsp[0].minor.yy404); - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_BETWEEN, yymsp[-4].minor.yy404, 0); - if( yymsp[-4].minor.yy404 ){ - yymsp[-4].minor.yy404->x.pList = pList; + ExprList *pList = sqlite3ExprListAppend(pParse,0, yymsp[-2].minor.yy626); + pList = sqlite3ExprListAppend(pParse,pList, yymsp[0].minor.yy626); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_BETWEEN, yymsp[-4].minor.yy626, 0); + if( yymsp[-4].minor.yy626 ){ + yymsp[-4].minor.yy626->x.pList = pList; }else{ sqlite3ExprListDelete(pParse->db, pList); } - if( yymsp[-3].minor.yy376 ) yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy404, 0); + if( yymsp[-3].minor.yy64 ) yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy626, 0); } break; - case 215: /* expr ::= expr in_op LP exprlist RP */ + case 217: /* expr ::= expr in_op LP exprlist RP */ { - if( yymsp[-1].minor.yy70==0 ){ + if( yymsp[-1].minor.yy562==0 ){ /* Expressions of the form ** ** expr1 IN () @@ -162432,197 +163899,205 @@ static YYACTIONTYPE yy_reduce( ** simplify to constants 0 (false) and 1 (true), respectively, ** regardless of the value of expr1. */ - sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy404); - yymsp[-4].minor.yy404 = sqlite3Expr(pParse->db, TK_INTEGER, yymsp[-3].minor.yy376 ? "1" : "0"); - }else if( yymsp[-1].minor.yy70->nExpr==1 && sqlite3ExprIsConstant(yymsp[-1].minor.yy70->a[0].pExpr) ){ - Expr *pRHS = yymsp[-1].minor.yy70->a[0].pExpr; - yymsp[-1].minor.yy70->a[0].pExpr = 0; - sqlite3ExprListDelete(pParse->db, yymsp[-1].minor.yy70); - pRHS = sqlite3PExpr(pParse, TK_UPLUS, pRHS, 0); - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_EQ, yymsp[-4].minor.yy404, pRHS); - if( yymsp[-3].minor.yy376 ) yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy404, 0); + sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy626); + yymsp[-4].minor.yy626 = sqlite3Expr(pParse->db, TK_INTEGER, yymsp[-3].minor.yy64 ? 
"1" : "0"); }else{ - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_IN, yymsp[-4].minor.yy404, 0); - if( yymsp[-4].minor.yy404 ){ - yymsp[-4].minor.yy404->x.pList = yymsp[-1].minor.yy70; - sqlite3ExprSetHeightAndFlags(pParse, yymsp[-4].minor.yy404); + Expr *pRHS = yymsp[-1].minor.yy562->a[0].pExpr; + if( yymsp[-1].minor.yy562->nExpr==1 && sqlite3ExprIsConstant(pRHS) && yymsp[-4].minor.yy626->op!=TK_VECTOR ){ + yymsp[-1].minor.yy562->a[0].pExpr = 0; + sqlite3ExprListDelete(pParse->db, yymsp[-1].minor.yy562); + pRHS = sqlite3PExpr(pParse, TK_UPLUS, pRHS, 0); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_EQ, yymsp[-4].minor.yy626, pRHS); }else{ - sqlite3ExprListDelete(pParse->db, yymsp[-1].minor.yy70); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_IN, yymsp[-4].minor.yy626, 0); + if( yymsp[-4].minor.yy626==0 ){ + sqlite3ExprListDelete(pParse->db, yymsp[-1].minor.yy562); + }else if( yymsp[-4].minor.yy626->pLeft->op==TK_VECTOR ){ + int nExpr = yymsp[-4].minor.yy626->pLeft->x.pList->nExpr; + Select *pSelectRHS = sqlite3ExprListToValues(pParse, nExpr, yymsp[-1].minor.yy562); + if( pSelectRHS ){ + parserDoubleLinkSelect(pParse, pSelectRHS); + sqlite3PExprAddSelect(pParse, yymsp[-4].minor.yy626, pSelectRHS); + } + }else{ + yymsp[-4].minor.yy626->x.pList = yymsp[-1].minor.yy562; + sqlite3ExprSetHeightAndFlags(pParse, yymsp[-4].minor.yy626); + } } - if( yymsp[-3].minor.yy376 ) yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy404, 0); + if( yymsp[-3].minor.yy64 ) yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy626, 0); } } break; - case 216: /* expr ::= LP select RP */ + case 218: /* expr ::= LP select RP */ { - yymsp[-2].minor.yy404 = sqlite3PExpr(pParse, TK_SELECT, 0, 0); - sqlite3PExprAddSelect(pParse, yymsp[-2].minor.yy404, yymsp[-1].minor.yy81); + yymsp[-2].minor.yy626 = sqlite3PExpr(pParse, TK_SELECT, 0, 0); + sqlite3PExprAddSelect(pParse, yymsp[-2].minor.yy626, yymsp[-1].minor.yy303); } break; - case 217: /* expr ::= expr in_op LP select RP */ + case 219: /* expr ::= expr in_op LP select RP */ { - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_IN, yymsp[-4].minor.yy404, 0); - sqlite3PExprAddSelect(pParse, yymsp[-4].minor.yy404, yymsp[-1].minor.yy81); - if( yymsp[-3].minor.yy376 ) yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy404, 0); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_IN, yymsp[-4].minor.yy626, 0); + sqlite3PExprAddSelect(pParse, yymsp[-4].minor.yy626, yymsp[-1].minor.yy303); + if( yymsp[-3].minor.yy64 ) yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy626, 0); } break; - case 218: /* expr ::= expr in_op nm dbnm paren_exprlist */ + case 220: /* expr ::= expr in_op nm dbnm paren_exprlist */ { SrcList *pSrc = sqlite3SrcListAppend(pParse, 0,&yymsp[-2].minor.yy0,&yymsp[-1].minor.yy0); Select *pSelect = sqlite3SelectNew(pParse, 0,pSrc,0,0,0,0,0,0); - if( yymsp[0].minor.yy70 ) sqlite3SrcListFuncArgs(pParse, pSelect ? pSrc : 0, yymsp[0].minor.yy70); - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_IN, yymsp[-4].minor.yy404, 0); - sqlite3PExprAddSelect(pParse, yymsp[-4].minor.yy404, pSelect); - if( yymsp[-3].minor.yy376 ) yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy404, 0); + if( yymsp[0].minor.yy562 ) sqlite3SrcListFuncArgs(pParse, pSelect ? 
pSrc : 0, yymsp[0].minor.yy562); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_IN, yymsp[-4].minor.yy626, 0); + sqlite3PExprAddSelect(pParse, yymsp[-4].minor.yy626, pSelect); + if( yymsp[-3].minor.yy64 ) yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_NOT, yymsp[-4].minor.yy626, 0); } break; - case 219: /* expr ::= EXISTS LP select RP */ + case 221: /* expr ::= EXISTS LP select RP */ { Expr *p; - p = yymsp[-3].minor.yy404 = sqlite3PExpr(pParse, TK_EXISTS, 0, 0); - sqlite3PExprAddSelect(pParse, p, yymsp[-1].minor.yy81); + p = yymsp[-3].minor.yy626 = sqlite3PExpr(pParse, TK_EXISTS, 0, 0); + sqlite3PExprAddSelect(pParse, p, yymsp[-1].minor.yy303); } break; - case 220: /* expr ::= CASE case_operand case_exprlist case_else END */ + case 222: /* expr ::= CASE case_operand case_exprlist case_else END */ { - yymsp[-4].minor.yy404 = sqlite3PExpr(pParse, TK_CASE, yymsp[-3].minor.yy404, 0); - if( yymsp[-4].minor.yy404 ){ - yymsp[-4].minor.yy404->x.pList = yymsp[-1].minor.yy404 ? sqlite3ExprListAppend(pParse,yymsp[-2].minor.yy70,yymsp[-1].minor.yy404) : yymsp[-2].minor.yy70; - sqlite3ExprSetHeightAndFlags(pParse, yymsp[-4].minor.yy404); + yymsp[-4].minor.yy626 = sqlite3PExpr(pParse, TK_CASE, yymsp[-3].minor.yy626, 0); + if( yymsp[-4].minor.yy626 ){ + yymsp[-4].minor.yy626->x.pList = yymsp[-1].minor.yy626 ? sqlite3ExprListAppend(pParse,yymsp[-2].minor.yy562,yymsp[-1].minor.yy626) : yymsp[-2].minor.yy562; + sqlite3ExprSetHeightAndFlags(pParse, yymsp[-4].minor.yy626); }else{ - sqlite3ExprListDelete(pParse->db, yymsp[-2].minor.yy70); - sqlite3ExprDelete(pParse->db, yymsp[-1].minor.yy404); + sqlite3ExprListDelete(pParse->db, yymsp[-2].minor.yy562); + sqlite3ExprDelete(pParse->db, yymsp[-1].minor.yy626); } } break; - case 221: /* case_exprlist ::= case_exprlist WHEN expr THEN expr */ + case 223: /* case_exprlist ::= case_exprlist WHEN expr THEN expr */ { - yymsp[-4].minor.yy70 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy70, yymsp[-2].minor.yy404); - yymsp[-4].minor.yy70 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy70, yymsp[0].minor.yy404); + yymsp[-4].minor.yy562 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy562, yymsp[-2].minor.yy626); + yymsp[-4].minor.yy562 = sqlite3ExprListAppend(pParse,yymsp[-4].minor.yy562, yymsp[0].minor.yy626); } break; - case 222: /* case_exprlist ::= WHEN expr THEN expr */ + case 224: /* case_exprlist ::= WHEN expr THEN expr */ { - yymsp[-3].minor.yy70 = sqlite3ExprListAppend(pParse,0, yymsp[-2].minor.yy404); - yymsp[-3].minor.yy70 = sqlite3ExprListAppend(pParse,yymsp[-3].minor.yy70, yymsp[0].minor.yy404); + yymsp[-3].minor.yy562 = sqlite3ExprListAppend(pParse,0, yymsp[-2].minor.yy626); + yymsp[-3].minor.yy562 = sqlite3ExprListAppend(pParse,yymsp[-3].minor.yy562, yymsp[0].minor.yy626); } break; - case 225: /* case_operand ::= expr */ -{yymsp[0].minor.yy404 = yymsp[0].minor.yy404; /*A-overwrites-X*/} + case 227: /* case_operand ::= expr */ +{yymsp[0].minor.yy626 = yymsp[0].minor.yy626; /*A-overwrites-X*/} break; - case 228: /* nexprlist ::= nexprlist COMMA expr */ -{yymsp[-2].minor.yy70 = sqlite3ExprListAppend(pParse,yymsp[-2].minor.yy70,yymsp[0].minor.yy404);} + case 230: /* nexprlist ::= nexprlist COMMA expr */ +{yymsp[-2].minor.yy562 = sqlite3ExprListAppend(pParse,yymsp[-2].minor.yy562,yymsp[0].minor.yy626);} break; - case 229: /* nexprlist ::= expr */ -{yymsp[0].minor.yy70 = sqlite3ExprListAppend(pParse,0,yymsp[0].minor.yy404); /*A-overwrites-Y*/} + case 231: /* nexprlist ::= expr */ +{yymsp[0].minor.yy562 = sqlite3ExprListAppend(pParse,0,yymsp[0].minor.yy626); 
/*A-overwrites-Y*/} break; - case 231: /* paren_exprlist ::= LP exprlist RP */ - case 236: /* eidlist_opt ::= LP eidlist RP */ yytestcase(yyruleno==236); -{yymsp[-2].minor.yy70 = yymsp[-1].minor.yy70;} + case 233: /* paren_exprlist ::= LP exprlist RP */ + case 238: /* eidlist_opt ::= LP eidlist RP */ yytestcase(yyruleno==238); +{yymsp[-2].minor.yy562 = yymsp[-1].minor.yy562;} break; - case 232: /* cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt */ + case 234: /* cmd ::= createkw uniqueflag INDEX ifnotexists nm dbnm ON nm LP sortlist RP where_opt */ { sqlite3CreateIndex(pParse, &yymsp[-7].minor.yy0, &yymsp[-6].minor.yy0, - sqlite3SrcListAppend(pParse,0,&yymsp[-4].minor.yy0,0), yymsp[-2].minor.yy70, yymsp[-10].minor.yy376, - &yymsp[-11].minor.yy0, yymsp[0].minor.yy404, SQLITE_SO_ASC, yymsp[-8].minor.yy376, SQLITE_IDXTYPE_APPDEF); + sqlite3SrcListAppend(pParse,0,&yymsp[-4].minor.yy0,0), yymsp[-2].minor.yy562, yymsp[-10].minor.yy64, + &yymsp[-11].minor.yy0, yymsp[0].minor.yy626, SQLITE_SO_ASC, yymsp[-8].minor.yy64, SQLITE_IDXTYPE_APPDEF); if( IN_RENAME_OBJECT && pParse->pNewIndex ){ sqlite3RenameTokenMap(pParse, pParse->pNewIndex->zName, &yymsp[-4].minor.yy0); } } break; - case 233: /* uniqueflag ::= UNIQUE */ - case 275: /* raisetype ::= ABORT */ yytestcase(yyruleno==275); -{yymsp[0].minor.yy376 = OE_Abort;} + case 235: /* uniqueflag ::= UNIQUE */ + case 277: /* raisetype ::= ABORT */ yytestcase(yyruleno==277); +{yymsp[0].minor.yy64 = OE_Abort;} break; - case 234: /* uniqueflag ::= */ -{yymsp[1].minor.yy376 = OE_None;} + case 236: /* uniqueflag ::= */ +{yymsp[1].minor.yy64 = OE_None;} break; - case 237: /* eidlist ::= eidlist COMMA nm collate sortorder */ + case 239: /* eidlist ::= eidlist COMMA nm collate sortorder */ { - yymsp[-4].minor.yy70 = parserAddExprIdListTerm(pParse, yymsp[-4].minor.yy70, &yymsp[-2].minor.yy0, yymsp[-1].minor.yy376, yymsp[0].minor.yy376); + yymsp[-4].minor.yy562 = parserAddExprIdListTerm(pParse, yymsp[-4].minor.yy562, &yymsp[-2].minor.yy0, yymsp[-1].minor.yy64, yymsp[0].minor.yy64); } break; - case 238: /* eidlist ::= nm collate sortorder */ + case 240: /* eidlist ::= nm collate sortorder */ { - yymsp[-2].minor.yy70 = parserAddExprIdListTerm(pParse, 0, &yymsp[-2].minor.yy0, yymsp[-1].minor.yy376, yymsp[0].minor.yy376); /*A-overwrites-Y*/ + yymsp[-2].minor.yy562 = parserAddExprIdListTerm(pParse, 0, &yymsp[-2].minor.yy0, yymsp[-1].minor.yy64, yymsp[0].minor.yy64); /*A-overwrites-Y*/ } break; - case 241: /* cmd ::= DROP INDEX ifexists fullname */ -{sqlite3DropIndex(pParse, yymsp[0].minor.yy153, yymsp[-1].minor.yy376);} + case 243: /* cmd ::= DROP INDEX ifexists fullname */ +{sqlite3DropIndex(pParse, yymsp[0].minor.yy607, yymsp[-1].minor.yy64);} break; - case 242: /* cmd ::= VACUUM vinto */ -{sqlite3Vacuum(pParse,0,yymsp[0].minor.yy404);} + case 244: /* cmd ::= VACUUM vinto */ +{sqlite3Vacuum(pParse,0,yymsp[0].minor.yy626);} break; - case 243: /* cmd ::= VACUUM nm vinto */ -{sqlite3Vacuum(pParse,&yymsp[-1].minor.yy0,yymsp[0].minor.yy404);} + case 245: /* cmd ::= VACUUM nm vinto */ +{sqlite3Vacuum(pParse,&yymsp[-1].minor.yy0,yymsp[0].minor.yy626);} break; - case 246: /* cmd ::= PRAGMA nm dbnm */ + case 248: /* cmd ::= PRAGMA nm dbnm */ {sqlite3Pragma(pParse,&yymsp[-1].minor.yy0,&yymsp[0].minor.yy0,0,0);} break; - case 247: /* cmd ::= PRAGMA nm dbnm EQ nmnum */ + case 249: /* cmd ::= PRAGMA nm dbnm EQ nmnum */ {sqlite3Pragma(pParse,&yymsp[-3].minor.yy0,&yymsp[-2].minor.yy0,&yymsp[0].minor.yy0,0);} break; - case 248: /* cmd ::= PRAGMA 
nm dbnm LP nmnum RP */ + case 250: /* cmd ::= PRAGMA nm dbnm LP nmnum RP */ {sqlite3Pragma(pParse,&yymsp[-4].minor.yy0,&yymsp[-3].minor.yy0,&yymsp[-1].minor.yy0,0);} break; - case 249: /* cmd ::= PRAGMA nm dbnm EQ minus_num */ + case 251: /* cmd ::= PRAGMA nm dbnm EQ minus_num */ {sqlite3Pragma(pParse,&yymsp[-3].minor.yy0,&yymsp[-2].minor.yy0,&yymsp[0].minor.yy0,1);} break; - case 250: /* cmd ::= PRAGMA nm dbnm LP minus_num RP */ + case 252: /* cmd ::= PRAGMA nm dbnm LP minus_num RP */ {sqlite3Pragma(pParse,&yymsp[-4].minor.yy0,&yymsp[-3].minor.yy0,&yymsp[-1].minor.yy0,1);} break; - case 253: /* cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END */ + case 255: /* cmd ::= createkw trigger_decl BEGIN trigger_cmd_list END */ { Token all; all.z = yymsp[-3].minor.yy0.z; all.n = (int)(yymsp[0].minor.yy0.z - yymsp[-3].minor.yy0.z) + yymsp[0].minor.yy0.n; - sqlite3FinishTrigger(pParse, yymsp[-1].minor.yy157, &all); + sqlite3FinishTrigger(pParse, yymsp[-1].minor.yy95, &all); } break; - case 254: /* trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause */ + case 256: /* trigger_decl ::= temp TRIGGER ifnotexists nm dbnm trigger_time trigger_event ON fullname foreach_clause when_clause */ { - sqlite3BeginTrigger(pParse, &yymsp[-7].minor.yy0, &yymsp[-6].minor.yy0, yymsp[-5].minor.yy376, yymsp[-4].minor.yy262.a, yymsp[-4].minor.yy262.b, yymsp[-2].minor.yy153, yymsp[0].minor.yy404, yymsp[-10].minor.yy376, yymsp[-8].minor.yy376); + sqlite3BeginTrigger(pParse, &yymsp[-7].minor.yy0, &yymsp[-6].minor.yy0, yymsp[-5].minor.yy64, yymsp[-4].minor.yy570.a, yymsp[-4].minor.yy570.b, yymsp[-2].minor.yy607, yymsp[0].minor.yy626, yymsp[-10].minor.yy64, yymsp[-8].minor.yy64); yymsp[-10].minor.yy0 = (yymsp[-6].minor.yy0.n==0?yymsp[-7].minor.yy0:yymsp[-6].minor.yy0); /*A-overwrites-T*/ } break; - case 255: /* trigger_time ::= BEFORE|AFTER */ -{ yymsp[0].minor.yy376 = yymsp[0].major; /*A-overwrites-X*/ } + case 257: /* trigger_time ::= BEFORE|AFTER */ +{ yymsp[0].minor.yy64 = yymsp[0].major; /*A-overwrites-X*/ } break; - case 256: /* trigger_time ::= INSTEAD OF */ -{ yymsp[-1].minor.yy376 = TK_INSTEAD;} + case 258: /* trigger_time ::= INSTEAD OF */ +{ yymsp[-1].minor.yy64 = TK_INSTEAD;} break; - case 257: /* trigger_time ::= */ -{ yymsp[1].minor.yy376 = TK_BEFORE; } + case 259: /* trigger_time ::= */ +{ yymsp[1].minor.yy64 = TK_BEFORE; } break; - case 258: /* trigger_event ::= DELETE|INSERT */ - case 259: /* trigger_event ::= UPDATE */ yytestcase(yyruleno==259); -{yymsp[0].minor.yy262.a = yymsp[0].major; /*A-overwrites-X*/ yymsp[0].minor.yy262.b = 0;} + case 260: /* trigger_event ::= DELETE|INSERT */ + case 261: /* trigger_event ::= UPDATE */ yytestcase(yyruleno==261); +{yymsp[0].minor.yy570.a = yymsp[0].major; /*A-overwrites-X*/ yymsp[0].minor.yy570.b = 0;} break; - case 260: /* trigger_event ::= UPDATE OF idlist */ -{yymsp[-2].minor.yy262.a = TK_UPDATE; yymsp[-2].minor.yy262.b = yymsp[0].minor.yy436;} + case 262: /* trigger_event ::= UPDATE OF idlist */ +{yymsp[-2].minor.yy570.a = TK_UPDATE; yymsp[-2].minor.yy570.b = yymsp[0].minor.yy240;} break; - case 261: /* when_clause ::= */ - case 280: /* key_opt ::= */ yytestcase(yyruleno==280); -{ yymsp[1].minor.yy404 = 0; } + case 263: /* when_clause ::= */ + case 282: /* key_opt ::= */ yytestcase(yyruleno==282); +{ yymsp[1].minor.yy626 = 0; } break; - case 262: /* when_clause ::= WHEN expr */ - case 281: /* key_opt ::= KEY expr */ yytestcase(yyruleno==281); -{ yymsp[-1].minor.yy404 = yymsp[0].minor.yy404; } + 
case 264: /* when_clause ::= WHEN expr */ + case 283: /* key_opt ::= KEY expr */ yytestcase(yyruleno==283); +{ yymsp[-1].minor.yy626 = yymsp[0].minor.yy626; } break; - case 263: /* trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI */ + case 265: /* trigger_cmd_list ::= trigger_cmd_list trigger_cmd SEMI */ { - assert( yymsp[-2].minor.yy157!=0 ); - yymsp[-2].minor.yy157->pLast->pNext = yymsp[-1].minor.yy157; - yymsp[-2].minor.yy157->pLast = yymsp[-1].minor.yy157; + assert( yymsp[-2].minor.yy95!=0 ); + yymsp[-2].minor.yy95->pLast->pNext = yymsp[-1].minor.yy95; + yymsp[-2].minor.yy95->pLast = yymsp[-1].minor.yy95; } break; - case 264: /* trigger_cmd_list ::= trigger_cmd SEMI */ + case 266: /* trigger_cmd_list ::= trigger_cmd SEMI */ { - assert( yymsp[-1].minor.yy157!=0 ); - yymsp[-1].minor.yy157->pLast = yymsp[-1].minor.yy157; + assert( yymsp[-1].minor.yy95!=0 ); + yymsp[-1].minor.yy95->pLast = yymsp[-1].minor.yy95; } break; - case 265: /* trnm ::= nm DOT nm */ + case 267: /* trnm ::= nm DOT nm */ { yymsp[-2].minor.yy0 = yymsp[0].minor.yy0; sqlite3ErrorMsg(pParse, @@ -162630,368 +164105,369 @@ static YYACTIONTYPE yy_reduce( "statements within triggers"); } break; - case 266: /* tridxby ::= INDEXED BY nm */ + case 268: /* tridxby ::= INDEXED BY nm */ { sqlite3ErrorMsg(pParse, "the INDEXED BY clause is not allowed on UPDATE or DELETE statements " "within triggers"); } break; - case 267: /* tridxby ::= NOT INDEXED */ + case 269: /* tridxby ::= NOT INDEXED */ { sqlite3ErrorMsg(pParse, "the NOT INDEXED clause is not allowed on UPDATE or DELETE statements " "within triggers"); } break; - case 268: /* trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt */ -{yylhsminor.yy157 = sqlite3TriggerUpdateStep(pParse, &yymsp[-6].minor.yy0, yymsp[-2].minor.yy153, yymsp[-3].minor.yy70, yymsp[-1].minor.yy404, yymsp[-7].minor.yy376, yymsp[-8].minor.yy0.z, yymsp[0].minor.yy504);} - yymsp[-8].minor.yy157 = yylhsminor.yy157; + case 270: /* trigger_cmd ::= UPDATE orconf trnm tridxby SET setlist from where_opt scanpt */ +{yylhsminor.yy95 = sqlite3TriggerUpdateStep(pParse, &yymsp[-6].minor.yy0, yymsp[-2].minor.yy607, yymsp[-3].minor.yy562, yymsp[-1].minor.yy626, yymsp[-7].minor.yy64, yymsp[-8].minor.yy0.z, yymsp[0].minor.yy600);} + yymsp[-8].minor.yy95 = yylhsminor.yy95; break; - case 269: /* trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt */ + case 271: /* trigger_cmd ::= scanpt insert_cmd INTO trnm idlist_opt select upsert scanpt */ { - yylhsminor.yy157 = sqlite3TriggerInsertStep(pParse,&yymsp[-4].minor.yy0,yymsp[-3].minor.yy436,yymsp[-2].minor.yy81,yymsp[-6].minor.yy376,yymsp[-1].minor.yy190,yymsp[-7].minor.yy504,yymsp[0].minor.yy504);/*yylhsminor.yy157-overwrites-yymsp[-6].minor.yy376*/ + yylhsminor.yy95 = sqlite3TriggerInsertStep(pParse,&yymsp[-4].minor.yy0,yymsp[-3].minor.yy240,yymsp[-2].minor.yy303,yymsp[-6].minor.yy64,yymsp[-1].minor.yy138,yymsp[-7].minor.yy600,yymsp[0].minor.yy600);/*yylhsminor.yy95-overwrites-yymsp[-6].minor.yy64*/ } - yymsp[-7].minor.yy157 = yylhsminor.yy157; + yymsp[-7].minor.yy95 = yylhsminor.yy95; break; - case 270: /* trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt */ -{yylhsminor.yy157 = sqlite3TriggerDeleteStep(pParse, &yymsp[-3].minor.yy0, yymsp[-1].minor.yy404, yymsp[-5].minor.yy0.z, yymsp[0].minor.yy504);} - yymsp[-5].minor.yy157 = yylhsminor.yy157; + case 272: /* trigger_cmd ::= DELETE FROM trnm tridxby where_opt scanpt */ +{yylhsminor.yy95 = sqlite3TriggerDeleteStep(pParse, &yymsp[-3].minor.yy0, 
yymsp[-1].minor.yy626, yymsp[-5].minor.yy0.z, yymsp[0].minor.yy600);} + yymsp[-5].minor.yy95 = yylhsminor.yy95; break; - case 271: /* trigger_cmd ::= scanpt select scanpt */ -{yylhsminor.yy157 = sqlite3TriggerSelectStep(pParse->db, yymsp[-1].minor.yy81, yymsp[-2].minor.yy504, yymsp[0].minor.yy504); /*yylhsminor.yy157-overwrites-yymsp[-1].minor.yy81*/} - yymsp[-2].minor.yy157 = yylhsminor.yy157; + case 273: /* trigger_cmd ::= scanpt select scanpt */ +{yylhsminor.yy95 = sqlite3TriggerSelectStep(pParse->db, yymsp[-1].minor.yy303, yymsp[-2].minor.yy600, yymsp[0].minor.yy600); /*yylhsminor.yy95-overwrites-yymsp[-1].minor.yy303*/} + yymsp[-2].minor.yy95 = yylhsminor.yy95; break; - case 272: /* expr ::= RAISE LP IGNORE RP */ + case 274: /* expr ::= RAISE LP IGNORE RP */ { - yymsp[-3].minor.yy404 = sqlite3PExpr(pParse, TK_RAISE, 0, 0); - if( yymsp[-3].minor.yy404 ){ - yymsp[-3].minor.yy404->affExpr = OE_Ignore; + yymsp[-3].minor.yy626 = sqlite3PExpr(pParse, TK_RAISE, 0, 0); + if( yymsp[-3].minor.yy626 ){ + yymsp[-3].minor.yy626->affExpr = OE_Ignore; } } break; - case 273: /* expr ::= RAISE LP raisetype COMMA nm RP */ + case 275: /* expr ::= RAISE LP raisetype COMMA nm RP */ { - yymsp[-5].minor.yy404 = sqlite3ExprAlloc(pParse->db, TK_RAISE, &yymsp[-1].minor.yy0, 1); - if( yymsp[-5].minor.yy404 ) { - yymsp[-5].minor.yy404->affExpr = (char)yymsp[-3].minor.yy376; + yymsp[-5].minor.yy626 = sqlite3ExprAlloc(pParse->db, TK_RAISE, &yymsp[-1].minor.yy0, 1); + if( yymsp[-5].minor.yy626 ) { + yymsp[-5].minor.yy626->affExpr = (char)yymsp[-3].minor.yy64; } } break; - case 274: /* raisetype ::= ROLLBACK */ -{yymsp[0].minor.yy376 = OE_Rollback;} + case 276: /* raisetype ::= ROLLBACK */ +{yymsp[0].minor.yy64 = OE_Rollback;} break; - case 276: /* raisetype ::= FAIL */ -{yymsp[0].minor.yy376 = OE_Fail;} + case 278: /* raisetype ::= FAIL */ +{yymsp[0].minor.yy64 = OE_Fail;} break; - case 277: /* cmd ::= DROP TRIGGER ifexists fullname */ + case 279: /* cmd ::= DROP TRIGGER ifexists fullname */ { - sqlite3DropTrigger(pParse,yymsp[0].minor.yy153,yymsp[-1].minor.yy376); + sqlite3DropTrigger(pParse,yymsp[0].minor.yy607,yymsp[-1].minor.yy64); } break; - case 278: /* cmd ::= ATTACH database_kw_opt expr AS expr key_opt */ + case 280: /* cmd ::= ATTACH database_kw_opt expr AS expr key_opt */ { - sqlite3Attach(pParse, yymsp[-3].minor.yy404, yymsp[-1].minor.yy404, yymsp[0].minor.yy404); + sqlite3Attach(pParse, yymsp[-3].minor.yy626, yymsp[-1].minor.yy626, yymsp[0].minor.yy626); } break; - case 279: /* cmd ::= DETACH database_kw_opt expr */ + case 281: /* cmd ::= DETACH database_kw_opt expr */ { - sqlite3Detach(pParse, yymsp[0].minor.yy404); + sqlite3Detach(pParse, yymsp[0].minor.yy626); } break; - case 282: /* cmd ::= REINDEX */ + case 284: /* cmd ::= REINDEX */ {sqlite3Reindex(pParse, 0, 0);} break; - case 283: /* cmd ::= REINDEX nm dbnm */ + case 285: /* cmd ::= REINDEX nm dbnm */ {sqlite3Reindex(pParse, &yymsp[-1].minor.yy0, &yymsp[0].minor.yy0);} break; - case 284: /* cmd ::= ANALYZE */ + case 286: /* cmd ::= ANALYZE */ {sqlite3Analyze(pParse, 0, 0);} break; - case 285: /* cmd ::= ANALYZE nm dbnm */ + case 287: /* cmd ::= ANALYZE nm dbnm */ {sqlite3Analyze(pParse, &yymsp[-1].minor.yy0, &yymsp[0].minor.yy0);} break; - case 286: /* cmd ::= ALTER TABLE fullname RENAME TO nm */ + case 288: /* cmd ::= ALTER TABLE fullname RENAME TO nm */ { - sqlite3AlterRenameTable(pParse,yymsp[-3].minor.yy153,&yymsp[0].minor.yy0); + sqlite3AlterRenameTable(pParse,yymsp[-3].minor.yy607,&yymsp[0].minor.yy0); } break; - case 287: /* cmd ::= ALTER 
TABLE add_column_fullname ADD kwcolumn_opt columnname carglist */ + case 289: /* cmd ::= ALTER TABLE add_column_fullname ADD kwcolumn_opt columnname carglist */ { yymsp[-1].minor.yy0.n = (int)(pParse->sLastToken.z-yymsp[-1].minor.yy0.z) + pParse->sLastToken.n; sqlite3AlterFinishAddColumn(pParse, &yymsp[-1].minor.yy0); } break; - case 288: /* cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm */ + case 290: /* cmd ::= ALTER TABLE fullname DROP kwcolumn_opt nm */ { - sqlite3AlterDropColumn(pParse, yymsp[-3].minor.yy153, &yymsp[0].minor.yy0); + sqlite3AlterDropColumn(pParse, yymsp[-3].minor.yy607, &yymsp[0].minor.yy0); } break; - case 289: /* add_column_fullname ::= fullname */ + case 291: /* add_column_fullname ::= fullname */ { disableLookaside(pParse); - sqlite3AlterBeginAddColumn(pParse, yymsp[0].minor.yy153); + sqlite3AlterBeginAddColumn(pParse, yymsp[0].minor.yy607); } break; - case 290: /* cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm */ + case 292: /* cmd ::= ALTER TABLE fullname RENAME kwcolumn_opt nm TO nm */ { - sqlite3AlterRenameColumn(pParse, yymsp[-5].minor.yy153, &yymsp[-2].minor.yy0, &yymsp[0].minor.yy0); + sqlite3AlterRenameColumn(pParse, yymsp[-5].minor.yy607, &yymsp[-2].minor.yy0, &yymsp[0].minor.yy0); } break; - case 291: /* cmd ::= create_vtab */ + case 293: /* cmd ::= create_vtab */ {sqlite3VtabFinishParse(pParse,0);} break; - case 292: /* cmd ::= create_vtab LP vtabarglist RP */ + case 294: /* cmd ::= create_vtab LP vtabarglist RP */ {sqlite3VtabFinishParse(pParse,&yymsp[0].minor.yy0);} break; - case 293: /* create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm */ + case 295: /* create_vtab ::= createkw VIRTUAL TABLE ifnotexists nm dbnm USING nm */ { - sqlite3VtabBeginParse(pParse, &yymsp[-3].minor.yy0, &yymsp[-2].minor.yy0, &yymsp[0].minor.yy0, yymsp[-4].minor.yy376); + sqlite3VtabBeginParse(pParse, &yymsp[-3].minor.yy0, &yymsp[-2].minor.yy0, &yymsp[0].minor.yy0, yymsp[-4].minor.yy64); } break; - case 294: /* vtabarg ::= */ + case 296: /* vtabarg ::= */ {sqlite3VtabArgInit(pParse);} break; - case 295: /* vtabargtoken ::= ANY */ - case 296: /* vtabargtoken ::= lp anylist RP */ yytestcase(yyruleno==296); - case 297: /* lp ::= LP */ yytestcase(yyruleno==297); + case 297: /* vtabargtoken ::= ANY */ + case 298: /* vtabargtoken ::= lp anylist RP */ yytestcase(yyruleno==298); + case 299: /* lp ::= LP */ yytestcase(yyruleno==299); {sqlite3VtabArgExtend(pParse,&yymsp[0].minor.yy0);} break; - case 298: /* with ::= WITH wqlist */ - case 299: /* with ::= WITH RECURSIVE wqlist */ yytestcase(yyruleno==299); -{ sqlite3WithPush(pParse, yymsp[0].minor.yy103, 1); } + case 300: /* with ::= WITH wqlist */ + case 301: /* with ::= WITH RECURSIVE wqlist */ yytestcase(yyruleno==301); +{ sqlite3WithPush(pParse, yymsp[0].minor.yy43, 1); } break; - case 300: /* wqas ::= AS */ -{yymsp[0].minor.yy552 = M10d_Any;} + case 302: /* wqas ::= AS */ +{yymsp[0].minor.yy534 = M10d_Any;} break; - case 301: /* wqas ::= AS MATERIALIZED */ -{yymsp[-1].minor.yy552 = M10d_Yes;} + case 303: /* wqas ::= AS MATERIALIZED */ +{yymsp[-1].minor.yy534 = M10d_Yes;} break; - case 302: /* wqas ::= AS NOT MATERIALIZED */ -{yymsp[-2].minor.yy552 = M10d_No;} + case 304: /* wqas ::= AS NOT MATERIALIZED */ +{yymsp[-2].minor.yy534 = M10d_No;} break; - case 303: /* wqitem ::= nm eidlist_opt wqas LP select RP */ + case 305: /* wqitem ::= nm eidlist_opt wqas LP select RP */ { - yymsp[-5].minor.yy329 = sqlite3CteNew(pParse, &yymsp[-5].minor.yy0, yymsp[-4].minor.yy70, yymsp[-1].minor.yy81, 
yymsp[-3].minor.yy552); /*A-overwrites-X*/ + yymsp[-5].minor.yy255 = sqlite3CteNew(pParse, &yymsp[-5].minor.yy0, yymsp[-4].minor.yy562, yymsp[-1].minor.yy303, yymsp[-3].minor.yy534); /*A-overwrites-X*/ } break; - case 304: /* wqlist ::= wqitem */ + case 306: /* wqlist ::= wqitem */ { - yymsp[0].minor.yy103 = sqlite3WithAdd(pParse, 0, yymsp[0].minor.yy329); /*A-overwrites-X*/ + yymsp[0].minor.yy43 = sqlite3WithAdd(pParse, 0, yymsp[0].minor.yy255); /*A-overwrites-X*/ } break; - case 305: /* wqlist ::= wqlist COMMA wqitem */ + case 307: /* wqlist ::= wqlist COMMA wqitem */ { - yymsp[-2].minor.yy103 = sqlite3WithAdd(pParse, yymsp[-2].minor.yy103, yymsp[0].minor.yy329); + yymsp[-2].minor.yy43 = sqlite3WithAdd(pParse, yymsp[-2].minor.yy43, yymsp[0].minor.yy255); } break; - case 306: /* windowdefn_list ::= windowdefn */ -{ yylhsminor.yy49 = yymsp[0].minor.yy49; } - yymsp[0].minor.yy49 = yylhsminor.yy49; + case 308: /* windowdefn_list ::= windowdefn */ +{ yylhsminor.yy375 = yymsp[0].minor.yy375; } + yymsp[0].minor.yy375 = yylhsminor.yy375; break; - case 307: /* windowdefn_list ::= windowdefn_list COMMA windowdefn */ + case 309: /* windowdefn_list ::= windowdefn_list COMMA windowdefn */ { - assert( yymsp[0].minor.yy49!=0 ); - sqlite3WindowChain(pParse, yymsp[0].minor.yy49, yymsp[-2].minor.yy49); - yymsp[0].minor.yy49->pNextWin = yymsp[-2].minor.yy49; - yylhsminor.yy49 = yymsp[0].minor.yy49; + assert( yymsp[0].minor.yy375!=0 ); + sqlite3WindowChain(pParse, yymsp[0].minor.yy375, yymsp[-2].minor.yy375); + yymsp[0].minor.yy375->pNextWin = yymsp[-2].minor.yy375; + yylhsminor.yy375 = yymsp[0].minor.yy375; } - yymsp[-2].minor.yy49 = yylhsminor.yy49; + yymsp[-2].minor.yy375 = yylhsminor.yy375; break; - case 308: /* windowdefn ::= nm AS LP window RP */ + case 310: /* windowdefn ::= nm AS LP window RP */ { - if( ALWAYS(yymsp[-1].minor.yy49) ){ - yymsp[-1].minor.yy49->zName = sqlite3DbStrNDup(pParse->db, yymsp[-4].minor.yy0.z, yymsp[-4].minor.yy0.n); + if( ALWAYS(yymsp[-1].minor.yy375) ){ + yymsp[-1].minor.yy375->zName = sqlite3DbStrNDup(pParse->db, yymsp[-4].minor.yy0.z, yymsp[-4].minor.yy0.n); } - yylhsminor.yy49 = yymsp[-1].minor.yy49; + yylhsminor.yy375 = yymsp[-1].minor.yy375; } - yymsp[-4].minor.yy49 = yylhsminor.yy49; + yymsp[-4].minor.yy375 = yylhsminor.yy375; break; - case 309: /* window ::= PARTITION BY nexprlist orderby_opt frame_opt */ + case 311: /* window ::= PARTITION BY nexprlist orderby_opt frame_opt */ { - yymsp[-4].minor.yy49 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy49, yymsp[-2].minor.yy70, yymsp[-1].minor.yy70, 0); + yymsp[-4].minor.yy375 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy375, yymsp[-2].minor.yy562, yymsp[-1].minor.yy562, 0); } break; - case 310: /* window ::= nm PARTITION BY nexprlist orderby_opt frame_opt */ + case 312: /* window ::= nm PARTITION BY nexprlist orderby_opt frame_opt */ { - yylhsminor.yy49 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy49, yymsp[-2].minor.yy70, yymsp[-1].minor.yy70, &yymsp[-5].minor.yy0); + yylhsminor.yy375 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy375, yymsp[-2].minor.yy562, yymsp[-1].minor.yy562, &yymsp[-5].minor.yy0); } - yymsp[-5].minor.yy49 = yylhsminor.yy49; + yymsp[-5].minor.yy375 = yylhsminor.yy375; break; - case 311: /* window ::= ORDER BY sortlist frame_opt */ + case 313: /* window ::= ORDER BY sortlist frame_opt */ { - yymsp[-3].minor.yy49 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy49, 0, yymsp[-1].minor.yy70, 0); + yymsp[-3].minor.yy375 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy375, 0, 
yymsp[-1].minor.yy562, 0); } break; - case 312: /* window ::= nm ORDER BY sortlist frame_opt */ + case 314: /* window ::= nm ORDER BY sortlist frame_opt */ { - yylhsminor.yy49 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy49, 0, yymsp[-1].minor.yy70, &yymsp[-4].minor.yy0); + yylhsminor.yy375 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy375, 0, yymsp[-1].minor.yy562, &yymsp[-4].minor.yy0); } - yymsp[-4].minor.yy49 = yylhsminor.yy49; + yymsp[-4].minor.yy375 = yylhsminor.yy375; break; - case 313: /* window ::= frame_opt */ - case 332: /* filter_over ::= over_clause */ yytestcase(yyruleno==332); + case 315: /* window ::= frame_opt */ + case 334: /* filter_over ::= over_clause */ yytestcase(yyruleno==334); { - yylhsminor.yy49 = yymsp[0].minor.yy49; + yylhsminor.yy375 = yymsp[0].minor.yy375; } - yymsp[0].minor.yy49 = yylhsminor.yy49; + yymsp[0].minor.yy375 = yylhsminor.yy375; break; - case 314: /* window ::= nm frame_opt */ + case 316: /* window ::= nm frame_opt */ { - yylhsminor.yy49 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy49, 0, 0, &yymsp[-1].minor.yy0); + yylhsminor.yy375 = sqlite3WindowAssemble(pParse, yymsp[0].minor.yy375, 0, 0, &yymsp[-1].minor.yy0); } - yymsp[-1].minor.yy49 = yylhsminor.yy49; + yymsp[-1].minor.yy375 = yylhsminor.yy375; break; - case 315: /* frame_opt ::= */ + case 317: /* frame_opt ::= */ { - yymsp[1].minor.yy49 = sqlite3WindowAlloc(pParse, 0, TK_UNBOUNDED, 0, TK_CURRENT, 0, 0); + yymsp[1].minor.yy375 = sqlite3WindowAlloc(pParse, 0, TK_UNBOUNDED, 0, TK_CURRENT, 0, 0); } break; - case 316: /* frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt */ + case 318: /* frame_opt ::= range_or_rows frame_bound_s frame_exclude_opt */ { - yylhsminor.yy49 = sqlite3WindowAlloc(pParse, yymsp[-2].minor.yy376, yymsp[-1].minor.yy117.eType, yymsp[-1].minor.yy117.pExpr, TK_CURRENT, 0, yymsp[0].minor.yy552); + yylhsminor.yy375 = sqlite3WindowAlloc(pParse, yymsp[-2].minor.yy64, yymsp[-1].minor.yy81.eType, yymsp[-1].minor.yy81.pExpr, TK_CURRENT, 0, yymsp[0].minor.yy534); } - yymsp[-2].minor.yy49 = yylhsminor.yy49; + yymsp[-2].minor.yy375 = yylhsminor.yy375; break; - case 317: /* frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt */ + case 319: /* frame_opt ::= range_or_rows BETWEEN frame_bound_s AND frame_bound_e frame_exclude_opt */ { - yylhsminor.yy49 = sqlite3WindowAlloc(pParse, yymsp[-5].minor.yy376, yymsp[-3].minor.yy117.eType, yymsp[-3].minor.yy117.pExpr, yymsp[-1].minor.yy117.eType, yymsp[-1].minor.yy117.pExpr, yymsp[0].minor.yy552); + yylhsminor.yy375 = sqlite3WindowAlloc(pParse, yymsp[-5].minor.yy64, yymsp[-3].minor.yy81.eType, yymsp[-3].minor.yy81.pExpr, yymsp[-1].minor.yy81.eType, yymsp[-1].minor.yy81.pExpr, yymsp[0].minor.yy534); } - yymsp[-5].minor.yy49 = yylhsminor.yy49; + yymsp[-5].minor.yy375 = yylhsminor.yy375; break; - case 319: /* frame_bound_s ::= frame_bound */ - case 321: /* frame_bound_e ::= frame_bound */ yytestcase(yyruleno==321); -{yylhsminor.yy117 = yymsp[0].minor.yy117;} - yymsp[0].minor.yy117 = yylhsminor.yy117; + case 321: /* frame_bound_s ::= frame_bound */ + case 323: /* frame_bound_e ::= frame_bound */ yytestcase(yyruleno==323); +{yylhsminor.yy81 = yymsp[0].minor.yy81;} + yymsp[0].minor.yy81 = yylhsminor.yy81; break; - case 320: /* frame_bound_s ::= UNBOUNDED PRECEDING */ - case 322: /* frame_bound_e ::= UNBOUNDED FOLLOWING */ yytestcase(yyruleno==322); - case 324: /* frame_bound ::= CURRENT ROW */ yytestcase(yyruleno==324); -{yylhsminor.yy117.eType = yymsp[-1].major; yylhsminor.yy117.pExpr = 0;} - 
yymsp[-1].minor.yy117 = yylhsminor.yy117; + case 322: /* frame_bound_s ::= UNBOUNDED PRECEDING */ + case 324: /* frame_bound_e ::= UNBOUNDED FOLLOWING */ yytestcase(yyruleno==324); + case 326: /* frame_bound ::= CURRENT ROW */ yytestcase(yyruleno==326); +{yylhsminor.yy81.eType = yymsp[-1].major; yylhsminor.yy81.pExpr = 0;} + yymsp[-1].minor.yy81 = yylhsminor.yy81; break; - case 323: /* frame_bound ::= expr PRECEDING|FOLLOWING */ -{yylhsminor.yy117.eType = yymsp[0].major; yylhsminor.yy117.pExpr = yymsp[-1].minor.yy404;} - yymsp[-1].minor.yy117 = yylhsminor.yy117; + case 325: /* frame_bound ::= expr PRECEDING|FOLLOWING */ +{yylhsminor.yy81.eType = yymsp[0].major; yylhsminor.yy81.pExpr = yymsp[-1].minor.yy626;} + yymsp[-1].minor.yy81 = yylhsminor.yy81; break; - case 325: /* frame_exclude_opt ::= */ -{yymsp[1].minor.yy552 = 0;} + case 327: /* frame_exclude_opt ::= */ +{yymsp[1].minor.yy534 = 0;} break; - case 326: /* frame_exclude_opt ::= EXCLUDE frame_exclude */ -{yymsp[-1].minor.yy552 = yymsp[0].minor.yy552;} + case 328: /* frame_exclude_opt ::= EXCLUDE frame_exclude */ +{yymsp[-1].minor.yy534 = yymsp[0].minor.yy534;} break; - case 327: /* frame_exclude ::= NO OTHERS */ - case 328: /* frame_exclude ::= CURRENT ROW */ yytestcase(yyruleno==328); -{yymsp[-1].minor.yy552 = yymsp[-1].major; /*A-overwrites-X*/} + case 329: /* frame_exclude ::= NO OTHERS */ + case 330: /* frame_exclude ::= CURRENT ROW */ yytestcase(yyruleno==330); +{yymsp[-1].minor.yy534 = yymsp[-1].major; /*A-overwrites-X*/} break; - case 329: /* frame_exclude ::= GROUP|TIES */ -{yymsp[0].minor.yy552 = yymsp[0].major; /*A-overwrites-X*/} + case 331: /* frame_exclude ::= GROUP|TIES */ +{yymsp[0].minor.yy534 = yymsp[0].major; /*A-overwrites-X*/} break; - case 330: /* window_clause ::= WINDOW windowdefn_list */ -{ yymsp[-1].minor.yy49 = yymsp[0].minor.yy49; } + case 332: /* window_clause ::= WINDOW windowdefn_list */ +{ yymsp[-1].minor.yy375 = yymsp[0].minor.yy375; } break; - case 331: /* filter_over ::= filter_clause over_clause */ + case 333: /* filter_over ::= filter_clause over_clause */ { - if( yymsp[0].minor.yy49 ){ - yymsp[0].minor.yy49->pFilter = yymsp[-1].minor.yy404; + if( yymsp[0].minor.yy375 ){ + yymsp[0].minor.yy375->pFilter = yymsp[-1].minor.yy626; }else{ - sqlite3ExprDelete(pParse->db, yymsp[-1].minor.yy404); + sqlite3ExprDelete(pParse->db, yymsp[-1].minor.yy626); } - yylhsminor.yy49 = yymsp[0].minor.yy49; + yylhsminor.yy375 = yymsp[0].minor.yy375; } - yymsp[-1].minor.yy49 = yylhsminor.yy49; + yymsp[-1].minor.yy375 = yylhsminor.yy375; break; - case 333: /* filter_over ::= filter_clause */ + case 335: /* filter_over ::= filter_clause */ { - yylhsminor.yy49 = (Window*)sqlite3DbMallocZero(pParse->db, sizeof(Window)); - if( yylhsminor.yy49 ){ - yylhsminor.yy49->eFrmType = TK_FILTER; - yylhsminor.yy49->pFilter = yymsp[0].minor.yy404; + yylhsminor.yy375 = (Window*)sqlite3DbMallocZero(pParse->db, sizeof(Window)); + if( yylhsminor.yy375 ){ + yylhsminor.yy375->eFrmType = TK_FILTER; + yylhsminor.yy375->pFilter = yymsp[0].minor.yy626; }else{ - sqlite3ExprDelete(pParse->db, yymsp[0].minor.yy404); + sqlite3ExprDelete(pParse->db, yymsp[0].minor.yy626); } } - yymsp[0].minor.yy49 = yylhsminor.yy49; + yymsp[0].minor.yy375 = yylhsminor.yy375; break; - case 334: /* over_clause ::= OVER LP window RP */ + case 336: /* over_clause ::= OVER LP window RP */ { - yymsp[-3].minor.yy49 = yymsp[-1].minor.yy49; - assert( yymsp[-3].minor.yy49!=0 ); + yymsp[-3].minor.yy375 = yymsp[-1].minor.yy375; + assert( yymsp[-3].minor.yy375!=0 ); } break; - case 
335: /* over_clause ::= OVER nm */ + case 337: /* over_clause ::= OVER nm */ { - yymsp[-1].minor.yy49 = (Window*)sqlite3DbMallocZero(pParse->db, sizeof(Window)); - if( yymsp[-1].minor.yy49 ){ - yymsp[-1].minor.yy49->zName = sqlite3DbStrNDup(pParse->db, yymsp[0].minor.yy0.z, yymsp[0].minor.yy0.n); + yymsp[-1].minor.yy375 = (Window*)sqlite3DbMallocZero(pParse->db, sizeof(Window)); + if( yymsp[-1].minor.yy375 ){ + yymsp[-1].minor.yy375->zName = sqlite3DbStrNDup(pParse->db, yymsp[0].minor.yy0.z, yymsp[0].minor.yy0.n); } } break; - case 336: /* filter_clause ::= FILTER LP WHERE expr RP */ -{ yymsp[-4].minor.yy404 = yymsp[-1].minor.yy404; } + case 338: /* filter_clause ::= FILTER LP WHERE expr RP */ +{ yymsp[-4].minor.yy626 = yymsp[-1].minor.yy626; } break; default: - /* (337) input ::= cmdlist */ yytestcase(yyruleno==337); - /* (338) cmdlist ::= cmdlist ecmd */ yytestcase(yyruleno==338); - /* (339) cmdlist ::= ecmd (OPTIMIZED OUT) */ assert(yyruleno!=339); - /* (340) ecmd ::= SEMI */ yytestcase(yyruleno==340); - /* (341) ecmd ::= cmdx SEMI */ yytestcase(yyruleno==341); - /* (342) ecmd ::= explain cmdx SEMI (NEVER REDUCES) */ assert(yyruleno!=342); - /* (343) trans_opt ::= */ yytestcase(yyruleno==343); - /* (344) trans_opt ::= TRANSACTION */ yytestcase(yyruleno==344); - /* (345) trans_opt ::= TRANSACTION nm */ yytestcase(yyruleno==345); - /* (346) savepoint_opt ::= SAVEPOINT */ yytestcase(yyruleno==346); - /* (347) savepoint_opt ::= */ yytestcase(yyruleno==347); - /* (348) cmd ::= create_table create_table_args */ yytestcase(yyruleno==348); - /* (349) columnlist ::= columnlist COMMA columnname carglist */ yytestcase(yyruleno==349); - /* (350) columnlist ::= columnname carglist */ yytestcase(yyruleno==350); - /* (351) nm ::= ID|INDEXED */ yytestcase(yyruleno==351); - /* (352) nm ::= STRING */ yytestcase(yyruleno==352); - /* (353) nm ::= JOIN_KW */ yytestcase(yyruleno==353); - /* (354) typetoken ::= typename */ yytestcase(yyruleno==354); - /* (355) typename ::= ID|STRING */ yytestcase(yyruleno==355); - /* (356) signed ::= plus_num (OPTIMIZED OUT) */ assert(yyruleno!=356); - /* (357) signed ::= minus_num (OPTIMIZED OUT) */ assert(yyruleno!=357); - /* (358) carglist ::= carglist ccons */ yytestcase(yyruleno==358); - /* (359) carglist ::= */ yytestcase(yyruleno==359); - /* (360) ccons ::= NULL onconf */ yytestcase(yyruleno==360); - /* (361) ccons ::= GENERATED ALWAYS AS generated */ yytestcase(yyruleno==361); - /* (362) ccons ::= AS generated */ yytestcase(yyruleno==362); - /* (363) conslist_opt ::= COMMA conslist */ yytestcase(yyruleno==363); - /* (364) conslist ::= conslist tconscomma tcons */ yytestcase(yyruleno==364); - /* (365) conslist ::= tcons (OPTIMIZED OUT) */ assert(yyruleno!=365); - /* (366) tconscomma ::= */ yytestcase(yyruleno==366); - /* (367) defer_subclause_opt ::= defer_subclause (OPTIMIZED OUT) */ assert(yyruleno!=367); - /* (368) resolvetype ::= raisetype (OPTIMIZED OUT) */ assert(yyruleno!=368); - /* (369) selectnowith ::= oneselect (OPTIMIZED OUT) */ assert(yyruleno!=369); - /* (370) oneselect ::= values */ yytestcase(yyruleno==370); - /* (371) sclp ::= selcollist COMMA */ yytestcase(yyruleno==371); - /* (372) as ::= ID|STRING */ yytestcase(yyruleno==372); - /* (373) returning ::= */ yytestcase(yyruleno==373); - /* (374) expr ::= term (OPTIMIZED OUT) */ assert(yyruleno!=374); - /* (375) likeop ::= LIKE_KW|MATCH */ yytestcase(yyruleno==375); - /* (376) exprlist ::= nexprlist */ yytestcase(yyruleno==376); - /* (377) nmnum ::= plus_num (OPTIMIZED OUT) */ assert(yyruleno!=377); - /* 
(378) nmnum ::= nm (OPTIMIZED OUT) */ assert(yyruleno!=378); - /* (379) nmnum ::= ON */ yytestcase(yyruleno==379); - /* (380) nmnum ::= DELETE */ yytestcase(yyruleno==380); - /* (381) nmnum ::= DEFAULT */ yytestcase(yyruleno==381); - /* (382) plus_num ::= INTEGER|FLOAT */ yytestcase(yyruleno==382); - /* (383) foreach_clause ::= */ yytestcase(yyruleno==383); - /* (384) foreach_clause ::= FOR EACH ROW */ yytestcase(yyruleno==384); - /* (385) trnm ::= nm */ yytestcase(yyruleno==385); - /* (386) tridxby ::= */ yytestcase(yyruleno==386); - /* (387) database_kw_opt ::= DATABASE */ yytestcase(yyruleno==387); - /* (388) database_kw_opt ::= */ yytestcase(yyruleno==388); - /* (389) kwcolumn_opt ::= */ yytestcase(yyruleno==389); - /* (390) kwcolumn_opt ::= COLUMNKW */ yytestcase(yyruleno==390); - /* (391) vtabarglist ::= vtabarg */ yytestcase(yyruleno==391); - /* (392) vtabarglist ::= vtabarglist COMMA vtabarg */ yytestcase(yyruleno==392); - /* (393) vtabarg ::= vtabarg vtabargtoken */ yytestcase(yyruleno==393); - /* (394) anylist ::= */ yytestcase(yyruleno==394); - /* (395) anylist ::= anylist LP anylist RP */ yytestcase(yyruleno==395); - /* (396) anylist ::= anylist ANY */ yytestcase(yyruleno==396); - /* (397) with ::= */ yytestcase(yyruleno==397); + /* (339) input ::= cmdlist */ yytestcase(yyruleno==339); + /* (340) cmdlist ::= cmdlist ecmd */ yytestcase(yyruleno==340); + /* (341) cmdlist ::= ecmd (OPTIMIZED OUT) */ assert(yyruleno!=341); + /* (342) ecmd ::= SEMI */ yytestcase(yyruleno==342); + /* (343) ecmd ::= cmdx SEMI */ yytestcase(yyruleno==343); + /* (344) ecmd ::= explain cmdx SEMI (NEVER REDUCES) */ assert(yyruleno!=344); + /* (345) trans_opt ::= */ yytestcase(yyruleno==345); + /* (346) trans_opt ::= TRANSACTION */ yytestcase(yyruleno==346); + /* (347) trans_opt ::= TRANSACTION nm */ yytestcase(yyruleno==347); + /* (348) savepoint_opt ::= SAVEPOINT */ yytestcase(yyruleno==348); + /* (349) savepoint_opt ::= */ yytestcase(yyruleno==349); + /* (350) cmd ::= create_table create_table_args */ yytestcase(yyruleno==350); + /* (351) table_option_set ::= table_option (OPTIMIZED OUT) */ assert(yyruleno!=351); + /* (352) columnlist ::= columnlist COMMA columnname carglist */ yytestcase(yyruleno==352); + /* (353) columnlist ::= columnname carglist */ yytestcase(yyruleno==353); + /* (354) nm ::= ID|INDEXED */ yytestcase(yyruleno==354); + /* (355) nm ::= STRING */ yytestcase(yyruleno==355); + /* (356) nm ::= JOIN_KW */ yytestcase(yyruleno==356); + /* (357) typetoken ::= typename */ yytestcase(yyruleno==357); + /* (358) typename ::= ID|STRING */ yytestcase(yyruleno==358); + /* (359) signed ::= plus_num (OPTIMIZED OUT) */ assert(yyruleno!=359); + /* (360) signed ::= minus_num (OPTIMIZED OUT) */ assert(yyruleno!=360); + /* (361) carglist ::= carglist ccons */ yytestcase(yyruleno==361); + /* (362) carglist ::= */ yytestcase(yyruleno==362); + /* (363) ccons ::= NULL onconf */ yytestcase(yyruleno==363); + /* (364) ccons ::= GENERATED ALWAYS AS generated */ yytestcase(yyruleno==364); + /* (365) ccons ::= AS generated */ yytestcase(yyruleno==365); + /* (366) conslist_opt ::= COMMA conslist */ yytestcase(yyruleno==366); + /* (367) conslist ::= conslist tconscomma tcons */ yytestcase(yyruleno==367); + /* (368) conslist ::= tcons (OPTIMIZED OUT) */ assert(yyruleno!=368); + /* (369) tconscomma ::= */ yytestcase(yyruleno==369); + /* (370) defer_subclause_opt ::= defer_subclause (OPTIMIZED OUT) */ assert(yyruleno!=370); + /* (371) resolvetype ::= raisetype (OPTIMIZED OUT) */ assert(yyruleno!=371); + /* (372) 
selectnowith ::= oneselect (OPTIMIZED OUT) */ assert(yyruleno!=372); + /* (373) oneselect ::= values */ yytestcase(yyruleno==373); + /* (374) sclp ::= selcollist COMMA */ yytestcase(yyruleno==374); + /* (375) as ::= ID|STRING */ yytestcase(yyruleno==375); + /* (376) returning ::= */ yytestcase(yyruleno==376); + /* (377) expr ::= term (OPTIMIZED OUT) */ assert(yyruleno!=377); + /* (378) likeop ::= LIKE_KW|MATCH */ yytestcase(yyruleno==378); + /* (379) exprlist ::= nexprlist */ yytestcase(yyruleno==379); + /* (380) nmnum ::= plus_num (OPTIMIZED OUT) */ assert(yyruleno!=380); + /* (381) nmnum ::= nm (OPTIMIZED OUT) */ assert(yyruleno!=381); + /* (382) nmnum ::= ON */ yytestcase(yyruleno==382); + /* (383) nmnum ::= DELETE */ yytestcase(yyruleno==383); + /* (384) nmnum ::= DEFAULT */ yytestcase(yyruleno==384); + /* (385) plus_num ::= INTEGER|FLOAT */ yytestcase(yyruleno==385); + /* (386) foreach_clause ::= */ yytestcase(yyruleno==386); + /* (387) foreach_clause ::= FOR EACH ROW */ yytestcase(yyruleno==387); + /* (388) trnm ::= nm */ yytestcase(yyruleno==388); + /* (389) tridxby ::= */ yytestcase(yyruleno==389); + /* (390) database_kw_opt ::= DATABASE */ yytestcase(yyruleno==390); + /* (391) database_kw_opt ::= */ yytestcase(yyruleno==391); + /* (392) kwcolumn_opt ::= */ yytestcase(yyruleno==392); + /* (393) kwcolumn_opt ::= COLUMNKW */ yytestcase(yyruleno==393); + /* (394) vtabarglist ::= vtabarg */ yytestcase(yyruleno==394); + /* (395) vtabarglist ::= vtabarglist COMMA vtabarg */ yytestcase(yyruleno==395); + /* (396) vtabarg ::= vtabarg vtabargtoken */ yytestcase(yyruleno==396); + /* (397) anylist ::= */ yytestcase(yyruleno==397); + /* (398) anylist ::= anylist LP anylist RP */ yytestcase(yyruleno==398); + /* (399) anylist ::= anylist ANY */ yytestcase(yyruleno==399); + /* (400) with ::= */ yytestcase(yyruleno==400); break; /********** End reduce actions ************************************************/ }; @@ -163149,8 +164625,8 @@ SQLITE_PRIVATE void sqlite3Parser( yyact = yy_find_shift_action((YYCODETYPE)yymajor,yyact); if( yyact >= YY_MIN_REDUCE ){ unsigned int yyruleno = yyact - YY_MIN_REDUCE; /* Reduce by this rule */ - assert( yyruleno<(int)(sizeof(yyRuleName)/sizeof(yyRuleName[0])) ); #ifndef NDEBUG + assert( yyruleno<(int)(sizeof(yyRuleName)/sizeof(yyRuleName[0])) ); if( yyTraceFILE ){ int yysize = yyRuleInfoNRhs[yyruleno]; if( yysize ){ @@ -163248,14 +164724,13 @@ SQLITE_PRIVATE void sqlite3Parser( yy_destructor(yypParser, (YYCODETYPE)yymajor, &yyminorunion); yymajor = YYNOCODE; }else{ - while( yypParser->yytos >= yypParser->yystack - && (yyact = yy_find_reduce_action( - yypParser->yytos->stateno, - YYERRORSYMBOL)) > YY_MAX_SHIFTREDUCE - ){ + while( yypParser->yytos > yypParser->yystack ){ + yyact = yy_find_reduce_action(yypParser->yytos->stateno, + YYERRORSYMBOL); + if( yyact<=YY_MAX_SHIFTREDUCE ) break; yy_pop_parser_stack(yypParser); } - if( yypParser->yytos < yypParser->yystack || yymajor==0 ){ + if( yypParser->yytos <= yypParser->yystack || yymajor==0 ){ yy_destructor(yypParser,(YYCODETYPE)yymajor,&yyminorunion); yy_parse_failed(yypParser); #ifndef YYNOERRORRECOVERY @@ -164398,6 +165873,7 @@ SQLITE_PRIVATE int sqlite3RunParser(Parse *pParse, const char *zSql, char **pzEr int lastTokenParsed = -1; /* type of the previous token */ sqlite3 *db = pParse->db; /* The database connection */ int mxSqlLen; /* Max length of an SQL string */ + Parse *pParentParse = 0; /* Outer parse context, if any */ #ifdef sqlite3Parser_ENGINEALWAYSONSTACK yyParser sEngine; /* Space to hold the 
Lemon-generated Parser object */ #endif @@ -164433,7 +165909,7 @@ SQLITE_PRIVATE int sqlite3RunParser(Parse *pParse, const char *zSql, char **pzEr assert( pParse->pNewTrigger==0 ); assert( pParse->nVar==0 ); assert( pParse->pVList==0 ); - pParse->pParentParse = db->pParse; + pParentParse = db->pParse; db->pParse = pParse; while( 1 ){ n = sqlite3GetToken((u8*)zSql, &tokenType); @@ -164548,8 +166024,7 @@ SQLITE_PRIVATE int sqlite3RunParser(Parse *pParse, const char *zSql, char **pzEr sqlite3DeleteTrigger(db, pParse->pNewTrigger); } sqlite3DbFree(db, pParse->pVList); - db->pParse = pParse->pParentParse; - pParse->pParentParse = 0; + db->pParse = pParentParse; assert( nErr==0 || pParse->rc!=SQLITE_OK ); return nErr; } @@ -166166,7 +167641,7 @@ SQLITE_API void sqlite3_set_last_insert_rowid(sqlite3 *db, sqlite3_int64 iRowid) /* ** Return the number of changes in the most recent call to sqlite3_exec(). */ -SQLITE_API int sqlite3_changes(sqlite3 *db){ +SQLITE_API sqlite3_int64 sqlite3_changes64(sqlite3 *db){ #ifdef SQLITE_ENABLE_API_ARMOR if( !sqlite3SafetyCheckOk(db) ){ (void)SQLITE_MISUSE_BKPT; @@ -166175,11 +167650,14 @@ SQLITE_API int sqlite3_changes(sqlite3 *db){ #endif return db->nChange; } +SQLITE_API int sqlite3_changes(sqlite3 *db){ + return (int)sqlite3_changes64(db); +} /* ** Return the number of changes since the database handle was opened. */ -SQLITE_API int sqlite3_total_changes(sqlite3 *db){ +SQLITE_API sqlite3_int64 sqlite3_total_changes64(sqlite3 *db){ #ifdef SQLITE_ENABLE_API_ARMOR if( !sqlite3SafetyCheckOk(db) ){ (void)SQLITE_MISUSE_BKPT; @@ -166188,6 +167666,9 @@ SQLITE_API int sqlite3_total_changes(sqlite3 *db){ #endif return db->nTotalChange; } +SQLITE_API int sqlite3_total_changes(sqlite3 *db){ + return (int)sqlite3_total_changes64(db); +} /* ** Close all open savepoints. This function only manipulates fields of the @@ -166212,7 +167693,9 @@ SQLITE_PRIVATE void sqlite3CloseSavepoints(sqlite3 *db){ ** with SQLITE_ANY as the encoding. */ static void functionDestroy(sqlite3 *db, FuncDef *p){ - FuncDestructor *pDestructor = p->u.pDestructor; + FuncDestructor *pDestructor; + assert( (p->funcFlags & SQLITE_FUNC_BUILTIN)==0 ); + pDestructor = p->u.pDestructor; if( pDestructor ){ pDestructor->nRef--; if( pDestructor->nRef==0 ){ @@ -166316,7 +167799,7 @@ static int sqlite3Close(sqlite3 *db, int forceZombie){ /* Convert the connection into a zombie and then close it. */ - db->magic = SQLITE_MAGIC_ZOMBIE; + db->eOpenState = SQLITE_STATE_ZOMBIE; sqlite3LeaveMutexAndCloseZombie(db); return SQLITE_OK; } @@ -166380,7 +167863,7 @@ SQLITE_PRIVATE void sqlite3LeaveMutexAndCloseZombie(sqlite3 *db){ ** or if the connection has not yet been closed by sqlite3_close_v2(), ** then just leave the mutex and return. */ - if( db->magic!=SQLITE_MAGIC_ZOMBIE || connectionIsBusy(db) ){ + if( db->eOpenState!=SQLITE_STATE_ZOMBIE || connectionIsBusy(db) ){ sqlite3_mutex_leave(db->mutex); return; } @@ -166466,7 +167949,7 @@ SQLITE_PRIVATE void sqlite3LeaveMutexAndCloseZombie(sqlite3 *db){ sqlite3_free(db->auth.zAuthPW); #endif - db->magic = SQLITE_MAGIC_ERROR; + db->eOpenState = SQLITE_STATE_ERROR; /* The temp-database schema is allocated differently from the other schema ** objects (using sqliteMalloc() directly, instead of sqlite3BtreeSchema()). @@ -166475,8 +167958,11 @@ SQLITE_PRIVATE void sqlite3LeaveMutexAndCloseZombie(sqlite3 *db){ ** structure? 
*/ sqlite3DbFree(db, db->aDb[1].pSchema); + if( db->xAutovacDestr ){ + db->xAutovacDestr(db->pAutovacPagesArg); + } sqlite3_mutex_leave(db->mutex); - db->magic = SQLITE_MAGIC_CLOSED; + db->eOpenState = SQLITE_STATE_CLOSED; sqlite3_mutex_free(db->mutex); assert( sqlite3LookasideUsed(db,0)==0 ); if( db->lookaside.bMalloced ){ @@ -166529,7 +168015,7 @@ SQLITE_PRIVATE void sqlite3RollbackAll(sqlite3 *db, int tripCode){ /* Any deferred constraint violations have now been resolved. */ db->nDeferredCons = 0; db->nDeferredImmCons = 0; - db->flags &= ~(u64)SQLITE_DeferFKs; + db->flags &= ~(u64)(SQLITE_DeferFKs|SQLITE_CorruptRdOnly); /* If one has been configured, invoke the rollback-hook callback */ if( db->xRollbackCallback && (inTrans || !db->autoCommit) ){ @@ -166864,7 +168350,7 @@ SQLITE_API int sqlite3_busy_timeout(sqlite3 *db, int ms){ */ SQLITE_API void sqlite3_interrupt(sqlite3 *db){ #ifdef SQLITE_ENABLE_API_ARMOR - if( !sqlite3SafetyCheckOk(db) && (db==0 || db->magic!=SQLITE_MAGIC_ZOMBIE) ){ + if( !sqlite3SafetyCheckOk(db) && (db==0 || db->eOpenState!=SQLITE_STATE_ZOMBIE) ){ (void)SQLITE_MISUSE_BKPT; return; } @@ -166893,7 +168379,6 @@ SQLITE_PRIVATE int sqlite3CreateFunc( FuncDestructor *pDestructor ){ FuncDef *p; - int nName; int extraFlags; assert( sqlite3_mutex_held(db->mutex) ); @@ -166903,7 +168388,7 @@ SQLITE_PRIVATE int sqlite3CreateFunc( || ((xFinal==0)!=(xStep==0)) /* Both or neither of xFinal and xStep */ || ((xValue==0)!=(xInverse==0)) /* Both or neither of xValue, xInverse */ || (nArg<-1 || nArg>SQLITE_MAX_FUNCTION_ARG) - || (255<(nName = sqlite3Strlen30( zFunctionName))) + || (255nRef==0 ){ - assert( rc!=SQLITE_OK ); + assert( rc!=SQLITE_OK || (xStep==0 && xFinal==0) ); xDestroy(p); sqlite3_free(pArg); } @@ -167366,6 +168862,34 @@ SQLITE_API void *sqlite3_preupdate_hook( } #endif /* SQLITE_ENABLE_PREUPDATE_HOOK */ +/* +** Register a function to be invoked prior to each autovacuum that +** determines the number of pages to vacuum. +*/ +SQLITE_API int sqlite3_autovacuum_pages( + sqlite3 *db, /* Attach the hook to this database */ + unsigned int (*xCallback)(void*,const char*,u32,u32,u32), + void *pArg, /* Argument to the function */ + void (*xDestructor)(void*) /* Destructor for pArg */ +){ +#ifdef SQLITE_ENABLE_API_ARMOR + if( !sqlite3SafetyCheckOk(db) ){ + if( xDestructor ) xDestructor(pArg); + return SQLITE_MISUSE_BKPT; + } +#endif + sqlite3_mutex_enter(db->mutex); + if( db->xAutovacDestr ){ + db->xAutovacDestr(db->pAutovacPagesArg); + } + db->xAutovacPages = xCallback; + db->pAutovacPagesArg = pArg; + db->xAutovacDestr = xDestructor; + sqlite3_mutex_leave(db->mutex); + return SQLITE_OK; +} + + #ifndef SQLITE_OMIT_WAL /* ** The sqlite3_wal_hook() callback registered by sqlite3_wal_autocheckpoint(). @@ -168159,7 +169683,7 @@ SQLITE_PRIVATE int sqlite3ParseUri( */ static const char *uriParameter(const char *zFilename, const char *zParam){ zFilename += sqlite3Strlen30(zFilename) + 1; - while( zFilename[0] ){ + while( ALWAYS(zFilename!=0) && zFilename[0] ){ int x = strcmp(zFilename, zParam); zFilename += sqlite3Strlen30(zFilename) + 1; if( x==0 ) return zFilename; @@ -168219,8 +169743,8 @@ static int openDatabase( ** dealt with in the previous code block. Besides these, the only ** valid input flags for sqlite3_open_v2() are SQLITE_OPEN_READONLY, ** SQLITE_OPEN_READWRITE, SQLITE_OPEN_CREATE, SQLITE_OPEN_SHAREDCACHE, - ** SQLITE_OPEN_PRIVATECACHE, and some reserved bits. Silently mask - ** off all other flags. 
+ ** SQLITE_OPEN_PRIVATECACHE, SQLITE_OPEN_EXRESCODE, and some reserved + ** bits. Silently mask off all other flags. */ flags &= ~( SQLITE_OPEN_DELETEONCLOSE | SQLITE_OPEN_EXCLUSIVE | @@ -168255,9 +169779,9 @@ static int openDatabase( } } sqlite3_mutex_enter(db->mutex); - db->errMask = 0xff; + db->errMask = (flags & SQLITE_OPEN_EXRESCODE)!=0 ? 0xffffffff : 0xff; db->nDb = 2; - db->magic = SQLITE_MAGIC_BUSY; + db->eOpenState = SQLITE_STATE_BUSY; db->aDb = db->aDbStatic; db->lookaside.bDisable = 1; db->lookaside.sz = 0; @@ -168269,7 +169793,15 @@ static int openDatabase( db->nextAutovac = -1; db->szMmap = sqlite3GlobalConfig.szMmap; db->nextPagesize = 0; + db->init.azInit = sqlite3StdType; /* Any array of string ptrs will do */ +#ifdef SQLITE_ENABLE_SORTER_MMAP + /* Beginning with version 3.37.0, using the VFS xFetch() API to memory-map + ** the temporary files used to do external sorts (see code in vdbesort.c) + ** is disabled. It can still be used either by defining + ** SQLITE_ENABLE_SORTER_MMAP at compile time or by using the + ** SQLITE_TESTCTRL_SORTER_MMAP test-control at runtime. */ db->nMaxSorterMmap = 0x7FFFFFFF; +#endif db->flags |= SQLITE_ShortColNames | SQLITE_EnableTrigger | SQLITE_EnableView @@ -168417,7 +169949,7 @@ static int openDatabase( db->aDb[1].zDbSName = "temp"; db->aDb[1].safety_level = PAGER_SYNCHRONOUS_OFF; - db->magic = SQLITE_MAGIC_OPEN; + db->eOpenState = SQLITE_STATE_OPEN; if( db->mallocFailed ){ goto opendb_out; } @@ -168479,12 +170011,12 @@ opendb_out: sqlite3_mutex_leave(db->mutex); } rc = sqlite3_errcode(db); - assert( db!=0 || rc==SQLITE_NOMEM ); - if( rc==SQLITE_NOMEM ){ + assert( db!=0 || (rc&0xff)==SQLITE_NOMEM ); + if( (rc&0xff)==SQLITE_NOMEM ){ sqlite3_close(db); db = 0; }else if( rc!=SQLITE_OK ){ - db->magic = SQLITE_MAGIC_SICK; + db->eOpenState = SQLITE_STATE_SICK; } *ppDb = db; #ifdef SQLITE_ENABLE_SQLLOG @@ -168495,7 +170027,7 @@ opendb_out: } #endif sqlite3_free_filename(zOpen); - return rc & 0xff; + return rc; } @@ -168795,7 +170327,7 @@ SQLITE_API int sqlite3_table_column_metadata( /* Locate the table in question */ pTab = sqlite3FindTable(db, zTableName, zDbName); - if( !pTab || pTab->pSelect ){ + if( !pTab || IsView(pTab) ){ pTab = 0; goto error_out; } @@ -168806,7 +170338,7 @@ SQLITE_API int sqlite3_table_column_metadata( }else{ for(iCol=0; iColnCol; iCol++){ pCol = &pTab->aCol[iCol]; - if( 0==sqlite3StrICmp(pCol->zName, zColumnName) ){ + if( 0==sqlite3StrICmp(pCol->zCnName, zColumnName) ){ break; } } @@ -168833,7 +170365,7 @@ SQLITE_API int sqlite3_table_column_metadata( */ if( pCol ){ zDataType = sqlite3ColumnType(pCol,0); - zCollSeq = pCol->zColl; + zCollSeq = sqlite3ColumnColl(pCol); notnull = pCol->notNull!=0; primarykey = (pCol->colFlags & COLFLAG_PRIMKEY)!=0; autoinc = pTab->iPKey==iCol && (pTab->tabFlags & TF_Autoincrement)!=0; @@ -169500,7 +171032,7 @@ SQLITE_API const char *sqlite3_uri_key(const char *zFilename, int N){ if( zFilename==0 || N<0 ) return 0; zFilename = databaseName(zFilename); zFilename += sqlite3Strlen30(zFilename) + 1; - while( zFilename[0] && (N--)>0 ){ + while( ALWAYS(zFilename) && zFilename[0] && (N--)>0 ){ zFilename += sqlite3Strlen30(zFilename) + 1; zFilename += sqlite3Strlen30(zFilename) + 1; } @@ -169543,12 +171075,14 @@ SQLITE_API sqlite3_int64 sqlite3_uri_int64( ** corruption. 
*/ SQLITE_API const char *sqlite3_filename_database(const char *zFilename){ + if( zFilename==0 ) return 0; return databaseName(zFilename); } SQLITE_API const char *sqlite3_filename_journal(const char *zFilename){ + if( zFilename==0 ) return 0; zFilename = databaseName(zFilename); zFilename += sqlite3Strlen30(zFilename) + 1; - while( zFilename[0] ){ + while( ALWAYS(zFilename) && zFilename[0] ){ zFilename += sqlite3Strlen30(zFilename) + 1; zFilename += sqlite3Strlen30(zFilename) + 1; } @@ -169559,7 +171093,7 @@ SQLITE_API const char *sqlite3_filename_wal(const char *zFilename){ return 0; #else zFilename = sqlite3_filename_journal(zFilename); - zFilename += sqlite3Strlen30(zFilename) + 1; + if( zFilename ) zFilename += sqlite3Strlen30(zFilename) + 1; return zFilename; #endif } @@ -170852,17 +172386,18 @@ SQLITE_API extern int sqlite3_fts3_may_be_corrupt; ** Macros indicating that conditional expressions are always true or ** false. */ -#ifdef SQLITE_COVERAGE_TEST -# define ALWAYS(x) (1) -# define NEVER(X) (0) -#elif defined(SQLITE_DEBUG) -# define ALWAYS(x) sqlite3Fts3Always((x)!=0) -# define NEVER(x) sqlite3Fts3Never((x)!=0) -SQLITE_PRIVATE int sqlite3Fts3Always(int b); -SQLITE_PRIVATE int sqlite3Fts3Never(int b); +#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_MUTATION_TEST) +# define SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS 1 +#endif +#if defined(SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS) +# define ALWAYS(X) (1) +# define NEVER(X) (0) +#elif !defined(NDEBUG) +# define ALWAYS(X) ((X)?1:(assert(0),0)) +# define NEVER(X) ((X)?(assert(0),1):0) #else -# define ALWAYS(x) (x) -# define NEVER(x) (x) +# define ALWAYS(X) (X) +# define NEVER(X) (X) #endif /* @@ -171321,6 +172856,7 @@ SQLITE_PRIVATE void sqlite3Fts3ExprFree(Fts3Expr *); SQLITE_PRIVATE int sqlite3Fts3ExprInitTestInterface(sqlite3 *db, Fts3Hash*); SQLITE_PRIVATE int sqlite3Fts3InitTerm(sqlite3 *db); #endif +SQLITE_PRIVATE void *sqlite3Fts3MallocZero(i64 nByte); SQLITE_PRIVATE int sqlite3Fts3OpenTokenizer(sqlite3_tokenizer *, int, const char *, int, sqlite3_tokenizer_cursor ** @@ -171378,13 +172914,6 @@ static int fts3EvalStart(Fts3Cursor *pCsr); static int fts3TermSegReaderCursor( Fts3Cursor *, const char *, int, int, Fts3MultiSegReader **); -#ifndef SQLITE_AMALGAMATION -# if defined(SQLITE_DEBUG) -SQLITE_PRIVATE int sqlite3Fts3Always(int b) { assert( b ); return b; } -SQLITE_PRIVATE int sqlite3Fts3Never(int b) { assert( !b ); return b; } -# endif -#endif - /* ** This variable is set to false when running tests for which the on disk ** structures should not be corrupt. Otherwise, true. 
If it is false, extra @@ -176409,8 +177938,8 @@ static void fts3EvalNextRow( Fts3Expr *pRight = pExpr->pRight; sqlite3_int64 iCmp = DOCID_CMP(pLeft->iDocid, pRight->iDocid); - assert( pLeft->bStart || pLeft->iDocid==pRight->iDocid ); - assert( pRight->bStart || pLeft->iDocid==pRight->iDocid ); + assert_fts3_nc( pLeft->bStart || pLeft->iDocid==pRight->iDocid ); + assert_fts3_nc( pRight->bStart || pLeft->iDocid==pRight->iDocid ); if( pRight->bEof || (pLeft->bEof==0 && iCmp<0) ){ fts3EvalNextRow(pCsr, pLeft, pRc); @@ -177048,6 +178577,9 @@ SQLITE_PRIVATE int sqlite3Fts3EvalPhrasePoslist( if( bEofSave==0 && pNear->iDocid==iDocid ) break; } assert( rc!=SQLITE_OK || pPhrase->bIncr==0 ); + if( rc==SQLITE_OK && pNear->bEof!=bEofSave ){ + rc = FTS_CORRUPT_VTAB; + } } if( bTreeEof ){ while( rc==SQLITE_OK && !pNear->bEof ){ @@ -177470,6 +179002,7 @@ static int fts3auxNextMethod(sqlite3_vtab_cursor *pCursor){ if( fts3auxGrowStatArray(pCsr, 2) ) return SQLITE_NOMEM; memset(pCsr->aStat, 0, sizeof(struct Fts3auxColstats) * pCsr->nStat); iCol = 0; + rc = SQLITE_OK; while( iaStat[iCol+1].nDoc++; eState = 2; @@ -177521,7 +179058,6 @@ static int fts3auxNextMethod(sqlite3_vtab_cursor *pCursor){ } pCsr->iCol = 0; - rc = SQLITE_OK; }else{ pCsr->isEof = 1; } @@ -177850,7 +179386,7 @@ static int fts3isspace(char c){ ** zero the memory before returning a pointer to it. If unsuccessful, ** return NULL. */ -static void *fts3MallocZero(sqlite3_int64 nByte){ +SQLITE_PRIVATE void *sqlite3Fts3MallocZero(sqlite3_int64 nByte){ void *pRet = sqlite3_malloc64(nByte); if( pRet ) memset(pRet, 0, nByte); return pRet; @@ -177931,7 +179467,7 @@ static int getNextToken( rc = pModule->xNext(pCursor, &zToken, &nToken, &iStart, &iEnd, &iPosition); if( rc==SQLITE_OK ){ nByte = sizeof(Fts3Expr) + sizeof(Fts3Phrase) + nToken; - pRet = (Fts3Expr *)fts3MallocZero(nByte); + pRet = (Fts3Expr *)sqlite3Fts3MallocZero(nByte); if( !pRet ){ rc = SQLITE_NOMEM; }else{ @@ -178186,7 +179722,7 @@ static int getNextNode( if( fts3isspace(cNext) || cNext=='"' || cNext=='(' || cNext==')' || cNext==0 ){ - pRet = (Fts3Expr *)fts3MallocZero(sizeof(Fts3Expr)); + pRet = (Fts3Expr *)sqlite3Fts3MallocZero(sizeof(Fts3Expr)); if( !pRet ){ return SQLITE_NOMEM; } @@ -178365,7 +179901,7 @@ static int fts3ExprParse( && p->eType==FTSQUERY_PHRASE && pParse->isNot ){ /* Create an implicit NOT operator. */ - Fts3Expr *pNot = fts3MallocZero(sizeof(Fts3Expr)); + Fts3Expr *pNot = sqlite3Fts3MallocZero(sizeof(Fts3Expr)); if( !pNot ){ sqlite3Fts3ExprFree(p); rc = SQLITE_NOMEM; @@ -178399,7 +179935,7 @@ static int fts3ExprParse( /* Insert an implicit AND operator. 
*/ Fts3Expr *pAnd; assert( pRet && pPrev ); - pAnd = fts3MallocZero(sizeof(Fts3Expr)); + pAnd = sqlite3Fts3MallocZero(sizeof(Fts3Expr)); if( !pAnd ){ sqlite3Fts3ExprFree(p); rc = SQLITE_NOMEM; @@ -182629,8 +184165,18 @@ static int fts3SegReaderNext( char *aCopy; PendingList *pList = (PendingList *)fts3HashData(pElem); int nCopy = pList->nData+1; - pReader->zTerm = (char *)fts3HashKey(pElem); - pReader->nTerm = fts3HashKeysize(pElem); + + int nTerm = fts3HashKeysize(pElem); + if( (nTerm+1)>pReader->nTermAlloc ){ + sqlite3_free(pReader->zTerm); + pReader->zTerm = (char*)sqlite3_malloc((nTerm+1)*2); + if( !pReader->zTerm ) return SQLITE_NOMEM; + pReader->nTermAlloc = (nTerm+1)*2; + } + memcpy(pReader->zTerm, fts3HashKey(pElem), nTerm); + pReader->zTerm[nTerm] = '\0'; + pReader->nTerm = nTerm; + aCopy = (char*)sqlite3_malloc(nCopy); if( !aCopy ) return SQLITE_NOMEM; memcpy(aCopy, pList->aData, nCopy); @@ -182883,9 +184429,7 @@ SQLITE_PRIVATE int sqlite3Fts3MsrOvfl( */ SQLITE_PRIVATE void sqlite3Fts3SegReaderFree(Fts3SegReader *pReader){ if( pReader ){ - if( !fts3SegReaderIsPending(pReader) ){ - sqlite3_free(pReader->zTerm); - } + sqlite3_free(pReader->zTerm); if( !fts3SegReaderIsRootOnly(pReader) ){ sqlite3_free(pReader->aNode); } @@ -185077,7 +186621,7 @@ static int nodeReaderNext(NodeReader *p){ return FTS_CORRUPT_VTAB; } blobGrowBuffer(&p->term, nPrefix+nSuffix, &rc); - if( rc==SQLITE_OK ){ + if( rc==SQLITE_OK && ALWAYS(p->term.a!=0) ){ memcpy(&p->term.a[nPrefix], &p->aNode[p->iOff], nSuffix); p->term.n = nPrefix+nSuffix; p->iOff += nSuffix; @@ -185471,7 +187015,11 @@ static int fts3TermCmp( int nCmp = MIN(nLhs, nRhs); int res; - res = (nCmp ? memcmp(zLhs, zRhs, nCmp) : 0); + if( nCmp && ALWAYS(zLhs) && ALWAYS(zRhs) ){ + res = memcmp(zLhs, zRhs, nCmp); + }else{ + res = 0; + } if( res==0 ) res = nLhs - nRhs; return res; @@ -186115,7 +187663,7 @@ static int fts3IncrmergeHintLoad(Fts3Table *p, Blob *pHint){ if( aHint ){ blobGrowBuffer(pHint, nHint, &rc); if( rc==SQLITE_OK ){ - memcpy(pHint->a, aHint, nHint); + if( ALWAYS(pHint->a!=0) ) memcpy(pHint->a, aHint, nHint); pHint->n = nHint; } } @@ -187232,9 +188780,8 @@ static MatchinfoBuffer *fts3MIBufferNew(size_t nElem, const char *zMatchinfo){ + sizeof(MatchinfoBuffer); sqlite3_int64 nStr = strlen(zMatchinfo); - pRet = sqlite3_malloc64(nByte + nStr+1); + pRet = sqlite3Fts3MallocZero(nByte + nStr+1); if( pRet ){ - memset(pRet, 0, nByte); pRet->aMatchinfo[0] = (u8*)(&pRet->aMatchinfo[1]) - (u8*)pRet; pRet->aMatchinfo[1+nElem] = pRet->aMatchinfo[0] + sizeof(u32)*((int)nElem+1); @@ -187638,11 +189185,10 @@ static int fts3BestSnippet( ** the required space using malloc(). */ nByte = sizeof(SnippetPhrase) * nList; - sIter.aPhrase = (SnippetPhrase *)sqlite3_malloc64(nByte); + sIter.aPhrase = (SnippetPhrase *)sqlite3Fts3MallocZero(nByte); if( !sIter.aPhrase ){ return SQLITE_NOMEM; } - memset(sIter.aPhrase, 0, nByte); /* Initialize the contents of the SnippetIter object. Then iterate through ** the set of phrases in the expression to populate the aPhrase[] array. @@ -188206,10 +189752,12 @@ static int fts3MatchinfoLcsCb( ** position list for the next column. 
*/ static int fts3LcsIteratorAdvance(LcsIterator *pIter){ - char *pRead = pIter->pRead; + char *pRead; sqlite3_int64 iRead; int rc = 0; + if( NEVER(pIter==0) ) return 1; + pRead = pIter->pRead; pRead += sqlite3Fts3GetVarint(pRead, &iRead); if( iRead==0 || iRead==1 ){ pRead = 0; @@ -188243,9 +189791,8 @@ static int fts3MatchinfoLcs(Fts3Cursor *pCsr, MatchInfo *pInfo){ /* Allocate and populate the array of LcsIterator objects. The array ** contains one element for each matchable phrase in the query. **/ - aIter = sqlite3_malloc64(sizeof(LcsIterator) * pCsr->nPhrase); + aIter = sqlite3Fts3MallocZero(sizeof(LcsIterator) * pCsr->nPhrase); if( !aIter ) return SQLITE_NOMEM; - memset(aIter, 0, sizeof(LcsIterator) * pCsr->nPhrase); (void)fts3ExprIterate(pCsr->pExpr, fts3MatchinfoLcsCb, (void*)aIter); for(i=0; inPhrase; i++){ @@ -188706,7 +190253,7 @@ SQLITE_PRIVATE void sqlite3Fts3Offsets( if( rc!=SQLITE_OK ) goto offsets_out; /* Allocate the array of TermOffset iterators. */ - sCtx.aTerm = (TermOffset *)sqlite3_malloc64(sizeof(TermOffset)*nToken); + sCtx.aTerm = (TermOffset *)sqlite3Fts3MallocZero(sizeof(TermOffset)*nToken); if( 0==sCtx.aTerm ){ rc = SQLITE_NOMEM; goto offsets_out; @@ -188727,13 +190274,13 @@ SQLITE_PRIVATE void sqlite3Fts3Offsets( const char *zDoc; int nDoc; - /* Initialize the contents of sCtx.aTerm[] for column iCol. There is - ** no way that this operation can fail, so the return code from - ** fts3ExprIterate() can be discarded. + /* Initialize the contents of sCtx.aTerm[] for column iCol. This + ** operation may fail if the database contains corrupt records. */ sCtx.iCol = iCol; sCtx.iTerm = 0; - (void)fts3ExprIterate(pCsr->pExpr, fts3ExprTermOffsetInit, (void*)&sCtx); + rc = fts3ExprIterate(pCsr->pExpr, fts3ExprTermOffsetInit, (void*)&sCtx); + if( rc!=SQLITE_OK ) goto offsets_out; /* Retreive the text stored in column iCol. If an SQL NULL is stored ** in column iCol, jump immediately to the next iteration of the loop. @@ -189731,7 +191278,34 @@ static const char jsonIsSpace[] = { typedef unsigned int u32; typedef unsigned short int u16; typedef unsigned char u8; +# if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_MUTATION_TEST) +# define SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS 1 +# endif +# if defined(SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS) +# define ALWAYS(X) (1) +# define NEVER(X) (0) +# elif !defined(NDEBUG) +# define ALWAYS(X) ((X)?1:(assert(0),0)) +# define NEVER(X) ((X)?(assert(0),1):0) +# else +# define ALWAYS(X) (X) +# define NEVER(X) (X) +# endif +# define testcase(X) #endif +#if !defined(SQLITE_DEBUG) && !defined(SQLITE_COVERAGE_TEST) +# define VVA(X) +#else +# define VVA(X) X +#endif + +/* +** Some of the testcase() macros in this file are problematic for gcov +** in that they generate false-miss errors randomly. This is a gcov problem, +** not a problem in this case. But to work around it, we disable the +** problematic test cases for production builds. 
+*/ +#define json_testcase(X) /* Objects */ typedef struct JsonString JsonString; @@ -189789,13 +191363,14 @@ static const char * const jsonType[] = { struct JsonNode { u8 eType; /* One of the JSON_ type values */ u8 jnFlags; /* JNODE flags */ + u8 eU; /* Which union element to use */ u32 n; /* Bytes of content, or number of sub-nodes */ union { - const char *zJContent; /* Content for INT, REAL, and STRING */ - u32 iAppend; /* More terms for ARRAY and OBJECT */ - u32 iKey; /* Key for ARRAY objects in json_tree() */ - u32 iReplace; /* Replacement content for JNODE_REPLACE */ - JsonNode *pPatch; /* Node chain of patch for JNODE_PATCH */ + const char *zJContent; /* 1: Content for INT, REAL, and STRING */ + u32 iAppend; /* 2: More terms for ARRAY and OBJECT */ + u32 iKey; /* 3: Key for ARRAY objects in json_tree() */ + u32 iReplace; /* 4: Replacement content for JNODE_REPLACE */ + JsonNode *pPatch; /* 5: Node chain of patch for JNODE_PATCH */ } u; }; @@ -190073,11 +191648,14 @@ static void jsonRenderNode( JsonString *pOut, /* Write JSON here */ sqlite3_value **aReplace /* Replacement values */ ){ + assert( pNode!=0 ); if( pNode->jnFlags & (JNODE_REPLACE|JNODE_PATCH) ){ - if( pNode->jnFlags & JNODE_REPLACE ){ + if( (pNode->jnFlags & JNODE_REPLACE)!=0 && ALWAYS(aReplace!=0) ){ + assert( pNode->eU==4 ); jsonAppendValue(pOut, aReplace[pNode->u.iReplace]); return; } + assert( pNode->eU==5 ); pNode = pNode->u.pPatch; } switch( pNode->eType ){ @@ -190096,6 +191674,7 @@ static void jsonRenderNode( } case JSON_STRING: { if( pNode->jnFlags & JNODE_RAW ){ + assert( pNode->eU==1 ); jsonAppendString(pOut, pNode->u.zJContent, pNode->n); break; } @@ -190103,6 +191682,7 @@ static void jsonRenderNode( } case JSON_REAL: case JSON_INT: { + assert( pNode->eU==1 ); jsonAppendRaw(pOut, pNode->u.zJContent, pNode->n); break; } @@ -190118,6 +191698,7 @@ static void jsonRenderNode( j += jsonNodeSize(&pNode[j]); } if( (pNode->jnFlags & JNODE_APPEND)==0 ) break; + assert( pNode->eU==2 ); pNode = &pNode[pNode->u.iAppend]; j = 1; } @@ -190138,6 +191719,7 @@ static void jsonRenderNode( j += 1 + jsonNodeSize(&pNode[j+1]); } if( (pNode->jnFlags & JNODE_APPEND)==0 ) break; + assert( pNode->eU==2 ); pNode = &pNode[pNode->u.iAppend]; j = 1; } @@ -190217,7 +191799,9 @@ static void jsonReturn( } case JSON_INT: { sqlite3_int64 i = 0; - const char *z = pNode->u.zJContent; + const char *z; + assert( pNode->eU==1 ); + z = pNode->u.zJContent; if( z[0]=='-' ){ z++; } while( z[0]>='0' && z[0]<='9' ){ unsigned v = *(z++) - '0'; @@ -190240,14 +191824,17 @@ static void jsonReturn( sqlite3_result_int64(pCtx, i); int_done: break; - int_as_real: i=0; /* no break */ deliberate_fall_through + int_as_real: ; /* no break */ deliberate_fall_through } case JSON_REAL: { double r; #ifdef SQLITE_AMALGAMATION - const char *z = pNode->u.zJContent; + const char *z; + assert( pNode->eU==1 ); + z = pNode->u.zJContent; sqlite3AtoF(z, &r, sqlite3Strlen30(z), SQLITE_UTF8); #else + assert( pNode->eU==1 ); r = strtod(pNode->u.zJContent, 0); #endif sqlite3_result_double(pCtx, r); @@ -190258,6 +191845,7 @@ static void jsonReturn( ** json_insert() and json_replace() and those routines do not ** call jsonReturn() */ if( pNode->jnFlags & JNODE_RAW ){ + assert( pNode->eU==1 ); sqlite3_result_text(pCtx, pNode->u.zJContent, pNode->n, SQLITE_TRANSIENT); }else @@ -190265,15 +191853,18 @@ static void jsonReturn( assert( (pNode->jnFlags & JNODE_RAW)==0 ); if( (pNode->jnFlags & JNODE_ESCAPE)==0 ){ /* JSON formatted without any backslash-escapes */ + assert( pNode->eU==1 ); 
sqlite3_result_text(pCtx, pNode->u.zJContent+1, pNode->n-2, SQLITE_TRANSIENT); }else{ /* Translate JSON formatted string into raw text */ u32 i; u32 n = pNode->n; - const char *z = pNode->u.zJContent; + const char *z; char *zOut; u32 j; + assert( pNode->eU==1 ); + z = pNode->u.zJContent; zOut = sqlite3_malloc( n+1 ); if( zOut==0 ){ sqlite3_result_error_nomem(pCtx); @@ -190394,12 +191985,13 @@ static int jsonParseAddNode( const char *zContent /* Content */ ){ JsonNode *p; - if( pParse->nNode>=pParse->nAlloc ){ + if( pParse->aNode==0 || pParse->nNode>=pParse->nAlloc ){ return jsonParseAddNodeExpand(pParse, eType, n, zContent); } p = &pParse->aNode[pParse->nNode]; p->eType = (u8)eType; p->jnFlags = 0; + VVA( p->eU = zContent ? 1 : 0 ); p->n = n; p->u.zJContent = zContent; return pParse->nNode++; @@ -190467,6 +192059,7 @@ static int jsonParseValue(JsonParse *pParse, u32 i){ /* Parse array */ iThis = jsonParseAddNode(pParse, JSON_ARRAY, 0, 0); if( iThis<0 ) return -1; + memset(&pParse->aNode[iThis].u, 0, sizeof(pParse->aNode[iThis].u)); for(j=i+1;;j++){ while( safe_isspace(z[j]) ){ j++; } if( ++pParse->iDepth > JSON_MAX_DEPTH ) return -1; @@ -190731,6 +192324,7 @@ static JsonParse *jsonParseCached( ** a match. */ static int jsonLabelCompare(JsonNode *pNode, const char *zKey, u32 nKey){ + assert( pNode->eU==1 ); if( pNode->jnFlags & JNODE_RAW ){ if( pNode->n!=nKey ) return 0; return strncmp(pNode->u.zJContent, zKey, nKey)==0; @@ -190796,6 +192390,7 @@ static JsonNode *jsonLookupStep( j += jsonNodeSize(&pRoot[j]); } if( (pRoot->jnFlags & JNODE_APPEND)==0 ) break; + assert( pRoot->eU==2 ); iRoot += pRoot->u.iAppend; pRoot = &pParse->aNode[iRoot]; j = 1; @@ -190810,8 +192405,10 @@ static JsonNode *jsonLookupStep( if( pParse->oom ) return 0; if( pNode ){ pRoot = &pParse->aNode[iRoot]; + assert( pRoot->eU==0 ); pRoot->u.iAppend = iStart - iRoot; pRoot->jnFlags |= JNODE_APPEND; + VVA( pRoot->eU = 2 ); pParse->aNode[iLabel].jnFlags |= JNODE_RAW; } return pNode; @@ -190834,6 +192431,7 @@ static JsonNode *jsonLookupStep( j += jsonNodeSize(&pBase[j]); } if( (pBase->jnFlags & JNODE_APPEND)==0 ) break; + assert( pBase->eU==2 ); iBase += pBase->u.iAppend; pBase = &pParse->aNode[iBase]; j = 1; @@ -190867,6 +192465,7 @@ static JsonNode *jsonLookupStep( j += jsonNodeSize(&pRoot[j]); } if( (pRoot->jnFlags & JNODE_APPEND)==0 ) break; + assert( pRoot->eU==2 ); iRoot += pRoot->u.iAppend; pRoot = &pParse->aNode[iRoot]; j = 1; @@ -190882,8 +192481,10 @@ static JsonNode *jsonLookupStep( if( pParse->oom ) return 0; if( pNode ){ pRoot = &pParse->aNode[iRoot]; + assert( pRoot->eU==0 ); pRoot->u.iAppend = iStart - iRoot; pRoot->jnFlags |= JNODE_APPEND; + VVA( pRoot->eU = 2 ); } return pNode; } @@ -191037,9 +192638,13 @@ static void jsonParseFunc( } jsonPrintf(100, &s,"node %3u: %7s n=%-4d up=%-4d", i, zType, x.aNode[i].n, x.aUp[i]); + assert( x.aNode[i].eU==0 || x.aNode[i].eU==1 ); if( x.aNode[i].u.zJContent!=0 ){ + assert( x.aNode[i].eU==1 ); jsonAppendRaw(&s, " ", 1); jsonAppendRaw(&s, x.aNode[i].u.zJContent, x.aNode[i].n); + }else{ + assert( x.aNode[i].eU==0 ); } jsonAppendRaw(&s, "\n", 1); } @@ -191222,6 +192827,7 @@ static JsonNode *jsonMergePatch( const char *zKey; assert( pPatch[i].eType==JSON_STRING ); assert( pPatch[i].jnFlags & JNODE_LABEL ); + assert( pPatch[i].eU==1 ); nKey = pPatch[i].n; zKey = pPatch[i].u.zJContent; assert( (pPatch[i].jnFlags & JNODE_RAW)==0 ); @@ -191238,6 +192844,12 @@ static JsonNode *jsonMergePatch( if( pNew==0 ) return 0; pTarget = &pParse->aNode[iTarget]; if( pNew!=&pTarget[j+1] ){ + 
assert( pTarget[j+1].eU==0 + || pTarget[j+1].eU==1 + || pTarget[j+1].eU==2 ); + testcase( pTarget[j+1].eU==1 ); + testcase( pTarget[j+1].eU==2 ); + VVA( pTarget[j+1].eU = 5 ); pTarget[j+1].u.pPatch = pNew; pTarget[j+1].jnFlags |= JNODE_PATCH; } @@ -191253,9 +192865,14 @@ static JsonNode *jsonMergePatch( if( pParse->oom ) return 0; jsonRemoveAllNulls(pPatch); pTarget = &pParse->aNode[iTarget]; + assert( pParse->aNode[iRoot].eU==0 || pParse->aNode[iRoot].eU==2 ); + testcase( pParse->aNode[iRoot].eU==2 ); pParse->aNode[iRoot].jnFlags |= JNODE_APPEND; + VVA( pParse->aNode[iRoot].eU = 2 ); pParse->aNode[iRoot].u.iAppend = iStart - iRoot; iRoot = iStart; + assert( pParse->aNode[iPatch].eU==0 ); + VVA( pParse->aNode[iPatch].eU = 5 ); pParse->aNode[iPatch].jnFlags |= JNODE_PATCH; pParse->aNode[iPatch].u.pPatch = &pPatch[i+1]; } @@ -191397,11 +193014,15 @@ static void jsonReplaceFunc( pNode = jsonLookup(&x, zPath, 0, ctx); if( x.nErr ) goto replace_err; if( pNode ){ + assert( pNode->eU==0 || pNode->eU==1 || pNode->eU==4 ); + json_testcase( pNode->eU!=0 && pNode->eU!=1 ); pNode->jnFlags |= (u8)JNODE_REPLACE; + VVA( pNode->eU = 4 ); pNode->u.iReplace = i + 1; } } if( x.aNode[0].jnFlags & JNODE_REPLACE ){ + assert( x.aNode[0].eU==4 ); sqlite3_result_value(ctx, argv[x.aNode[0].u.iReplace]); }else{ jsonReturnJson(x.aNode, ctx, argv); @@ -191451,11 +193072,15 @@ static void jsonSetFunc( }else if( x.nErr ){ goto jsonSetDone; }else if( pNode && (bApnd || bIsSet) ){ + json_testcase( pNode->eU!=0 && pNode->eU!=1 && pNode->eU!=4 ); + assert( pNode->eU!=3 || pNode->eU!=5 ); + VVA( pNode->eU = 4 ); pNode->jnFlags |= (u8)JNODE_REPLACE; pNode->u.iReplace = i + 1; } } if( x.aNode[0].jnFlags & JNODE_REPLACE ){ + assert( x.aNode[0].eU==4 ); sqlite3_result_value(ctx, argv[x.aNode[0].u.iReplace]); }else{ jsonReturnJson(x.aNode, ctx, argv); @@ -191806,6 +193431,9 @@ static int jsonEachNext(sqlite3_vtab_cursor *cur){ JsonNode *pUp = &p->sParse.aNode[iUp]; p->eType = pUp->eType; if( pUp->eType==JSON_ARRAY ){ + assert( pUp->eU==0 || pUp->eU==3 ); + json_testcase( pUp->eU==3 ); + VVA( pUp->eU = 3 ); if( iUp==p->i-1 ){ pUp->u.iKey = 0; }else{ @@ -191852,12 +193480,15 @@ static void jsonEachComputePath( pNode = &p->sParse.aNode[i]; pUp = &p->sParse.aNode[iUp]; if( pUp->eType==JSON_ARRAY ){ + assert( pUp->eU==3 || (pUp->eU==0 && pUp->u.iKey==0) ); + testcase( pUp->eU==0 ); jsonPrintf(30, pStr, "[%d]", pUp->u.iKey); }else{ assert( pUp->eType==JSON_OBJECT ); if( (pNode->jnFlags & JNODE_LABEL)==0 ) pNode--; assert( pNode->eType==JSON_STRING ); assert( pNode->jnFlags & JNODE_LABEL ); + assert( pNode->eU==1 ); jsonPrintf(pNode->n+1, pStr, ".%.*s", pNode->n-2, pNode->u.zJContent+1); } } @@ -191879,6 +193510,7 @@ static int jsonEachColumn( u32 iKey; if( p->bRecursive ){ if( p->iRowid==0 ) break; + assert( p->sParse.aNode[p->sParse.aUp[p->i]].eU==3 ); iKey = p->sParse.aNode[p->sParse.aUp[p->i]].u.iKey; }else{ iKey = p->iRowid; @@ -191928,6 +193560,7 @@ static int jsonEachColumn( if( p->eType==JSON_ARRAY ){ jsonPrintf(30, &x, "[%d]", p->iRowid); }else if( p->eType==JSON_OBJECT ){ + assert( pThis->eU==1 ); jsonPrintf(pThis->n, &x, ".%.*s", pThis->n-2, pThis->u.zJContent+1); } } @@ -191995,6 +193628,7 @@ static int jsonEachBestIndex( if( pConstraint->iColumn < JEACH_JSON ) continue; iCol = pConstraint->iColumn - JEACH_JSON; assert( iCol==0 || iCol==1 ); + testcase( iCol==0 ); iMask = 1 << iCol; if( pConstraint->usable==0 ){ unusableMask |= iMask; @@ -192092,6 +193726,8 @@ static int jsonEachFilter( p->iBegin = p->i = (int)(pNode - 
p->sParse.aNode); p->eType = pNode->eType; if( p->eType>=JSON_ARRAY ){ + assert( pNode->eU==0 ); + VVA( pNode->eU = 3 ); pNode->u.iKey = 0; p->iEnd = p->i + pNode->n + 1; if( p->bRecursive ){ @@ -192334,7 +193970,11 @@ SQLITE_API int sqlite3_json_init( #endif SQLITE_PRIVATE int sqlite3GetToken(const unsigned char*,int*); /* In the SQLite core */ -#ifndef SQLITE_AMALGAMATION +/* +** If building separately, we will need some setup that is normally +** found in sqliteInt.h +*/ +#if !defined(SQLITE_AMALGAMATION) #include "sqlite3rtree.h" typedef sqlite3_int64 i64; typedef sqlite3_uint64 u64; @@ -192347,7 +193987,20 @@ typedef unsigned int u32; #if defined(NDEBUG) && defined(SQLITE_DEBUG) # undef NDEBUG #endif +#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_MUTATION_TEST) +# define SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS 1 #endif +#if defined(SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS) +# define ALWAYS(X) (1) +# define NEVER(X) (0) +#elif !defined(NDEBUG) +# define ALWAYS(X) ((X)?1:(assert(0),0)) +# define NEVER(X) ((X)?(assert(0),1):0) +#else +# define ALWAYS(X) (X) +# define NEVER(X) (X) +#endif +#endif /* !defined(SQLITE_AMALGAMATION) */ /* #include */ /* #include */ @@ -192405,7 +194058,9 @@ struct Rtree { u8 nBytesPerCell; /* Bytes consumed per cell */ u8 inWrTrans; /* True if inside write transaction */ u8 nAux; /* # of auxiliary columns in %_rowid */ +#ifdef SQLITE_ENABLE_GEOPOLY u8 nAuxNotNull; /* Number of initial not-null aux columns */ +#endif #ifdef SQLITE_DEBUG u8 bCorrupt; /* Shadow table corruption detected */ #endif @@ -192687,7 +194342,12 @@ struct RtreeMatchArg { ** it is not, make it a no-op. */ #ifndef SQLITE_AMALGAMATION -# define testcase(X) +# if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_DEBUG) + unsigned int sqlite3RtreeTestcase = 0; +# define testcase(X) if( X ){ sqlite3RtreeTestcase += __LINE__; } +# else +# define testcase(X) +# endif #endif /* @@ -192936,18 +194596,6 @@ static void nodeBlobReset(Rtree *pRtree){ } } -/* -** Check to see if pNode is the same as pParent or any of the parents -** of pParent. -*/ -static int nodeInParentChain(const RtreeNode *pNode, const RtreeNode *pParent){ - do{ - if( pNode==pParent ) return 1; - pParent = pParent->pParent; - }while( pParent ); - return 0; -} - /* ** Obtain a reference to an r-tree node. */ @@ -192964,14 +194612,7 @@ static int nodeAcquire( ** increase its reference count and return it. */ if( (pNode = nodeHashLookup(pRtree, iNode))!=0 ){ - if( pParent && !pNode->pParent ){ - if( nodeInParentChain(pNode, pParent) ){ - RTREE_IS_CORRUPT(pRtree); - return SQLITE_CORRUPT_VTAB; - } - pParent->nRef++; - pNode->pParent = pParent; - }else if( pParent && pNode->pParent && pParent!=pNode->pParent ){ + if( pParent && pParent!=pNode->pParent ){ RTREE_IS_CORRUPT(pRtree); return SQLITE_CORRUPT_VTAB; } @@ -193029,7 +194670,7 @@ static int nodeAcquire( ** are the leaves, and so on. If the depth as specified on the root node ** is greater than RTREE_MAX_DEPTH, the r-tree structure must be corrupt. 
*/ - if( pNode && rc==SQLITE_OK && iNode==1 ){ + if( rc==SQLITE_OK && pNode && iNode==1 ){ pRtree->iDepth = readInt16(pNode->zData); if( pRtree->iDepth>RTREE_MAX_DEPTH ){ rc = SQLITE_CORRUPT_VTAB; @@ -193552,20 +195193,29 @@ static void rtreeNonleafConstraint( switch( p->op ){ case RTREE_TRUE: return; /* Always satisfied */ case RTREE_FALSE: break; /* Never satisfied */ - case RTREE_LE: - case RTREE_LT: case RTREE_EQ: + RTREE_DECODE_COORD(eInt, pCellData, val); + /* val now holds the lower bound of the coordinate pair */ + if( p->u.rValue>=val ){ + pCellData += 4; + RTREE_DECODE_COORD(eInt, pCellData, val); + /* val now holds the upper bound of the coordinate pair */ + if( p->u.rValue<=val ) return; + } + break; + case RTREE_LE: + case RTREE_LT: RTREE_DECODE_COORD(eInt, pCellData, val); /* val now holds the lower bound of the coordinate pair */ if( p->u.rValue>=val ) return; - if( p->op!=RTREE_EQ ) break; /* RTREE_LE and RTREE_LT end here */ - /* Fall through for the RTREE_EQ case */ + break; - default: /* RTREE_GT or RTREE_GE, or fallthrough of RTREE_EQ */ + default: pCellData += 4; RTREE_DECODE_COORD(eInt, pCellData, val); /* val now holds the upper bound of the coordinate pair */ if( p->u.rValue<=val ) return; + break; } *peWithin = NOT_WITHIN; } @@ -193635,11 +195285,12 @@ static int nodeRowidIndex( */ static int nodeParentIndex(Rtree *pRtree, RtreeNode *pNode, int *piIndex){ RtreeNode *pParent = pNode->pParent; - if( pParent ){ + if( ALWAYS(pParent) ){ return nodeRowidIndex(pRtree, pParent, pNode->iNode, piIndex); + }else{ + *piIndex = -1; + return SQLITE_OK; } - *piIndex = -1; - return SQLITE_OK; } /* @@ -193762,7 +195413,8 @@ static RtreeSearchPoint *rtreeSearchPointNew( pNew = rtreeEnqueue(pCur, rScore, iLevel); if( pNew==0 ) return 0; ii = (int)(pNew - pCur->aPoint) + 1; - if( iiaNode[ii]==0 ); pCur->aNode[ii] = pCur->aNode[0]; }else{ @@ -193823,7 +195475,7 @@ static void rtreeSearchPointPop(RtreeCursor *p){ if( p->bPoint ){ p->anQueue[p->sPoint.iLevel]--; p->bPoint = 0; - }else if( p->nPoint ){ + }else if( ALWAYS(p->nPoint) ){ p->anQueue[p->aPoint[0].iLevel]--; n = --p->nPoint; p->aPoint[0] = p->aPoint[n]; @@ -193964,7 +195616,7 @@ static int rtreeRowid(sqlite3_vtab_cursor *pVtabCursor, sqlite_int64 *pRowid){ RtreeSearchPoint *p = rtreeSearchPointFirst(pCsr); int rc = SQLITE_OK; RtreeNode *pNode = rtreeNodeOfFirstSearchPoint(pCsr, &rc); - if( rc==SQLITE_OK && p ){ + if( rc==SQLITE_OK && ALWAYS(p) ){ *pRowid = nodeGetRowid(RTREE_OF_CURSOR(pCsr), pNode, p->iCell); } return rc; @@ -193982,7 +195634,7 @@ static int rtreeColumn(sqlite3_vtab_cursor *cur, sqlite3_context *ctx, int i){ RtreeNode *pNode = rtreeNodeOfFirstSearchPoint(pCsr, &rc); if( rc ) return rc; - if( p==0 ) return SQLITE_OK; + if( NEVER(p==0) ) return SQLITE_OK; if( i==0 ){ sqlite3_result_int64(ctx, nodeGetRowid(pRtree, pNode, p->iCell)); }else if( i<=pRtree->nDim2 ){ @@ -194181,8 +195833,11 @@ static int rtreeFilter( } if( rc==SQLITE_OK ){ RtreeSearchPoint *pNew; + assert( pCsr->bPoint==0 ); /* Due to the resetCursor() call above */ pNew = rtreeSearchPointNew(pCsr, RTREE_ZERO, (u8)(pRtree->iDepth+1)); - if( pNew==0 ) return SQLITE_NOMEM; + if( NEVER(pNew==0) ){ /* Because pCsr->bPoint was FALSE */ + return SQLITE_NOMEM; + } pNew->id = 1; pNew->iCell = 0; pNew->eWithin = PARTLY_WITHIN; @@ -194259,7 +195914,7 @@ static int rtreeBestIndex(sqlite3_vtab *tab, sqlite3_index_info *pIdxInfo){ struct sqlite3_index_constraint *p = &pIdxInfo->aConstraint[ii]; if( bMatch==0 && p->usable - && p->iColumn==0 && 
p->op==SQLITE_INDEX_CONSTRAINT_EQ + && p->iColumn<=0 && p->op==SQLITE_INDEX_CONSTRAINT_EQ ){ /* We have an equality constraint on the rowid. Use strategy 1. */ int jj; @@ -194465,7 +196120,7 @@ static int ChooseLeaf( int nCell = NCELL(pNode); RtreeCell cell; - RtreeNode *pChild; + RtreeNode *pChild = 0; RtreeCell *aCell = 0; @@ -194512,12 +196167,19 @@ static int AdjustTree( ){ RtreeNode *p = pNode; int cnt = 0; + int rc; while( p->pParent ){ RtreeNode *pParent = p->pParent; RtreeCell cell; int iCell; - if( (++cnt)>1000 || nodeParentIndex(pRtree, p, &iCell) ){ + cnt++; + if( NEVER(cnt>100) ){ + RTREE_IS_CORRUPT(pRtree); + return SQLITE_CORRUPT_VTAB; + } + rc = nodeParentIndex(pRtree, p, &iCell); + if( NEVER(rc!=SQLITE_OK) ){ RTREE_IS_CORRUPT(pRtree); return SQLITE_CORRUPT_VTAB; } @@ -194806,12 +196468,17 @@ static int updateMapping( xSetMapping = ((iHeight==0)?rowidWrite:parentWrite); if( iHeight>0 ){ RtreeNode *pChild = nodeHashLookup(pRtree, iRowid); + RtreeNode *p; + for(p=pNode; p; p=p->pParent){ + if( p==pChild ) return SQLITE_CORRUPT_VTAB; + } if( pChild ){ nodeRelease(pRtree, pChild->pParent); nodeReference(pNode); pChild->pParent = pNode; } } + if( NEVER(pNode==0) ) return SQLITE_ERROR; return xSetMapping(pRtree, iRowid, pNode->iNode); } @@ -194901,11 +196568,12 @@ static int SplitNode( RtreeNode *pParent = pLeft->pParent; int iCell; rc = nodeParentIndex(pRtree, pLeft, &iCell); - if( rc==SQLITE_OK ){ + if( ALWAYS(rc==SQLITE_OK) ){ nodeOverwriteCell(pRtree, pParent, &leftbbox, iCell); rc = AdjustTree(pRtree, pParent, &leftbbox); + assert( rc==SQLITE_OK ); } - if( rc!=SQLITE_OK ){ + if( NEVER(rc!=SQLITE_OK) ){ goto splitnode_out; } } @@ -194980,7 +196648,7 @@ static int fixLeafParent(Rtree *pRtree, RtreeNode *pLeaf){ */ iNode = sqlite3_column_int64(pRtree->pReadParent, 0); for(pTest=pLeaf; pTest && pTest->iNode!=iNode; pTest=pTest->pParent); - if( !pTest ){ + if( pTest==0 ){ rc2 = nodeAcquire(pRtree, iNode, 0, &pChild->pParent); } } @@ -195011,6 +196679,7 @@ static int removeNode(Rtree *pRtree, RtreeNode *pNode, int iHeight){ pParent = pNode->pParent; pNode->pParent = 0; rc = deleteCell(pRtree, pParent, iCell, iHeight+1); + testcase( rc!=SQLITE_OK ); } rc2 = nodeRelease(pRtree, pParent); if( rc==SQLITE_OK ){ @@ -195233,7 +196902,7 @@ static int rtreeInsertCell( } }else{ rc = AdjustTree(pRtree, pNode, pCell); - if( rc==SQLITE_OK ){ + if( ALWAYS(rc==SQLITE_OK) ){ if( iHeight==0 ){ rc = rowidWrite(pRtree, pCell->iRowid, pNode->iNode); }else{ @@ -195339,7 +197008,7 @@ static int rtreeDeleteRowid(Rtree *pRtree, sqlite3_int64 iDelete){ int rc2; RtreeNode *pChild = 0; i64 iChild = nodeGetRowid(pRtree, pRoot, 0); - rc = nodeAcquire(pRtree, iChild, pRoot, &pChild); + rc = nodeAcquire(pRtree, iChild, pRoot, &pChild); /* tag-20210916a */ if( rc==SQLITE_OK ){ rc = removeNode(pRtree, pChild, pRtree->iDepth-1); } @@ -195674,7 +197343,7 @@ static int rtreeQueryStat1(sqlite3 *db, Rtree *pRtree){ char *zSql; sqlite3_stmt *p; int rc; - i64 nRow = 0; + i64 nRow = RTREE_MIN_ROWEST; rc = sqlite3_table_column_metadata( db, pRtree->zDb, "sqlite_stat1",0,0,0,0,0,0 @@ -195691,20 +197360,10 @@ static int rtreeQueryStat1(sqlite3 *db, Rtree *pRtree){ if( rc==SQLITE_OK ){ if( sqlite3_step(p)==SQLITE_ROW ) nRow = sqlite3_column_int64(p, 0); rc = sqlite3_finalize(p); - }else if( rc!=SQLITE_NOMEM ){ - rc = SQLITE_OK; - } - - if( rc==SQLITE_OK ){ - if( nRow==0 ){ - pRtree->nRowEst = RTREE_DEFAULT_ROWEST; - }else{ - pRtree->nRowEst = MAX(nRow, RTREE_MIN_ROWEST); - } } sqlite3_free(zSql); } - + pRtree->nRowEst = 
MAX(nRow, RTREE_MIN_ROWEST); return rc; } @@ -195854,9 +197513,12 @@ static int rtreeSqlInit( sqlite3_str_appendf(p, "UPDATE \"%w\".\"%w_rowid\"SET ", zDb, zPrefix); for(ii=0; iinAux; ii++){ if( ii ) sqlite3_str_append(p, ",", 1); +#ifdef SQLITE_ENABLE_GEOPOLY if( iinAuxNotNull ){ sqlite3_str_appendf(p,"a%d=coalesce(?%d,a%d)",ii,ii+2,ii); - }else{ + }else +#endif + { sqlite3_str_appendf(p,"a%d=?%d",ii,ii+2); } } @@ -196121,6 +197783,7 @@ static void rtreenode(sqlite3_context *ctx, int nArg, sqlite3_value **apArg){ tree.nDim2 = tree.nDim*2; tree.nBytesPerCell = 8 + 8 * tree.nDim; node.zData = (u8 *)sqlite3_value_blob(apArg[1]); + if( node.zData==0 ) return; nData = sqlite3_value_bytes(apArg[1]); if( nData<4 ) return; if( nData=(4+6*sizeof(GeoCoord)) ){ const unsigned char *a = sqlite3_value_blob(pVal); int nVertex; if( a==0 ){ - sqlite3_result_error_nomem(pCtx); + if( pCtx ) sqlite3_result_error_nomem(pCtx); return 0; } nVertex = (a[1]<<16) + (a[2]<<8) + a[3]; @@ -197783,11 +199449,11 @@ static int geopolyOverlap(GeoPoly *p1, GeoPoly *p2){ }else{ /* Remove a segment */ if( pActive==pThisEvent->pSeg ){ - pActive = pActive->pNext; + pActive = ALWAYS(pActive) ? pActive->pNext : 0; }else{ for(pSeg=pActive; pSeg; pSeg=pSeg->pNext){ if( pSeg->pNext==pThisEvent->pSeg ){ - pSeg->pNext = pSeg->pNext->pNext; + pSeg->pNext = ALWAYS(pSeg->pNext) ? pSeg->pNext->pNext : 0; break; } } @@ -198031,6 +199697,7 @@ static int geopolyFilter( RtreeCoord bbox[4]; RtreeConstraint *p; assert( argc==1 ); + assert( argv[0]!=0 ); geopolyBBox(0, argv[0], bbox, &rc); if( rc ){ goto geopoly_filter_end; @@ -198258,6 +199925,7 @@ static int geopolyUpdate( || !sqlite3_value_nochange(aData[2]) /* UPDATE _shape */ || oldRowid!=newRowid) /* Rowid change */ ){ + assert( aData[2]!=0 ); geopolyBBox(0, aData[2], cell.aCoord, &rc); if( rc ){ if( rc==SQLITE_ERROR ){ @@ -198611,7 +200279,10 @@ SQLITE_API int sqlite3_rtree_query_callback( /* Allocate and populate the context object. */ pGeomCtx = (RtreeGeomCallback *)sqlite3_malloc(sizeof(RtreeGeomCallback)); - if( !pGeomCtx ) return SQLITE_NOMEM; + if( !pGeomCtx ){ + if( xDestructor ) xDestructor(pContext); + return SQLITE_NOMEM; + } pGeomCtx->xGeom = 0; pGeomCtx->xQueryFunc = xQueryFunc; pGeomCtx->xDestructor = xDestructor; @@ -200182,6 +201853,13 @@ SQLITE_API void sqlite3rbu_destroy_vfs(const char *zName); # define SWAP(TYPE,A,B) {TYPE t=A; A=B; B=t;} #endif +/* +** Name of the URI option that causes RBU to take an exclusive lock as +** part of the incremental checkpoint operation. +*/ +#define RBU_EXCLUSIVE_CHECKPOINT "rbu_exclusive_checkpoint" + + /* ** The rbu_state table is used to save the state of a partially applied ** update so that it can be resumed later. The table consists of integer @@ -201266,7 +202944,9 @@ static void rbuTableType( assert( p->rc==SQLITE_OK ); p->rc = prepareFreeAndCollectError(p->dbMain, &aStmt[0], &p->zErrmsg, sqlite3_mprintf( - "SELECT (sql LIKE 'create virtual%%'), rootpage" + "SELECT " + " (sql COLLATE nocase BETWEEN 'CREATE VIRTUAL' AND 'CREATE VIRTUAM')," + " rootpage" " FROM sqlite_schema" " WHERE name=%Q", zTab )); @@ -202799,7 +204479,7 @@ static RbuState *rbuLoadState(sqlite3rbu *p){ break; case RBU_STATE_OALSZ: - pRet->iOalSz = (u32)sqlite3_column_int64(pStmt, 1); + pRet->iOalSz = sqlite3_column_int64(pStmt, 1); break; case RBU_STATE_PHASEONESTEP: @@ -202826,13 +204506,19 @@ static RbuState *rbuLoadState(sqlite3rbu *p){ /* ** Open the database handle and attach the RBU database as "rbu". 
If an ** error occurs, leave an error code and message in the RBU handle. +** +** If argument dbMain is not NULL, then it is a database handle already +** open on the target database. Use this handle instead of opening a new +** one. */ -static void rbuOpenDatabase(sqlite3rbu *p, int *pbRetry){ +static void rbuOpenDatabase(sqlite3rbu *p, sqlite3 *dbMain, int *pbRetry){ assert( p->rc || (p->dbMain==0 && p->dbRbu==0) ); assert( p->rc || rbuIsVacuum(p) || p->zTarget!=0 ); + assert( dbMain==0 || rbuIsVacuum(p)==0 ); /* Open the RBU database */ p->dbRbu = rbuOpenDbhandle(p, p->zRbu, 1); + p->dbMain = dbMain; if( p->rc==SQLITE_OK && rbuIsVacuum(p) ){ sqlite3_file_control(p->dbRbu, "main", SQLITE_FCNTL_RBUCNT, (void*)p); @@ -203198,15 +204884,31 @@ static void rbuCheckpointFrame(sqlite3rbu *p, RbuFrame *pFrame){ /* -** Take an EXCLUSIVE lock on the database file. +** Take an EXCLUSIVE lock on the database file. Return SQLITE_OK if +** successful, or an SQLite error code otherwise. */ -static void rbuLockDatabase(sqlite3rbu *p){ - sqlite3_file *pReal = p->pTargetFd->pReal; - assert( p->rc==SQLITE_OK ); - p->rc = pReal->pMethods->xLock(pReal, SQLITE_LOCK_SHARED); - if( p->rc==SQLITE_OK ){ - p->rc = pReal->pMethods->xLock(pReal, SQLITE_LOCK_EXCLUSIVE); +static int rbuLockDatabase(sqlite3 *db){ + int rc = SQLITE_OK; + sqlite3_file *fd = 0; + sqlite3_file_control(db, "main", SQLITE_FCNTL_FILE_POINTER, &fd); + + if( fd->pMethods ){ + rc = fd->pMethods->xLock(fd, SQLITE_LOCK_SHARED); + if( rc==SQLITE_OK ){ + rc = fd->pMethods->xLock(fd, SQLITE_LOCK_EXCLUSIVE); + } } + return rc; +} + +/* +** Return true if the database handle passed as the only argument +** was opened with the rbu_exclusive_checkpoint=1 URI parameter +** specified. Or false otherwise. +*/ +static int rbuExclusiveCheckpoint(sqlite3 *db){ + const char *zUri = sqlite3_db_filename(db, 0); + return sqlite3_uri_boolean(zUri, RBU_EXCLUSIVE_CHECKPOINT, 0); } #if defined(_WIN32_WCE) @@ -203264,18 +204966,24 @@ static void rbuMoveOalFile(sqlite3rbu *p){ ** In order to ensure that there are no database readers, an EXCLUSIVE ** lock is obtained here before the *-oal is moved to *-wal. */ - rbuLockDatabase(p); + sqlite3 *dbMain = 0; + rbuFileSuffix3(zBase, zWal); + rbuFileSuffix3(zBase, zOal); + + /* Re-open the databases. */ + rbuObjIterFinalize(&p->objiter); + sqlite3_close(p->dbRbu); + sqlite3_close(p->dbMain); + p->dbMain = 0; + p->dbRbu = 0; + + dbMain = rbuOpenDbhandle(p, p->zTarget, 1); + if( dbMain ){ + assert( p->rc==SQLITE_OK ); + p->rc = rbuLockDatabase(dbMain); + } + if( p->rc==SQLITE_OK ){ - rbuFileSuffix3(zBase, zWal); - rbuFileSuffix3(zBase, zOal); - - /* Re-open the databases. */ - rbuObjIterFinalize(&p->objiter); - sqlite3_close(p->dbRbu); - sqlite3_close(p->dbMain); - p->dbMain = 0; - p->dbRbu = 0; - #if defined(_WIN32_WCE) { LPWSTR zWideOal; @@ -203302,11 +205010,19 @@ static void rbuMoveOalFile(sqlite3rbu *p){ #else p->rc = rename(zOal, zWal) ? SQLITE_IOERR : SQLITE_OK; #endif + } - if( p->rc==SQLITE_OK ){ - rbuOpenDatabase(p, 0); - rbuSetupCheckpoint(p, 0); - } + if( p->rc!=SQLITE_OK + || rbuIsVacuum(p) + || rbuExclusiveCheckpoint(dbMain)==0 + ){ + sqlite3_close(dbMain); + dbMain = 0; + } + + if( p->rc==SQLITE_OK ){ + rbuOpenDatabase(p, dbMain, 0); + rbuSetupCheckpoint(p, 0); } } @@ -204057,9 +205773,9 @@ static sqlite3rbu *openRbuHandle( ** If this is the case, it will have been checkpointed and deleted ** when the handle was closed and a second attempt to open the ** database may succeed. 
*/ - rbuOpenDatabase(p, &bRetry); + rbuOpenDatabase(p, 0, &bRetry); if( bRetry ){ - rbuOpenDatabase(p, 0); + rbuOpenDatabase(p, 0, 0); } } @@ -204154,6 +205870,14 @@ static sqlite3rbu *openRbuHandle( }else if( p->eStage==RBU_STAGE_MOVE ){ /* no-op */ }else if( p->eStage==RBU_STAGE_CKPT ){ + if( !rbuIsVacuum(p) && rbuExclusiveCheckpoint(p->dbMain) ){ + /* If the rbu_exclusive_checkpoint=1 URI parameter was specified + ** and an incremental checkpoint is being resumed, attempt an + ** exclusive lock on the db file. If this fails, so be it. */ + p->eStage = RBU_STAGE_DONE; + rbuLockDatabase(p->dbMain); + p->eStage = RBU_STAGE_CKPT; + } rbuSetupCheckpoint(p, pState); }else if( p->eStage==RBU_STAGE_DONE ){ p->rc = SQLITE_DONE; @@ -204191,7 +205915,6 @@ SQLITE_API sqlite3rbu *sqlite3rbu_open( const char *zState ){ if( zTarget==0 || zRbu==0 ){ return rbuMisuseError(); } - /* TODO: Check that zTarget and zRbu are non-NULL */ return openRbuHandle(zTarget, zRbu, zState); } @@ -205394,6 +207117,15 @@ SQLITE_API sqlite3_int64 sqlite3rbu_temp_size(sqlite3rbu *pRbu){ #if (defined(SQLITE_ENABLE_DBSTAT_VTAB) || defined(SQLITE_TEST)) \ && !defined(SQLITE_OMIT_VIRTUALTABLE) +/* +** The pager and btree modules arrange objects in memory so that there are +** always approximately 200 bytes of addressable memory following each page +** buffer. This way small buffer overreads caused by corrupt database pages +** do not cause undefined behaviour. This module pads each page buffer +** by the following number of bytes for the same purpose. +*/ +#define DBSTAT_PAGE_PADDING_BYTES 256 + /* ** Page paths: ** @@ -205461,9 +207193,8 @@ struct StatCell { /* Size information for a single btree page */ struct StatPage { u32 iPgno; /* Page number */ - DbPage *pPg; /* Page content */ + u8 *aPg; /* Page buffer from sqlite3_malloc() */ int iCell; /* Current cell */ - char *zPath; /* Path to this page */ /* Variables populated by statDecodePage(): */ @@ -205675,18 +207406,25 @@ static void statClearCells(StatPage *p){ } static void statClearPage(StatPage *p){ + u8 *aPg = p->aPg; statClearCells(p); - sqlite3PagerUnref(p->pPg); sqlite3_free(p->zPath); memset(p, 0, sizeof(StatPage)); + p->aPg = aPg; } static void statResetCsr(StatCursor *pCsr){ int i; - sqlite3_reset(pCsr->pStmt); + /* In some circumstances, specifically if an OOM has occurred, the call + ** to sqlite3_reset() may cause the pager to be reset (emptied). It is + ** important that statClearPage() is called to free any page refs before + ** this happens. dbsqlfuzz 9ed3e4e3816219d3509d711636c38542bf3f40b1. */ for(i=0; iaPage); i++){ statClearPage(&pCsr->aPage[i]); + sqlite3_free(pCsr->aPage[i].aPg); + pCsr->aPage[i].aPg = 0; } + sqlite3_reset(pCsr->pStmt); pCsr->iPage = 0; sqlite3_free(pCsr->zPath); pCsr->zPath = 0; @@ -205751,7 +207489,7 @@ static int statDecodePage(Btree *pBt, StatPage *p){ int isLeaf; int szPage; - u8 *aData = sqlite3PagerGetData(p->pPg); + u8 *aData = p->aPg; u8 *aHdr = &aData[p->iPgno==1 ? 
100 : 0]; p->flags = aHdr[0]; @@ -205822,7 +207560,7 @@ static int statDecodePage(Btree *pBt, StatPage *p){ if( nPayload>(u32)nLocal ){ int j; int nOvfl = ((nPayload - nLocal) + nUsable-4 - 1) / (nUsable - 4); - if( iOff+nLocal>nUsable || nPayload>0x7fffffff ){ + if( iOff+nLocal+4>nUsable || nPayload>0x7fffffff ){ goto statPageIsCorrupt; } pCell->nLastOvfl = (nPayload-nLocal) - (nOvfl-1) * (nUsable-4); @@ -205881,6 +207619,38 @@ static void statSizeAndOffset(StatCursor *pCsr){ } } +/* +** Load a copy of the page data for page iPg into the buffer belonging +** to page object pPg. Allocate the buffer if necessary. Return SQLITE_OK +** if successful, or an SQLite error code otherwise. +*/ +static int statGetPage( + Btree *pBt, /* Load page from this b-tree */ + u32 iPg, /* Page number to load */ + StatPage *pPg /* Load page into this object */ +){ + int pgsz = sqlite3BtreeGetPageSize(pBt); + DbPage *pDbPage = 0; + int rc; + + if( pPg->aPg==0 ){ + pPg->aPg = (u8*)sqlite3_malloc(pgsz + DBSTAT_PAGE_PADDING_BYTES); + if( pPg->aPg==0 ){ + return SQLITE_NOMEM_BKPT; + } + memset(&pPg->aPg[pgsz], 0, DBSTAT_PAGE_PADDING_BYTES); + } + + rc = sqlite3PagerGet(sqlite3BtreePager(pBt), iPg, &pDbPage, 0); + if( rc==SQLITE_OK ){ + const u8 *a = sqlite3PagerGetData(pDbPage); + memcpy(pPg->aPg, a, pgsz); + sqlite3PagerUnref(pDbPage); + } + + return rc; +} + /* ** Move a DBSTAT cursor to the next entry. Normally, the next ** entry will be the next page, but in aggregated mode (pCsr->isAgg!=0), @@ -205899,7 +207669,7 @@ static int statNext(sqlite3_vtab_cursor *pCursor){ pCsr->zPath = 0; statNextRestart: - if( pCsr->aPage[0].pPg==0 ){ + if( pCsr->iPage<0 ){ /* Start measuring space on the next btree */ statResetCounts(pCsr); rc = sqlite3_step(pCsr->pStmt); @@ -205911,7 +207681,7 @@ statNextRestart: pCsr->isEof = 1; return sqlite3_reset(pCsr->pStmt); } - rc = sqlite3PagerGet(pPager, iRoot, &pCsr->aPage[0].pPg, 0); + rc = statGetPage(pBt, iRoot, &pCsr->aPage[0]); pCsr->aPage[0].iPgno = iRoot; pCsr->aPage[0].iCell = 0; if( !pCsr->isAgg ){ @@ -205962,9 +207732,8 @@ statNextRestart: if( !p->iRightChildPg || p->iCell>p->nCell ){ statClearPage(p); - if( pCsr->iPage>0 ){ - pCsr->iPage--; - }else if( pCsr->isAgg ){ + pCsr->iPage--; + if( pCsr->isAgg && pCsr->iPage<0 ){ /* label-statNext-done: When computing aggregate space usage over ** an entire btree, this is the exit point from this function */ return SQLITE_OK; @@ -205983,7 +207752,7 @@ statNextRestart: }else{ p[1].iPgno = p->aCell[p->iCell].iChildPg; } - rc = sqlite3PagerGet(pPager, p[1].iPgno, &p[1].pPg, 0); + rc = statGetPage(pBt, p[1].iPgno, &p[1]); pCsr->nPage++; p[1].iCell = 0; if( !pCsr->isAgg ){ @@ -206113,6 +207882,7 @@ static int statFilter( } if( rc==SQLITE_OK ){ + pCsr->iPage = -1; rc = statNext(pCursor); } return rc; @@ -207064,7 +208834,7 @@ static int sessionSerializeValue( if( aBuf ){ sessionVarintPut(&aBuf[1], n); - if( n ) memcpy(&aBuf[nVarint + 1], z, n); + if( n>0 ) memcpy(&aBuf[nVarint + 1], z, n); } nByte = 1 + nVarint + n; @@ -207669,16 +209439,32 @@ static int sessionTableInfo( }else if( rc==SQLITE_ERROR ){ zPragma = sqlite3_mprintf(""); }else{ + *pazCol = 0; + *pabPK = 0; + *pnCol = 0; + if( pzTab ) *pzTab = 0; return rc; } }else{ zPragma = sqlite3_mprintf("PRAGMA '%q'.table_info('%q')", zDb, zThis); } - if( !zPragma ) return SQLITE_NOMEM; + if( !zPragma ){ + *pazCol = 0; + *pabPK = 0; + *pnCol = 0; + if( pzTab ) *pzTab = 0; + return SQLITE_NOMEM; + } rc = sqlite3_prepare_v2(db, zPragma, -1, &pStmt, 0); sqlite3_free(zPragma); - if( 
rc!=SQLITE_OK ) return rc; + if( rc!=SQLITE_OK ){ + *pazCol = 0; + *pabPK = 0; + *pnCol = 0; + if( pzTab ) *pzTab = 0; + return rc; + } nByte = nThis + 1; while( SQLITE_ROW==sqlite3_step(pStmt) ){ @@ -208096,7 +209882,11 @@ static int sessionFindTable( ){ rc = sqlite3session_attach(pSession, zName); if( rc==SQLITE_OK ){ - for(pRet=pSession->pTable; pRet->pNext; pRet=pRet->pNext); + pRet = pSession->pTable; + while( ALWAYS(pRet) && pRet->pNext ){ + pRet = pRet->pNext; + } + assert( pRet!=0 ); assert( 0==sqlite3_strnicmp(pRet->zName, zName, nName+1) ); } } @@ -208869,6 +210659,7 @@ static int sessionAppendUpdate( int i; /* Used to iterate through columns */ u8 *pCsr = p->aRecord; /* Used to iterate through old.* values */ + assert( abPK!=0 ); sessionAppendByte(pBuf, SQLITE_UPDATE, &rc); sessionAppendByte(pBuf, p->bIndirect, &rc); for(i=0; ipTable; rc==SQLITE_OK && pTab; pTab=pTab->pNext){ if( pTab->nEntry ){ const char *zName = pTab->zName; - int nCol; /* Number of columns in table */ - u8 *abPK; /* Primary key array */ + int nCol = 0; /* Number of columns in table */ + u8 *abPK = 0; /* Primary key array */ const char **azCol = 0; /* Table columns */ int i; /* Used to iterate through hash buckets */ sqlite3_stmt *pSel = 0; /* SELECT statement to query table pTab */ @@ -209231,6 +211024,7 @@ static int sessionGenerateChangeset( sessionAppendCol(&buf, pSel, iCol, &rc); } }else{ + assert( abPK!=0 ); /* Because sessionSelectStmt() returned ok */ rc = sessionAppendUpdate(&buf, bPatchset, pSel, p, abPK); } }else if( p->op!=SQLITE_INSERT ){ @@ -209291,7 +211085,10 @@ SQLITE_API int sqlite3session_changeset( int *pnChangeset, /* OUT: Size of buffer at *ppChangeset */ void **ppChangeset /* OUT: Buffer containing changeset */ ){ - int rc = sessionGenerateChangeset(pSession, 0, 0, 0, pnChangeset,ppChangeset); + int rc; + + if( pnChangeset==0 || ppChangeset==0 ) return SQLITE_MISUSE; + rc = sessionGenerateChangeset(pSession, 0, 0, 0, pnChangeset,ppChangeset); assert( rc || pnChangeset==0 || pSession->bEnableSize==0 || *pnChangeset<=pSession->nMaxChangesetSize ); @@ -209306,6 +211103,7 @@ SQLITE_API int sqlite3session_changeset_strm( int (*xOutput)(void *pOut, const void *pData, int nData), void *pOut ){ + if( xOutput==0 ) return SQLITE_MISUSE; return sessionGenerateChangeset(pSession, 0, xOutput, pOut, 0, 0); } @@ -209317,6 +211115,7 @@ SQLITE_API int sqlite3session_patchset_strm( int (*xOutput)(void *pOut, const void *pData, int nData), void *pOut ){ + if( xOutput==0 ) return SQLITE_MISUSE; return sessionGenerateChangeset(pSession, 1, xOutput, pOut, 0, 0); } @@ -209332,6 +211131,7 @@ SQLITE_API int sqlite3session_patchset( int *pnPatchset, /* OUT: Size of buffer at *ppChangeset */ void **ppPatchset /* OUT: Buffer containing changeset */ ){ + if( pnPatchset==0 || ppPatchset==0 ) return SQLITE_MISUSE; return sessionGenerateChangeset(pSession, 1, 0, 0, pnPatchset, ppPatchset); } @@ -210295,11 +212095,11 @@ static int sessionChangesetInvert( } assert( rc==SQLITE_OK ); - if( pnInverted ){ + if( pnInverted && ALWAYS(ppInverted) ){ *pnInverted = sOut.nBuf; *ppInverted = sOut.aBuf; sOut.aBuf = 0; - }else if( sOut.nBuf>0 ){ + }else if( sOut.nBuf>0 && ALWAYS(xOutput!=0) ){ rc = xOutput(pOut, sOut.aBuf, sOut.nBuf); } @@ -210755,7 +212555,7 @@ static int sessionBindRow( for(i=0; rc==SQLITE_OK && i0 ) rc = xOutput(pOut, buf.aBuf, buf.nBuf); - }else{ + }else if( ppOut ){ *ppOut = buf.aBuf; - *pnOut = buf.nBuf; + if( pnOut ) *pnOut = buf.nBuf; buf.aBuf = 0; } } @@ -212300,7 +214100,7 @@ static int sessionRebase( if( 
sOut.nBuf>0 ){ rc = xOutput(pOut, sOut.aBuf, sOut.nBuf); } - }else{ + }else if( ppOut ){ *ppOut = (void*)sOut.aBuf; *pnOut = sOut.nBuf; sOut.aBuf = 0; @@ -213043,8 +214843,20 @@ typedef sqlite3_uint64 u64; #endif #define testcase(x) -#define ALWAYS(x) 1 -#define NEVER(x) 0 + +#if defined(SQLITE_COVERAGE_TEST) || defined(SQLITE_MUTATION_TEST) +# define SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS 1 +#endif +#if defined(SQLITE_OMIT_AUXILIARY_SAFETY_CHECKS) +# define ALWAYS(X) (1) +# define NEVER(X) (0) +#elif !defined(NDEBUG) +# define ALWAYS(X) ((X)?1:(assert(0),0)) +# define NEVER(X) ((X)?(assert(0),1):0) +#else +# define ALWAYS(X) (X) +# define NEVER(X) (X) +#endif #define MIN(x,y) (((x) < (y)) ? (x) : (y)) #define MAX(x,y) (((x) > (y)) ? (x) : (y)) @@ -213104,7 +214916,7 @@ SQLITE_API extern int sqlite3_fts5_may_be_corrupt; ** A version of memcmp() that does not cause asan errors if one of the pointer ** parameters is NULL and the number of bytes to compare is zero. */ -#define fts5Memcmp(s1, s2, n) ((n)==0 ? 0 : memcmp((s1), (s2), (n))) +#define fts5Memcmp(s1, s2, n) ((n)<=0 ? 0 : memcmp((s1), (s2), (n))) /* Mark a function parameter as unused, to suppress nuisance compiler ** warnings. */ @@ -213443,6 +215255,9 @@ static void sqlite3Fts5IndexCloseReader(Fts5Index*); */ static const char *sqlite3Fts5IterTerm(Fts5IndexIter*, int*); static int sqlite3Fts5IterNextScan(Fts5IndexIter*); +static void *sqlite3Fts5StructureRef(Fts5Index*); +static void sqlite3Fts5StructureRelease(void*); +static int sqlite3Fts5StructureTest(Fts5Index*, void*); /* @@ -214220,9 +216035,9 @@ struct fts5yyParser { }; typedef struct fts5yyParser fts5yyParser; +/* #include */ #ifndef NDEBUG /* #include */ -/* #include */ static FILE *fts5yyTraceFILE = 0; static char *fts5yyTracePrompt = 0; #endif /* NDEBUG */ @@ -215159,8 +216974,8 @@ static void sqlite3Fts5Parser( fts5yyact = fts5yy_find_shift_action((fts5YYCODETYPE)fts5yymajor,fts5yyact); if( fts5yyact >= fts5YY_MIN_REDUCE ){ unsigned int fts5yyruleno = fts5yyact - fts5YY_MIN_REDUCE; /* Reduce by this rule */ - assert( fts5yyruleno<(int)(sizeof(fts5yyRuleName)/sizeof(fts5yyRuleName[0])) ); #ifndef NDEBUG + assert( fts5yyruleno<(int)(sizeof(fts5yyRuleName)/sizeof(fts5yyRuleName[0])) ); if( fts5yyTraceFILE ){ int fts5yysize = fts5yyRuleInfoNRhs[fts5yyruleno]; if( fts5yysize ){ @@ -215258,14 +217073,13 @@ static void sqlite3Fts5Parser( fts5yy_destructor(fts5yypParser, (fts5YYCODETYPE)fts5yymajor, &fts5yyminorunion); fts5yymajor = fts5YYNOCODE; }else{ - while( fts5yypParser->fts5yytos >= fts5yypParser->fts5yystack - && (fts5yyact = fts5yy_find_reduce_action( - fts5yypParser->fts5yytos->stateno, - fts5YYERRORSYMBOL)) > fts5YY_MAX_SHIFTREDUCE - ){ + while( fts5yypParser->fts5yytos > fts5yypParser->fts5yystack ){ + fts5yyact = fts5yy_find_reduce_action(fts5yypParser->fts5yytos->stateno, + fts5YYERRORSYMBOL); + if( fts5yyact<=fts5YY_MAX_SHIFTREDUCE ) break; fts5yy_pop_parser_stack(fts5yypParser); } - if( fts5yypParser->fts5yytos < fts5yypParser->fts5yystack || fts5yymajor==0 ){ + if( fts5yypParser->fts5yytos <= fts5yypParser->fts5yystack || fts5yymajor==0 ){ fts5yy_destructor(fts5yypParser,(fts5YYCODETYPE)fts5yymajor,&fts5yyminorunion); fts5yy_parse_failed(fts5yypParser); #ifndef fts5YYNOERRORRECOVERY @@ -216128,7 +217942,6 @@ static void sqlite3Fts5BufferAppendBlob( u32 nData, const u8 *pData ){ - assert_nc( *pRc || nData>=0 ); if( nData ){ if( fts5BufferGrow(pRc, pBuf, nData) ) return; memcpy(&pBuf->p[pBuf->n], pData, nData); @@ -216238,9 +218051,8 @@ static int 
sqlite3Fts5PoslistNext64( return 1; }else{ i64 iOff = *piOff; - int iVal; + u32 iVal; fts5FastGetVarint32(a, i, iVal); - assert( iVal>=0 ); if( iVal<=1 ){ if( iVal==0 ){ *pi = i; @@ -216248,6 +218060,7 @@ static int sqlite3Fts5PoslistNext64( } fts5FastGetVarint32(a, i, iVal); iOff = ((i64)iVal) << 32; + assert( iOff>=0 ); fts5FastGetVarint32(a, i, iVal); if( iVal<2 ){ /* This is a corrupt record. So stop parsing it here. */ @@ -216259,7 +218072,7 @@ static int sqlite3Fts5PoslistNext64( *piOff = (iOff & (i64)0x7FFFFFFF<<32)+((iOff + (iVal-2)) & 0x7FFFFFFF); } *pi = i; - assert( *piOff>=iOff ); + assert_nc( *piOff>=iOff ); return 0; } } @@ -217034,6 +218847,7 @@ static int sqlite3Fts5ConfigParse( z = fts5ConfigSkipWhitespace(z); if( z && *z=='=' ){ bOption = 1; + assert( zOne!=0 ); z++; if( bMustBeCol ) z = 0; } @@ -217050,7 +218864,11 @@ static int sqlite3Fts5ConfigParse( rc = SQLITE_ERROR; }else{ if( bOption ){ - rc = fts5ConfigParseSpecial(pGlobal, pRet, zOne, zTwo?zTwo:"", pzErr); + rc = fts5ConfigParseSpecial(pGlobal, pRet, + ALWAYS(zOne)?zOne:"", + zTwo?zTwo:"", + pzErr + ); }else{ rc = fts5ConfigParseColumn(pRet, zOne, zTwo, pzErr); zOne = 0; @@ -217568,6 +219386,7 @@ static void sqlite3Fts5ParseError(Fts5Parse *pParse, const char *zFmt, ...){ va_list ap; va_start(ap, zFmt); if( pParse->rc==SQLITE_OK ){ + assert( pParse->zErr==0 ); pParse->zErr = sqlite3_vmprintf(zFmt, ap); pParse->rc = SQLITE_ERROR; } @@ -217866,6 +219685,7 @@ static i64 fts5ExprSynonymRowid(Fts5ExprTerm *pTerm, int bDesc, int *pbEof){ int bRetValid = 0; Fts5ExprTerm *p; + assert( pTerm ); assert( pTerm->pSynonym ); assert( bDesc==0 || bDesc==1 ); for(p=pTerm; p; p=p->pSynonym){ @@ -219306,7 +221126,7 @@ static int sqlite3Fts5ExprClonePhrase( sCtx.pPhrase = sqlite3Fts5MallocZero(&rc, sizeof(Fts5ExprPhrase)); } - if( rc==SQLITE_OK ){ + if( rc==SQLITE_OK && ALWAYS(sCtx.pPhrase) ){ /* All the allocations succeeded. Put the expression object together. */ pNew->pIndex = pExpr->pIndex; pNew->pConfig = pExpr->pConfig; @@ -219577,9 +221397,8 @@ static void sqlite3Fts5ParseSetColset( ){ Fts5Colset *pFree = pColset; if( pParse->pConfig->eDetail==FTS5_DETAIL_NONE ){ - pParse->rc = SQLITE_ERROR; - pParse->zErr = sqlite3_mprintf( - "fts5: column queries are not supported (detail=none)" + sqlite3Fts5ParseError(pParse, + "fts5: column queries are not supported (detail=none)" ); }else{ fts5ParseSetColset(pParse, pExpr, pColset, &pFree); @@ -219753,13 +221572,10 @@ static Fts5ExprNode *sqlite3Fts5ParseNode( || pPhrase->nTerm>1 || (pPhrase->nTerm>0 && pPhrase->aTerm[0].bFirst) ){ - assert( pParse->rc==SQLITE_OK ); - pParse->rc = SQLITE_ERROR; - assert( pParse->zErr==0 ); - pParse->zErr = sqlite3_mprintf( + sqlite3Fts5ParseError(pParse, "fts5: %s queries are not supported (detail!=full)", pNear->nPhrase==1 ? "phrase": "NEAR" - ); + ); sqlite3_free(pRet); pRet = 0; } @@ -220291,6 +222107,15 @@ struct Fts5PoslistPopulator { int bMiss; }; +/* +** Clear the position lists associated with all phrases in the expression +** passed as the first argument. Argument bLive is true if the expression +** might be pointing to a real entry, otherwise it has just been reset. +** +** At present this function is only used for detail=col and detail=none +** fts5 tables. This implies that all phrases must be at most 1 token +** in size, as phrase matches are not supported without detail=full. 
+*/ static Fts5PoslistPopulator *sqlite3Fts5ExprClearPoslists(Fts5Expr *pExpr, int bLive){ Fts5PoslistPopulator *pRet; pRet = sqlite3_malloc64(sizeof(Fts5PoslistPopulator)*pExpr->nPhrase); @@ -220300,7 +222125,7 @@ static Fts5PoslistPopulator *sqlite3Fts5ExprClearPoslists(Fts5Expr *pExpr, int b for(i=0; inPhrase; i++){ Fts5Buffer *pBuf = &pExpr->apExprPhrase[i]->poslist; Fts5ExprNode *pNode = pExpr->apExprPhrase[i]->pNode; - assert( pExpr->apExprPhrase[i]->nTerm==1 ); + assert( pExpr->apExprPhrase[i]->nTerm<=1 ); if( bLive && (pBuf->n==0 || pNode->iRowid!=pExpr->pRoot->iRowid || pNode->bEof) ){ @@ -220851,7 +222676,7 @@ static int sqlite3Fts5HashWrite( p->bContent = 1; }else{ /* Append a new column value, if necessary */ - assert( iCol>=p->iCol ); + assert_nc( iCol>=p->iCol ); if( iCol!=p->iCol ){ if( pHash->eDetail==FTS5_DETAIL_FULL ){ pPtr[p->nData++] = 0x01; @@ -221656,8 +223481,11 @@ static int fts5BufferCompareBlob( ** res = *pLeft - *pRight */ static int fts5BufferCompare(Fts5Buffer *pLeft, Fts5Buffer *pRight){ - int nCmp = MIN(pLeft->n, pRight->n); - int res = fts5Memcmp(pLeft->p, pRight->p, nCmp); + int nCmp, res; + nCmp = MIN(pLeft->n, pRight->n); + assert( nCmp<=0 || pLeft->p!=0 ); + assert( nCmp<=0 || pRight->p!=0 ); + res = fts5Memcmp(pLeft->p, pRight->p, nCmp); return (res==0 ? (pLeft->n - pRight->n) : res); } @@ -221753,6 +223581,7 @@ static Fts5Data *fts5DataRead(Fts5Index *p, i64 iRowid){ return pRet; } + /* ** Release a reference to data record returned by an earlier call to ** fts5DataRead(). @@ -221877,6 +223706,58 @@ static void fts5StructureRef(Fts5Structure *pStruct){ pStruct->nRef++; } +static void *sqlite3Fts5StructureRef(Fts5Index *p){ + fts5StructureRef(p->pStruct); + return (void*)p->pStruct; +} +static void sqlite3Fts5StructureRelease(void *p){ + if( p ){ + fts5StructureRelease((Fts5Structure*)p); + } +} +static int sqlite3Fts5StructureTest(Fts5Index *p, void *pStruct){ + if( p->pStruct!=(Fts5Structure*)pStruct ){ + return SQLITE_ABORT; + } + return SQLITE_OK; +} + +/* +** Ensure that structure object (*pp) is writable. +** +** This function is a no-op if (*pRc) is not SQLITE_OK when it is called. If +** an error occurs, (*pRc) is set to an SQLite error code before returning. +*/ +static void fts5StructureMakeWritable(int *pRc, Fts5Structure **pp){ + Fts5Structure *p = *pp; + if( *pRc==SQLITE_OK && p->nRef>1 ){ + i64 nByte = sizeof(Fts5Structure)+(p->nLevel-1)*sizeof(Fts5StructureLevel); + Fts5Structure *pNew; + pNew = (Fts5Structure*)sqlite3Fts5MallocZero(pRc, nByte); + if( pNew ){ + int i; + memcpy(pNew, p, nByte); + for(i=0; inLevel; i++) pNew->aLevel[i].aSeg = 0; + for(i=0; inLevel; i++){ + Fts5StructureLevel *pLvl = &pNew->aLevel[i]; + nByte = sizeof(Fts5StructureSegment) * pNew->aLevel[i].nSeg; + pLvl->aSeg = (Fts5StructureSegment*)sqlite3Fts5MallocZero(pRc, nByte); + if( pLvl->aSeg==0 ){ + for(i=0; inLevel; i++){ + sqlite3_free(pNew->aLevel[i].aSeg); + } + sqlite3_free(pNew); + return; + } + memcpy(pLvl->aSeg, p->aLevel[i].aSeg, nByte); + } + p->nRef--; + pNew->nRef = 1; + } + *pp = pNew; + } +} + /* ** Deserialize and return the structure record currently stored in serialized ** form within buffer pData/nData. @@ -221978,9 +223859,11 @@ static int fts5StructureDecode( } /* -** +** Add a level to the Fts5Structure.aLevel[] array of structure object +** (*ppStruct). 
*/ static void fts5StructureAddLevel(int *pRc, Fts5Structure **ppStruct){ + fts5StructureMakeWritable(pRc, ppStruct); if( *pRc==SQLITE_OK ){ Fts5Structure *pStruct = *ppStruct; int nLevel = pStruct->nLevel; @@ -222774,6 +224657,7 @@ static void fts5SegIterInit( if( p->rc==SQLITE_OK ){ pIter->iLeafOffset = 4; + assert( pIter->pLeaf!=0 ); assert_nc( pIter->pLeaf->nn>4 ); assert_nc( fts5LeafFirstTermOff(pIter->pLeaf)==4 ); pIter->iPgidxOff = pIter->pLeaf->szLeaf+1; @@ -223157,7 +225041,7 @@ static void fts5SegIterReverse(Fts5Index *p, Fts5SegIter *pIter){ if( pDlidx ){ int iSegid = pIter->pSeg->iSegid; pgnoLast = fts5DlidxIterPgno(pDlidx); - pLast = fts5DataRead(p, FTS5_SEGMENT_ROWID(iSegid, pgnoLast)); + pLast = fts5LeafRead(p, FTS5_SEGMENT_ROWID(iSegid, pgnoLast)); }else{ Fts5Data *pLeaf = pIter->pLeaf; /* Current leaf data */ @@ -223184,7 +225068,7 @@ static void fts5SegIterReverse(Fts5Index *p, Fts5SegIter *pIter){ ** forward to find the page containing the last rowid. */ for(pgno=pIter->iLeafPgno+1; !p->rc && pgno<=pSeg->pgnoLast; pgno++){ i64 iAbs = FTS5_SEGMENT_ROWID(pSeg->iSegid, pgno); - Fts5Data *pNew = fts5DataRead(p, iAbs); + Fts5Data *pNew = fts5LeafRead(p, iAbs); if( pNew ){ int iRowid, bTermless; iRowid = fts5LeafFirstRowidOff(pNew); @@ -223215,6 +225099,10 @@ static void fts5SegIterReverse(Fts5Index *p, Fts5SegIter *pIter){ pIter->pLeaf = pLast; pIter->iLeafPgno = pgnoLast; iOff = fts5LeafFirstRowidOff(pLast); + if( iOff>pLast->szLeaf ){ + p->rc = FTS5_CORRUPT; + return; + } iOff += fts5GetVarint(&pLast->p[iOff], (u64*)&pIter->iRowid); pIter->iLeafOffset = iOff; @@ -223223,7 +225111,6 @@ static void fts5SegIterReverse(Fts5Index *p, Fts5SegIter *pIter){ }else{ pIter->iEndofDoclist = fts5LeafFirstTermOff(pLast); } - } fts5SegIterReverseInitPage(p, pIter); @@ -223275,21 +225162,20 @@ static void fts5LeafSeek( Fts5SegIter *pIter, /* Iterator to seek */ const u8 *pTerm, int nTerm /* Term to search for */ ){ - int iOff; + u32 iOff; const u8 *a = pIter->pLeaf->p; - int szLeaf = pIter->pLeaf->szLeaf; - int n = pIter->pLeaf->nn; + u32 n = (u32)pIter->pLeaf->nn; u32 nMatch = 0; u32 nKeep = 0; u32 nNew = 0; u32 iTermOff; - int iPgidx; /* Current offset in pgidx */ + u32 iPgidx; /* Current offset in pgidx */ int bEndOfPage = 0; assert( p->rc==SQLITE_OK ); - iPgidx = szLeaf; + iPgidx = (u32)pIter->pLeaf->szLeaf; iPgidx += fts5GetVarint32(&a[iPgidx], iTermOff); iOff = iTermOff; if( iOff>n ){ @@ -223355,15 +225241,15 @@ static void fts5LeafSeek( if( pIter->pLeaf==0 ) return; a = pIter->pLeaf->p; if( fts5LeafIsTermless(pIter->pLeaf)==0 ){ - iPgidx = pIter->pLeaf->szLeaf; + iPgidx = (u32)pIter->pLeaf->szLeaf; iPgidx += fts5GetVarint32(&pIter->pLeaf->p[iPgidx], iOff); - if( iOff<4 || iOff>=pIter->pLeaf->szLeaf ){ + if( iOff<4 || (i64)iOff>=pIter->pLeaf->szLeaf ){ p->rc = FTS5_CORRUPT; return; }else{ nKeep = 0; iTermOff = iOff; - n = pIter->pLeaf->nn; + n = (u32)pIter->pLeaf->nn; iOff += fts5GetVarint32(&a[iOff], nNew); break; } @@ -223731,7 +225617,7 @@ static void fts5SegIterGotoPage( fts5SegIterNextPage(p, pIter); assert( p->rc!=SQLITE_OK || pIter->iLeafPgno==iLeafPgno ); - if( p->rc==SQLITE_OK ){ + if( p->rc==SQLITE_OK && ALWAYS(pIter->pLeaf!=0) ){ int iOff; u8 *a = pIter->pLeaf->p; int n = pIter->pLeaf->szLeaf; @@ -224163,7 +226049,11 @@ static void fts5SegiterPoslist( Fts5Colset *pColset, Fts5Buffer *pBuf ){ + assert( pBuf!=0 ); + assert( pSeg!=0 ); if( 0==fts5BufferGrow(&p->rc, pBuf, pSeg->nPos+FTS5_DATA_ZERO_PADDING) ){ + assert( pBuf->p!=0 ); + assert( pBuf->nSpace >= 
pBuf->n+pSeg->nPos+FTS5_DATA_ZERO_PADDING ); memset(&pBuf->p[pBuf->n+pSeg->nPos], 0, FTS5_DATA_ZERO_PADDING); if( pColset==0 ){ fts5ChunkIterate(p, pSeg, (void*)pBuf, fts5PoslistCallback); @@ -224387,6 +226277,7 @@ static void fts5IterSetOutputs_Full(Fts5Iter *pIter, Fts5SegIter *pSeg){ } static void fts5IterSetOutputCb(int *pRc, Fts5Iter *pIter){ + assert( pIter!=0 || (*pRc)!=SQLITE_OK ); if( *pRc==SQLITE_OK ){ Fts5Config *pConfig = pIter->pIndex->pConfig; if( pConfig->eDetail==FTS5_DETAIL_NONE ){ @@ -224458,7 +226349,10 @@ static void fts5MultiIterNew( } } *ppOut = pNew = fts5MultiIterAlloc(p, nSeg); - if( pNew==0 ) return; + if( pNew==0 ){ + assert( p->rc!=SQLITE_OK ); + goto fts5MultiIterNew_post_check; + } pNew->bRev = (0!=(flags & FTS5INDEX_QUERY_DESC)); pNew->bSkipEmpty = (0!=(flags & FTS5INDEX_QUERY_SKIPEMPTY)); pNew->pColset = pColset; @@ -224522,6 +226416,10 @@ static void fts5MultiIterNew( fts5MultiIterFree(pNew); *ppOut = 0; } + +fts5MultiIterNew_post_check: + assert( (*ppOut)!=0 || p->rc!=SQLITE_OK ); + return; } /* @@ -224569,7 +226467,8 @@ static void fts5MultiIterNew2( ** False otherwise. */ static int fts5MultiIterEof(Fts5Index *p, Fts5Iter *pIter){ - assert( p->rc + assert( pIter!=0 || p->rc!=SQLITE_OK ); + assert( p->rc!=SQLITE_OK || (pIter->aSeg[ pIter->aFirst[1].iFirst ].pLeaf==0)==pIter->base.bEof ); return (p->rc || pIter->base.bEof); @@ -225373,6 +227272,7 @@ static void fts5IndexMergeLevel( ** and last leaf page number at the same time. */ fts5WriteFinish(p, &writer, &pSeg->pgnoLast); + assert( pIter!=0 || p->rc!=SQLITE_OK ); if( fts5MultiIterEof(p, pIter) ){ int i; @@ -225473,7 +227373,7 @@ static void fts5IndexAutomerge( Fts5Structure **ppStruct, /* IN/OUT: Current structure of index */ int nLeaf /* Number of output leaves just written */ ){ - if( p->rc==SQLITE_OK && p->pConfig->nAutomerge>0 ){ + if( p->rc==SQLITE_OK && p->pConfig->nAutomerge>0 && ALWAYS((*ppStruct)!=0) ){ Fts5Structure *pStruct = *ppStruct; u64 nWrite; /* Initial value of write-counter */ int nWork; /* Number of work-quanta to perform */ @@ -226583,11 +228483,15 @@ static int sqlite3Fts5IndexQuery( /* Scan multiple terms in the main index */ int bDesc = (flags & FTS5INDEX_QUERY_DESC)!=0; fts5SetupPrefixIter(p, bDesc, iPrefixIdx, buf.p, nToken+1, pColset,&pRet); - assert( p->rc!=SQLITE_OK || pRet->pColset==0 ); - fts5IterSetOutputCb(&p->rc, pRet); - if( p->rc==SQLITE_OK ){ - Fts5SegIter *pSeg = &pRet->aSeg[pRet->aFirst[1].iFirst]; - if( pSeg->pLeaf ) pRet->xSetOutputs(pRet, pSeg); + if( pRet==0 ){ + assert( p->rc!=SQLITE_OK ); + }else{ + assert( pRet->pColset==0 ); + fts5IterSetOutputCb(&p->rc, pRet); + if( p->rc==SQLITE_OK ){ + Fts5SegIter *pSeg = &pRet->aSeg[pRet->aFirst[1].iFirst]; + if( pSeg->pLeaf ) pRet->xSetOutputs(pRet, pSeg); + } } } @@ -226835,7 +228739,7 @@ static int fts5QueryCksum( Fts5IndexIter *pIter = 0; int rc = sqlite3Fts5IndexQuery(p, z, n, flags, 0, &pIter); - while( rc==SQLITE_OK && 0==sqlite3Fts5IterEof(pIter) ){ + while( rc==SQLITE_OK && ALWAYS(pIter!=0) && 0==sqlite3Fts5IterEof(pIter) ){ i64 rowid = pIter->iRowid; if( eDetail==FTS5_DETAIL_NONE ){ @@ -227200,6 +229104,7 @@ static int sqlite3Fts5IndexIntegrityCheck(Fts5Index *p, u64 cksum, int bUseCksum Fts5Buffer poslist = {0,0,0}; /* Buffer used to hold a poslist */ Fts5Iter *pIter; /* Used to iterate through entire index */ Fts5Structure *pStruct; /* Index structure */ + int iLvl, iSeg; #ifdef SQLITE_DEBUG /* Used by extra internal tests only run if NDEBUG is not defined */ @@ -227210,15 +229115,16 @@ static int 
sqlite3Fts5IndexIntegrityCheck(Fts5Index *p, u64 cksum, int bUseCksum /* Load the FTS index structure */ pStruct = fts5StructureRead(p); + if( pStruct==0 ){ + assert( p->rc!=SQLITE_OK ); + return fts5IndexReturn(p); + } /* Check that the internal nodes of each segment match the leaves */ - if( pStruct ){ - int iLvl, iSeg; - for(iLvl=0; iLvlnLevel; iLvl++){ - for(iSeg=0; iSegaLevel[iLvl].nSeg; iSeg++){ - Fts5StructureSegment *pSeg = &pStruct->aLevel[iLvl].aSeg[iSeg]; - fts5IndexIntegrityCheckSegment(p, pSeg); - } + for(iLvl=0; iLvlnLevel; iLvl++){ + for(iSeg=0; iSegaLevel[iLvl].nSeg; iSeg++){ + Fts5StructureSegment *pSeg = &pStruct->aLevel[iLvl].aSeg[iSeg]; + fts5IndexIntegrityCheckSegment(p, pSeg); } } @@ -229165,7 +231071,8 @@ static int fts5FilterMethod( pTab->pStorage, fts5StmtType(pCsr), &pCsr->pStmt, &pTab->p.base.zErrMsg ); if( rc==SQLITE_OK ){ - if( pCsr->ePlan==FTS5_PLAN_ROWID ){ + if( pRowidEq!=0 ){ + assert( pCsr->ePlan==FTS5_PLAN_ROWID ); sqlite3_bind_value(pCsr->pStmt, 1, pRowidEq); }else{ sqlite3_bind_int64(pCsr->pStmt, 1, pCsr->iFirstRowid); @@ -230583,7 +232490,7 @@ static void fts5SourceIdFunc( ){ assert( nArg==0 ); UNUSED_PARAM2(nArg, apUnused); - sqlite3_result_text(pCtx, "fts5: 2021-06-18 18:36:39 5c9a6c06871cb9fe42814af9c039eb6da5427a6ec28f187af7ebfb62eafa66e5", -1, SQLITE_TRANSIENT); + sqlite3_result_text(pCtx, "fts5: 2021-11-27 14:13:22 bd41822c7424d393a30e92ff6cb254d25c26769889c1499a18a0b9339f5d6c8a", -1, SQLITE_TRANSIENT); } /* @@ -231134,12 +233041,16 @@ static int fts5StorageDeleteFromIndex( if( pConfig->abUnindexed[iCol-1]==0 ){ const char *zText; int nText; + assert( pSeek==0 || apVal==0 ); + assert( pSeek!=0 || apVal!=0 ); if( pSeek ){ zText = (const char*)sqlite3_column_text(pSeek, iCol); nText = sqlite3_column_bytes(pSeek, iCol); - }else{ + }else if( ALWAYS(apVal) ){ zText = (const char*)sqlite3_value_text(apVal[iCol-1]); nText = sqlite3_value_bytes(apVal[iCol-1]); + }else{ + continue; } ctx.szCol = 0; rc = sqlite3Fts5Tokenize(pConfig, FTS5_TOKENIZE_DOCUMENT, @@ -231775,8 +233686,9 @@ static int sqlite3Fts5StorageDocsize(Fts5Storage *p, i64 iRowid, int *aCol){ assert( p->pConfig->bColumnsize ); rc = fts5StorageGetStmt(p, FTS5_STMT_LOOKUP_DOCSIZE, &pLookup, 0); - if( rc==SQLITE_OK ){ + if( pLookup ){ int bCorrupt = 1; + assert( rc==SQLITE_OK ); sqlite3_bind_int64(pLookup, 1, iRowid); if( SQLITE_ROW==sqlite3_step(pLookup) ){ const u8 *aBlob = sqlite3_column_blob(pLookup, 0); @@ -231789,6 +233701,8 @@ static int sqlite3Fts5StorageDocsize(Fts5Storage *p, i64 iRowid, int *aCol){ if( bCorrupt && rc==SQLITE_OK ){ rc = FTS5_CORRUPT; } + }else{ + assert( rc!=SQLITE_OK ); } return rc; @@ -234479,6 +236393,7 @@ struct Fts5VocabCursor { int bEof; /* True if this cursor is at EOF */ Fts5IndexIter *pIter; /* Term/rowid iterator object */ + void *pStruct; /* From sqlite3Fts5StructureRef() */ int nLeTerm; /* Size of zLeTerm in bytes */ char *zLeTerm; /* (term <= $zLeTerm) paramater, or NULL */ @@ -234792,7 +236707,7 @@ static int fts5VocabOpenMethod( } if( rc==SQLITE_OK ){ - int nByte = pFts5->pConfig->nCol * sizeof(i64)*2 + sizeof(Fts5VocabCursor); + i64 nByte = pFts5->pConfig->nCol * sizeof(i64)*2 + sizeof(Fts5VocabCursor); pCsr = (Fts5VocabCursor*)sqlite3Fts5MallocZero(&rc, nByte); } @@ -234812,6 +236727,8 @@ static int fts5VocabOpenMethod( static void fts5VocabResetCursor(Fts5VocabCursor *pCsr){ pCsr->rowid = 0; sqlite3Fts5IterClose(pCsr->pIter); + sqlite3Fts5StructureRelease(pCsr->pStruct); + pCsr->pStruct = 0; pCsr->pIter = 0; sqlite3_free(pCsr->zLeTerm); pCsr->nLeTerm 
= -1; @@ -234889,9 +236806,11 @@ static int fts5VocabInstanceNext(Fts5VocabCursor *pCsr){ static int fts5VocabNextMethod(sqlite3_vtab_cursor *pCursor){ Fts5VocabCursor *pCsr = (Fts5VocabCursor*)pCursor; Fts5VocabTable *pTab = (Fts5VocabTable*)pCursor->pVtab; - int rc = SQLITE_OK; int nCol = pCsr->pFts5->pConfig->nCol; + int rc; + rc = sqlite3Fts5StructureTest(pCsr->pFts5->pIndex, pCsr->pStruct); + if( rc!=SQLITE_OK ) return rc; pCsr->rowid++; if( pTab->eType==FTS5_VOCAB_INSTANCE ){ @@ -235065,6 +236984,9 @@ static int fts5VocabFilterMethod( if( rc==SQLITE_OK ){ Fts5Index *pIndex = pCsr->pFts5->pIndex; rc = sqlite3Fts5IndexQuery(pIndex, zTerm, nTerm, f, 0, &pCsr->pIter); + if( rc==SQLITE_OK ){ + pCsr->pStruct = sqlite3Fts5StructureRef(pIndex); + } } if( rc==SQLITE_OK && eType==FTS5_VOCAB_INSTANCE ){ rc = fts5VocabInstanceNewTerm(pCsr); @@ -235509,10 +237431,6 @@ SQLITE_API int sqlite3_stmt_init( #endif /* !defined(SQLITE_CORE) || defined(SQLITE_ENABLE_STMTVTAB) */ /************** End of stmt.c ************************************************/ -#if __LINE__!=235511 -#undef SQLITE_SOURCE_ID -#define SQLITE_SOURCE_ID "2021-06-18 18:36:39 5c9a6c06871cb9fe42814af9c039eb6da5427a6ec28f187af7ebfb62eafaalt2" -#endif /* Return the source-id for this library */ SQLITE_API const char *sqlite3_sourceid(void){ return SQLITE_SOURCE_ID; } /************************** End of sqlite3.c ******************************/ diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h index a24716c5c..6bf2c58ae 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h @@ -44,7 +44,30 @@ extern "C" { /* -** Provide the ability to override linkage features of the interface. +** Facilitate override of interface linkage and calling conventions. +** Be aware that these macros may not be used within this particular +** translation of the amalgamation and its associated header file. +** +** The SQLITE_EXTERN and SQLITE_API macros are used to instruct the +** compiler that the target identifier should have external linkage. +** +** The SQLITE_CDECL macro is used to set the calling convention for +** public functions that accept a variable number of arguments. +** +** The SQLITE_APICALL macro is used to set the calling convention for +** public functions that accept a fixed number of arguments. +** +** The SQLITE_STDCALL macro is no longer used and is now deprecated. +** +** The SQLITE_CALLBACK macro is used to set the calling convention for +** function pointers. +** +** The SQLITE_SYSAPI macro is used to set the calling convention for +** functions provided by the operating system. +** +** Currently, the SQLITE_CDECL, SQLITE_APICALL, SQLITE_CALLBACK, and +** SQLITE_SYSAPI macros are used only when building for environments +** that require non-default calling conventions. */ #ifndef SQLITE_EXTERN # define SQLITE_EXTERN extern @@ -124,9 +147,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. 
*/ -#define SQLITE_VERSION "3.36.0" -#define SQLITE_VERSION_NUMBER 3036000 -#define SQLITE_SOURCE_ID "2021-06-18 18:36:39 5c9a6c06871cb9fe42814af9c039eb6da5427a6ec28f187af7ebfb62eafa66e5" +#define SQLITE_VERSION "3.37.0" +#define SQLITE_VERSION_NUMBER 3037000 +#define SQLITE_SOURCE_ID "2021-11-27 14:13:22 bd41822c7424d393a30e92ff6cb254d25c26769889c1499a18a0b9339f5d6c8a" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -538,6 +561,7 @@ SQLITE_API int sqlite3_exec( #define SQLITE_CONSTRAINT_VTAB (SQLITE_CONSTRAINT | (9<<8)) #define SQLITE_CONSTRAINT_ROWID (SQLITE_CONSTRAINT |(10<<8)) #define SQLITE_CONSTRAINT_PINNED (SQLITE_CONSTRAINT |(11<<8)) +#define SQLITE_CONSTRAINT_DATATYPE (SQLITE_CONSTRAINT |(12<<8)) #define SQLITE_NOTICE_RECOVER_WAL (SQLITE_NOTICE | (1<<8)) #define SQLITE_NOTICE_RECOVER_ROLLBACK (SQLITE_NOTICE | (2<<8)) #define SQLITE_WARNING_AUTOINDEX (SQLITE_WARNING | (1<<8)) @@ -551,6 +575,19 @@ SQLITE_API int sqlite3_exec( ** These bit values are intended for use in the ** 3rd parameter to the [sqlite3_open_v2()] interface and ** in the 4th parameter to the [sqlite3_vfs.xOpen] method. +** +** Only those flags marked as "Ok for sqlite3_open_v2()" may be +** used as the third argument to the [sqlite3_open_v2()] interface. +** The other flags have historically been ignored by sqlite3_open_v2(), +** though future versions of SQLite might change so that an error is +** raised if any of the disallowed bits are passed into sqlite3_open_v2(). +** Applications should not depend on the historical behavior. +** +** Note in particular that passing the SQLITE_OPEN_EXCLUSIVE flag into +** [sqlite3_open_v2()] does *not* cause the underlying database file +** to be opened using O_EXCL. Passing SQLITE_OPEN_EXCLUSIVE into +** [sqlite3_open_v2()] has historically be a no-op and might become an +** error in future versions of SQLite. */ #define SQLITE_OPEN_READONLY 0x00000001 /* Ok for sqlite3_open_v2() */ #define SQLITE_OPEN_READWRITE 0x00000002 /* Ok for sqlite3_open_v2() */ @@ -573,6 +610,7 @@ SQLITE_API int sqlite3_exec( #define SQLITE_OPEN_PRIVATECACHE 0x00040000 /* Ok for sqlite3_open_v2() */ #define SQLITE_OPEN_WAL 0x00080000 /* VFS only */ #define SQLITE_OPEN_NOFOLLOW 0x01000000 /* Ok for sqlite3_open_v2() */ +#define SQLITE_OPEN_EXRESCODE 0x02000000 /* Extended result codes */ /* Reserved: 0x00F00000 */ /* Legacy compatibility: */ @@ -2465,11 +2503,14 @@ SQLITE_API void sqlite3_set_last_insert_rowid(sqlite3*,sqlite3_int64); ** CAPI3REF: Count The Number Of Rows Modified ** METHOD: sqlite3 ** -** ^This function returns the number of rows modified, inserted or +** ^These functions return the number of rows modified, inserted or ** deleted by the most recently completed INSERT, UPDATE or DELETE ** statement on the database connection specified by the only parameter. -** ^Executing any other type of SQL statement does not modify the value -** returned by this function. +** The two functions are identical except for the type of the return value +** and that if the number of rows modified by the most recent INSERT, UPDATE +** or DELETE is greater than the maximum value supported by type "int", then +** the return value of sqlite3_changes() is undefined. ^Executing any other +** type of SQL statement does not modify the value returned by these functions. 
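[Editor's note] From Go, the change count described above is normally consumed through database/sql rather than by calling sqlite3_changes() directly; mattn/go-sqlite3 fills the driver's Result from that C call at exec time. A minimal sketch, assuming the default "sqlite3" driver registration and a throwaway demo.db file (both are placeholders, not part of this patch):

    package main

    import (
        "database/sql"
        "log"

        _ "github.com/mattn/go-sqlite3"
    )

    func main() {
        db, err := sql.Open("sqlite3", "demo.db")
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        if _, err := db.Exec(`CREATE TABLE IF NOT EXISTS t(x INT)`); err != nil {
            log.Fatal(err)
        }
        res, err := db.Exec(`INSERT INTO t(x) VALUES (1), (2), (3)`)
        if err != nil {
            log.Fatal(err)
        }
        // RowsAffected reports the rows touched by the most recent
        // INSERT/UPDATE/DELETE on this connection, i.e. sqlite3_changes().
        n, err := res.RowsAffected()
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("rows changed: %d", n) // prints 3
    }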
** ** ^Only changes made directly by the INSERT, UPDATE or DELETE statement are ** considered - auxiliary changes caused by [CREATE TRIGGER | triggers], @@ -2518,16 +2559,21 @@ SQLITE_API void sqlite3_set_last_insert_rowid(sqlite3*,sqlite3_int64); ** */ SQLITE_API int sqlite3_changes(sqlite3*); +SQLITE_API sqlite3_int64 sqlite3_changes64(sqlite3*); /* ** CAPI3REF: Total Number Of Rows Modified ** METHOD: sqlite3 ** -** ^This function returns the total number of rows inserted, modified or +** ^These functions return the total number of rows inserted, modified or ** deleted by all [INSERT], [UPDATE] or [DELETE] statements completed ** since the database connection was opened, including those executed as -** part of trigger programs. ^Executing any other type of SQL statement -** does not affect the value returned by sqlite3_total_changes(). +** part of trigger programs. The two functions are identical except for the +** type of the return value and that if the number of rows modified by the +** connection exceeds the maximum value supported by type "int", then +** the return value of sqlite3_total_changes() is undefined. ^Executing +** any other type of SQL statement does not affect the value returned by +** sqlite3_total_changes(). ** ** ^Changes made as part of [foreign key actions] are included in the ** count, but those made as part of REPLACE constraint resolution are @@ -2555,6 +2601,7 @@ SQLITE_API int sqlite3_changes(sqlite3*); ** */ SQLITE_API int sqlite3_total_changes(sqlite3*); +SQLITE_API sqlite3_int64 sqlite3_total_changes64(sqlite3*); /* ** CAPI3REF: Interrupt A Long-Running Query @@ -3384,6 +3431,14 @@ SQLITE_API void sqlite3_progress_handler(sqlite3*, int, int(*)(void*), void*); ** the default shared cache setting provided by ** [sqlite3_enable_shared_cache()].)^ ** +** [[OPEN_EXRESCODE]] ^(
[SQLITE_OPEN_EXRESCODE]
+**
The database connection comes up in "extended result code mode". +** In other words, the database behaves as if +** [sqlite3_extended_result_codes(db,1)] were called on the database +** connection as soon as the connection is created. In addition to setting +** the extended result code mode, this flag also causes [sqlite3_open_v2()] +** to return an extended result code.
+** ** [[OPEN_NOFOLLOW]] ^(
[SQLITE_OPEN_NOFOLLOW]
**
The database filename is not allowed to be a symbolic link
** )^ @@ -3391,7 +3446,15 @@ SQLITE_API void sqlite3_progress_handler(sqlite3*, int, int(*)(void*), void*); ** If the 3rd parameter to sqlite3_open_v2() is not one of the ** required combinations shown above optionally combined with other ** [SQLITE_OPEN_READONLY | SQLITE_OPEN_* bits] -** then the behavior is undefined. +** then the behavior is undefined. Historic versions of SQLite +** have silently ignored surplus bits in the flags parameter to +** sqlite3_open_v2(), however that behavior might not be carried through +** into future versions of SQLite and so applications should not rely +** upon it. Note in particular that the SQLITE_OPEN_EXCLUSIVE flag is a no-op +** for sqlite3_open_v2(). The SQLITE_OPEN_EXCLUSIVE does *not* cause +** the open to fail if the database already exists. The SQLITE_OPEN_EXCLUSIVE +** flag is intended for use by the [sqlite3_vfs|VFS interface] only, and not +** by sqlite3_open_v2(). ** ** ^The fourth parameter to sqlite3_open_v2() is the name of the ** [sqlite3_vfs] object that defines the operating system interface that @@ -4159,12 +4222,17 @@ SQLITE_API int sqlite3_prepare16_v3( ** are managed by SQLite and are automatically freed when the prepared ** statement is finalized. ** ^The string returned by sqlite3_expanded_sql(P), on the other hand, -** is obtained from [sqlite3_malloc()] and must be free by the application +** is obtained from [sqlite3_malloc()] and must be freed by the application ** by passing it to [sqlite3_free()]. +** +** ^The sqlite3_normalized_sql() interface is only available if +** the [SQLITE_ENABLE_NORMALIZE] compile-time option is defined. */ SQLITE_API const char *sqlite3_sql(sqlite3_stmt *pStmt); SQLITE_API char *sqlite3_expanded_sql(sqlite3_stmt *pStmt); +#ifdef SQLITE_ENABLE_NORMALIZE SQLITE_API const char *sqlite3_normalized_sql(sqlite3_stmt *pStmt); +#endif /* ** CAPI3REF: Determine If An SQL Statement Writes The Database @@ -6348,6 +6416,72 @@ SQLITE_API sqlite3_stmt *sqlite3_next_stmt(sqlite3 *pDb, sqlite3_stmt *pStmt); SQLITE_API void *sqlite3_commit_hook(sqlite3*, int(*)(void*), void*); SQLITE_API void *sqlite3_rollback_hook(sqlite3*, void(*)(void *), void*); +/* +** CAPI3REF: Autovacuum Compaction Amount Callback +** METHOD: sqlite3 +** +** ^The sqlite3_autovacuum_pages(D,C,P,X) interface registers a callback +** function C that is invoked prior to each autovacuum of the database +** file. ^The callback is passed a copy of the generic data pointer (P), +** the schema-name of the attached database that is being autovacuumed, +** the the size of the database file in pages, the number of free pages, +** and the number of bytes per page, respectively. The callback should +** return the number of free pages that should be removed by the +** autovacuum. ^If the callback returns zero, then no autovacuum happens. +** ^If the value returned is greater than or equal to the number of +** free pages, then a complete autovacuum happens. +** +**

^If there are multiple ATTACH-ed database files that are being +** modified as part of a transaction commit, then the autovacuum pages +** callback is invoked separately for each file. +** +**

The callback is not reentrant. The callback function should +** not attempt to invoke any other SQLite interface. If it does, bad +** things may happen, including segmentation faults and corrupt database +** files. The callback function should be a simple function that +** does some arithmetic on its input parameters and returns a result. +** +** ^The X parameter to sqlite3_autovacuum_pages(D,C,P,X) is an optional +** destructor for the P parameter. ^If X is not NULL, then X(P) is +** invoked whenever the database connection closes or when the callback +** is overwritten by another invocation of sqlite3_autovacuum_pages(). +** +**

^There is only one autovacuum pages callback per database connection. +** ^Each call to the sqlite3_autovacuum_pages() interface overrides all +** previous invocations for that database connection. ^If the callback +** argument (C) to sqlite3_autovacuum_pages(D,C,P,X) is a NULL pointer, +** then the autovacuum steps callback is cancelled. The return value +** from sqlite3_autovacuum_pages() is normally SQLITE_OK, but might +** be some other error code if something goes wrong. The current +** implementation will only return SQLITE_OK or SQLITE_MISUSE, but other +** return codes might be added in future releases. +** +**

If no autovacuum pages callback is specified (the usual case) or +** a NULL pointer is provided for the callback, +** then the default behavior is to vacuum all free pages. So, in other +** words, the default behavior is the same as if the callback function +** were something like this: +** +**

+**     unsigned int demonstration_autovac_pages_callback(
+**       void *pClientData,
+**       const char *zSchema,
+**       unsigned int nDbPage,
+**       unsigned int nFreePage,
+**       unsigned int nBytePerPage
+**     ){
+**       return nFreePage;
+**     }
+** 
+*/ +SQLITE_API int sqlite3_autovacuum_pages( + sqlite3 *db, + unsigned int(*)(void*,const char*,unsigned int,unsigned int,unsigned int), + void*, + void(*)(void*) +); + + /* ** CAPI3REF: Data Change Notification Callbacks ** METHOD: sqlite3 @@ -9011,8 +9145,9 @@ SQLITE_API void sqlite3_log(int iErrCode, const char *zFormat, ...); ** ** A single database handle may have at most a single write-ahead log callback ** registered at one time. ^Calling [sqlite3_wal_hook()] replaces any -** previously registered write-ahead log callback. ^Note that the -** [sqlite3_wal_autocheckpoint()] interface and the +** previously registered write-ahead log callback. ^The return value is +** a copy of the third parameter from the previous call, if any, or 0. +** ^Note that the [sqlite3_wal_autocheckpoint()] interface and the ** [wal_autocheckpoint pragma] both invoke [sqlite3_wal_hook()] and will ** overwrite any prior [sqlite3_wal_hook()] settings. */ @@ -9879,6 +10014,10 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** +** It is not possible to deserialized into the TEMP database. If the +** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the +** function returns SQLITE_ERROR. +** ** If sqlite3_deserialize(D,S,P,N,M,F) fails for any reason and if the ** SQLITE_DESERIALIZE_FREEONCLOSE bit is set in argument F, then ** [sqlite3_free()] is invoked on argument P prior to returning. diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3.go b/vendor/github.com/mattn/go-sqlite3/sqlite3.go index 5ac957092..e037857db 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3.go @@ -4,6 +4,7 @@ // Use of this source code is governed by an MIT-style // license that can be found in the LICENSE file. +//go:build cgo // +build cgo package sqlite3 @@ -21,8 +22,10 @@ package sqlite3 #cgo CFLAGS: -DSQLITE_ENABLE_UPDATE_DELETE_LIMIT #cgo CFLAGS: -Wno-deprecated-declarations #cgo linux,!android CFLAGS: -DHAVE_PREAD64=1 -DHAVE_PWRITE64=1 +#cgo openbsd CFLAGS: -I/usr/local/include +#cgo openbsd LDFLAGS: -L/usr/local/lib #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif @@ -231,8 +234,14 @@ const ( columnTimestamp string = "timestamp" ) +// This variable can be replaced with -ldflags like below: +// go build -ldflags="-X 'github.com/mattn/go-sqlite3.driverName=my-sqlite3'" +var driverName = "sqlite3" + func init() { - sql.Register("sqlite3", &SQLiteDriver{}) + if driverName != "" { + sql.Register(driverName, &SQLiteDriver{}) + } } // Version returns SQLite library version information. 
@@ -288,6 +297,51 @@ const ( /*SQLITE_RECURSIVE = C.SQLITE_RECURSIVE*/ ) +// Standard File Control Opcodes +// See: https://www.sqlite.org/c3ref/c_fcntl_begin_atomic_write.html +const ( + SQLITE_FCNTL_LOCKSTATE = int(1) + SQLITE_FCNTL_GET_LOCKPROXYFILE = int(2) + SQLITE_FCNTL_SET_LOCKPROXYFILE = int(3) + SQLITE_FCNTL_LAST_ERRNO = int(4) + SQLITE_FCNTL_SIZE_HINT = int(5) + SQLITE_FCNTL_CHUNK_SIZE = int(6) + SQLITE_FCNTL_FILE_POINTER = int(7) + SQLITE_FCNTL_SYNC_OMITTED = int(8) + SQLITE_FCNTL_WIN32_AV_RETRY = int(9) + SQLITE_FCNTL_PERSIST_WAL = int(10) + SQLITE_FCNTL_OVERWRITE = int(11) + SQLITE_FCNTL_VFSNAME = int(12) + SQLITE_FCNTL_POWERSAFE_OVERWRITE = int(13) + SQLITE_FCNTL_PRAGMA = int(14) + SQLITE_FCNTL_BUSYHANDLER = int(15) + SQLITE_FCNTL_TEMPFILENAME = int(16) + SQLITE_FCNTL_MMAP_SIZE = int(18) + SQLITE_FCNTL_TRACE = int(19) + SQLITE_FCNTL_HAS_MOVED = int(20) + SQLITE_FCNTL_SYNC = int(21) + SQLITE_FCNTL_COMMIT_PHASETWO = int(22) + SQLITE_FCNTL_WIN32_SET_HANDLE = int(23) + SQLITE_FCNTL_WAL_BLOCK = int(24) + SQLITE_FCNTL_ZIPVFS = int(25) + SQLITE_FCNTL_RBU = int(26) + SQLITE_FCNTL_VFS_POINTER = int(27) + SQLITE_FCNTL_JOURNAL_POINTER = int(28) + SQLITE_FCNTL_WIN32_GET_HANDLE = int(29) + SQLITE_FCNTL_PDB = int(30) + SQLITE_FCNTL_BEGIN_ATOMIC_WRITE = int(31) + SQLITE_FCNTL_COMMIT_ATOMIC_WRITE = int(32) + SQLITE_FCNTL_ROLLBACK_ATOMIC_WRITE = int(33) + SQLITE_FCNTL_LOCK_TIMEOUT = int(34) + SQLITE_FCNTL_DATA_VERSION = int(35) + SQLITE_FCNTL_SIZE_LIMIT = int(36) + SQLITE_FCNTL_CKPT_DONE = int(37) + SQLITE_FCNTL_RESERVE_BYTES = int(38) + SQLITE_FCNTL_CKPT_START = int(39) + SQLITE_FCNTL_EXTERNAL_READER = int(40) + SQLITE_FCNTL_CKSM_FILE = int(41) +) + // SQLiteDriver implements driver.Driver. type SQLiteDriver struct { Extensions []string @@ -828,6 +882,10 @@ func (c *SQLiteConn) exec(ctx context.Context, query string, args []namedValue) tail := s.(*SQLiteStmt).t s.Close() if tail == "" { + if res == nil { + // https://github.com/mattn/go-sqlite3/issues/963 + res = &SQLiteResult{0, 0} + } return res, nil } query = tail @@ -1409,12 +1467,6 @@ func (d *SQLiteDriver) Open(dsn string) (driver.Conn, error) { return nil, errors.New("sqlite succeeded without returning a database") } - rv = C.sqlite3_busy_timeout(db, C.int(busyTimeout)) - if rv != C.SQLITE_OK { - C.sqlite3_close_v2(db) - return nil, Error{Code: ErrNo(rv)} - } - exec := func(s string) error { cs := C.CString(s) rv := C.sqlite3_exec(db, cs, nil, nil, nil) @@ -1425,6 +1477,12 @@ func (d *SQLiteDriver) Open(dsn string) (driver.Conn, error) { return nil } + // Busy timeout + if err := exec(fmt.Sprintf("PRAGMA busy_timeout = %d;", busyTimeout)); err != nil { + C.sqlite3_close_v2(db) + return nil, err + } + // USER AUTHENTICATION // // User Authentication is always performed even when @@ -1800,6 +1858,31 @@ func (c *SQLiteConn) SetLimit(id int, newVal int) int { return int(C._sqlite3_limit(c.db, C.int(id), C.int(newVal))) } +// SetFileControlInt invokes the xFileControl method on a given database. The +// dbName is the name of the database. It will default to "main" if left blank. +// The op is one of the opcodes prefixed by "SQLITE_FCNTL_". The arg argument +// and return code are both opcode-specific. Please see the SQLite documentation. +// +// This method is not thread-safe as the returned error code can be changed by +// another call if invoked concurrently. 
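[Editor's note] The new SetFileControlInt method is easiest to reach through the driver's ConnectHook, since database/sql does not expose the underlying *SQLiteConn directly. A minimal sketch; the registered driver name, the demo.db path, and the choice of the SQLITE_FCNTL_PERSIST_WAL opcode added above are illustrative only:

    package main

    import (
        "database/sql"
        "log"

        sqlite3 "github.com/mattn/go-sqlite3"
    )

    func main() {
        // Register a driver variant whose ConnectHook applies a file-control
        // opcode to every new connection.
        sql.Register("sqlite3_persist_wal", &sqlite3.SQLiteDriver{
            ConnectHook: func(conn *sqlite3.SQLiteConn) error {
                // An empty dbName defaults to "main"; arg 1 asks SQLite to keep
                // the -wal file around after the database is closed.
                return conn.SetFileControlInt("", sqlite3.SQLITE_FCNTL_PERSIST_WAL, 1)
            },
        })

        db, err := sql.Open("sqlite3_persist_wal", "demo.db")
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        // Ping forces a real connection so the hook actually runs.
        if err := db.Ping(); err != nil {
            log.Fatal(err)
        }
    }

Any of the SQLITE_FCNTL_* opcodes listed above can be passed the same way; the meaning of arg is opcode-specific, as the method's documentation notes.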
+// +// See: sqlite3_file_control, https://www.sqlite.org/c3ref/file_control.html +func (c *SQLiteConn) SetFileControlInt(dbName string, op int, arg int) error { + if dbName == "" { + dbName = "main" + } + + cDBName := C.CString(dbName) + defer C.free(unsafe.Pointer(cDBName)) + + cArg := C.int(arg) + rv := C.sqlite3_file_control(c.db, cDBName, C.int(op), unsafe.Pointer(&cArg)) + if rv != C.SQLITE_OK { + return c.lastError() + } + return nil +} + // Close the statement. func (s *SQLiteStmt) Close() error { s.mu.Lock() diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_context.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_context.go index 90800feeb..7c7431dcc 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_context.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_context.go @@ -8,7 +8,7 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_load_extension.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_load_extension.go index f2418196f..9433fea82 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_load_extension.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_load_extension.go @@ -9,7 +9,7 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_preupdate_hook.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_preupdate_hook.go index db7a66621..b2e18bbcb 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_preupdate_hook.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_preupdate_hook.go @@ -13,7 +13,7 @@ package sqlite3 #cgo LDFLAGS: -lm #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.c b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.c index 1af1726b4..fc37b336c 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.c +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.c @@ -5,7 +5,7 @@ #ifdef SQLITE_ENABLE_UNLOCK_NOTIFY #include -#include +#include "sqlite3-binding.h" extern int unlock_notify_wait(sqlite3 *db); diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.go index 43f53e807..adfa26c54 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_unlock_notify.go @@ -12,7 +12,7 @@ package sqlite3 #cgo CFLAGS: -DSQLITE_ENABLE_UNLOCK_NOTIFY #include -#include +#include "sqlite3-binding.h" extern void unlock_notify_callback(void *arg, int argc); */ diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go index 94203b397..b62b60840 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go @@ -11,7 +11,7 @@ package sqlite3 #cgo CFLAGS: -DSQLITE_USER_AUTHENTICATION #cgo LDFLAGS: -lm #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_vtable.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_vtable.go index 8fd6cdffe..4a93c4652 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_vtable.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_vtable.go @@ -19,7 +19,7 @@ package 
sqlite3 #cgo CFLAGS: -Wno-deprecated-declarations #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif @@ -472,10 +472,21 @@ func goVBestIndex(pVTab unsafe.Pointer, icp unsafe.Pointer) *C.char { } info.idxNum = C.int(res.IdxNum) - idxStr := C.CString(res.IdxStr) - defer C.free(unsafe.Pointer(idxStr)) - info.idxStr = idxStr - info.needToFreeIdxStr = C.int(0) + info.idxStr = (*C.char)(C.sqlite3_malloc(C.int(len(res.IdxStr) + 1))) + if info.idxStr == nil { + // C.malloc and C.CString ordinarily do this for you. See https://golang.org/cmd/cgo/ + panic("out of memory") + } + info.needToFreeIdxStr = C.int(1) + + idxStr := *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{ + Data: uintptr(unsafe.Pointer(info.idxStr)), + Len: len(res.IdxStr) + 1, + Cap: len(res.IdxStr) + 1, + })) + copy(idxStr, res.IdxStr) + idxStr[len(idxStr)-1] = 0 // null-terminated string + if res.AlreadyOrdered { info.orderByConsumed = C.int(1) } diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_trace.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_trace.go index 4c8d9928a..56bb91490 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_trace.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_trace.go @@ -9,7 +9,7 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_type.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_type.go index b4128db4b..0fd8210bb 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_type.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_type.go @@ -1,5 +1,4 @@ // Copyright (C) 2019 Yasuhiro Matsumoto . -// // Use of this source code is governed by an MIT-style // license that can be found in the LICENSE file. @@ -7,15 +6,16 @@ package sqlite3 /* #ifndef USE_LIBSQLITE3 -#include +#include "sqlite3-binding.h" #else #include #endif */ import "C" import ( + "database/sql" "reflect" - "time" + "strings" ) // ColumnTypeDatabaseTypeName implement RowsColumnTypeDatabaseTypeName. @@ -31,32 +31,78 @@ func (rc *SQLiteRows) ColumnTypeLength(index int) (length int64, ok bool) { func (rc *SQLiteRows) ColumnTypePrecisionScale(index int) (precision, scale int64, ok bool) { return 0, 0, false } +*/ // ColumnTypeNullable implement RowsColumnTypeNullable. func (rc *SQLiteRows) ColumnTypeNullable(i int) (nullable, ok bool) { - return false, false + return true, true } -*/ // ColumnTypeScanType implement RowsColumnTypeScanType. 
func (rc *SQLiteRows) ColumnTypeScanType(i int) reflect.Type { - switch C.sqlite3_column_type(rc.s.s, C.int(i)) { - case C.SQLITE_INTEGER: - switch C.GoString(C.sqlite3_column_decltype(rc.s.s, C.int(i))) { - case "timestamp", "datetime", "date": - return reflect.TypeOf(time.Time{}) - case "boolean": - return reflect.TypeOf(false) - } - return reflect.TypeOf(int64(0)) - case C.SQLITE_FLOAT: - return reflect.TypeOf(float64(0)) - case C.SQLITE_BLOB: - return reflect.SliceOf(reflect.TypeOf(byte(0))) - case C.SQLITE_NULL: - return reflect.TypeOf(nil) - case C.SQLITE_TEXT: - return reflect.TypeOf("") - } - return reflect.SliceOf(reflect.TypeOf(byte(0))) + //ct := C.sqlite3_column_type(rc.s.s, C.int(i)) // Always returns 5 + return scanType(C.GoString(C.sqlite3_column_decltype(rc.s.s, C.int(i)))) +} + +const ( + SQLITE_INTEGER = iota + SQLITE_TEXT + SQLITE_BLOB + SQLITE_REAL + SQLITE_NUMERIC + SQLITE_TIME + SQLITE_BOOL + SQLITE_NULL +) + +func scanType(cdt string) reflect.Type { + t := strings.ToUpper(cdt) + i := databaseTypeConvSqlite(t) + switch i { + case SQLITE_INTEGER: + return reflect.TypeOf(sql.NullInt64{}) + case SQLITE_TEXT: + return reflect.TypeOf(sql.NullString{}) + case SQLITE_BLOB: + return reflect.TypeOf(sql.RawBytes{}) + case SQLITE_REAL: + return reflect.TypeOf(sql.NullFloat64{}) + case SQLITE_NUMERIC: + return reflect.TypeOf(sql.NullFloat64{}) + case SQLITE_BOOL: + return reflect.TypeOf(sql.NullBool{}) + case SQLITE_TIME: + return reflect.TypeOf(sql.NullTime{}) + } + return reflect.TypeOf(new(interface{})) +} + +func databaseTypeConvSqlite(t string) int { + if strings.Contains(t, "INT") { + return SQLITE_INTEGER + } + if t == "CLOB" || t == "TEXT" || + strings.Contains(t, "CHAR") { + return SQLITE_TEXT + } + if t == "BLOB" { + return SQLITE_BLOB + } + if t == "REAL" || t == "FLOAT" || + strings.Contains(t, "DOUBLE") { + return SQLITE_REAL + } + if t == "DATE" || t == "DATETIME" || + t == "TIMESTAMP" { + return SQLITE_TIME + } + if t == "NUMERIC" || + strings.Contains(t, "DECIMAL") { + return SQLITE_NUMERIC + } + if t == "BOOLEAN" { + return SQLITE_BOOL + } + + return SQLITE_NULL } diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h b/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h index ba6d12f46..fd6e2d4e8 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h @@ -342,6 +342,13 @@ struct sqlite3_api_routines { sqlite3_file *(*database_file_object)(const char*); /* Version 3.34.0 and later */ int (*txn_state)(sqlite3*,const char*); + /* Version 3.36.1 and later */ + sqlite3_int64 (*changes64)(sqlite3*); + sqlite3_int64 (*total_changes64)(sqlite3*); + /* Version 3.37.0 and later */ + int (*autovacuum_pages)(sqlite3*, + unsigned int(*)(void*,const char*,unsigned int,unsigned int,unsigned int), + void*, void(*)(void*)); }; /* @@ -648,6 +655,11 @@ typedef int (*sqlite3_loadext_entry)( #define sqlite3_database_file_object sqlite3_api->database_file_object /* Version 3.34.0 and later */ #define sqlite3_txn_state sqlite3_api->txn_state +/* Version 3.36.1 and later */ +#define sqlite3_changes64 sqlite3_api->changes64 +#define sqlite3_total_changes64 sqlite3_api->total_changes64 +/* Version 3.37.0 and later */ +#define sqlite3_autovacuum_pages sqlite3_api->autovacuum_pages #endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */ #if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) diff --git a/vendor/github.com/mgechev/revive/config/config.go 
b/vendor/github.com/mgechev/revive/config/config.go index e98aaf0a0..6c21ea322 100644 --- a/vendor/github.com/mgechev/revive/config/config.go +++ b/vendor/github.com/mgechev/revive/config/config.go @@ -81,6 +81,9 @@ var allRules = append([]lint.Rule{ &rule.NestedStructs{}, &rule.IfReturnRule{}, &rule.UselessBreak{}, + &rule.TimeEqualRule{}, + &rule.BannedCharsRule{}, + &rule.OptimizeOperandsOrderRule{}, }, defaultRules...) var allFormatters = []lint.Formatter{ @@ -110,7 +113,7 @@ func GetLintingRules(config *lint.Config) ([]lint.Rule, error) { rulesMap[r.Name()] = r } - lintingRules := []lint.Rule{} + var lintingRules []lint.Rule for name, ruleConfig := range config.Rules { rule, ok := rulesMap[name] if !ok { @@ -127,25 +130,19 @@ func GetLintingRules(config *lint.Config) ([]lint.Rule, error) { return lintingRules, nil } -func parseConfig(path string) (*lint.Config, error) { - config := &lint.Config{} +func parseConfig(path string, config *lint.Config) error { file, err := ioutil.ReadFile(path) if err != nil { - return nil, errors.New("cannot read the config file") + return errors.New("cannot read the config file") } _, err = toml.Decode(string(file), config) if err != nil { - return nil, fmt.Errorf("cannot parse the config file: %v", err) + return fmt.Errorf("cannot parse the config file: %v", err) } - return config, nil + return nil } func normalizeConfig(config *lint.Config) { - const defaultConfidence = 0.8 - if config.Confidence == 0 { - config.Confidence = defaultConfidence - } - if len(config.Rules) == 0 { config.Rules = map[string]lint.RuleConfig{} } @@ -179,16 +176,23 @@ func normalizeConfig(config *lint.Config) { } } +const defaultConfidence = 0.8 + // GetConfig yields the configuration func GetConfig(configPath string) (*lint.Config, error) { - config := defaultConfig() - if configPath != "" { - var err error - config, err = parseConfig(configPath) + var config = &lint.Config{} + switch { + case configPath != "": + config.Confidence = defaultConfidence + err := parseConfig(configPath, config) if err != nil { return nil, err } + + default: // no configuration provided + config = defaultConfig() } + normalizeConfig(config) return config, nil } @@ -209,7 +213,7 @@ func GetFormatter(formatterName string) (lint.Formatter, error) { func defaultConfig() *lint.Config { defaultConfig := lint.Config{ - Confidence: 0.0, + Confidence: defaultConfidence, Severity: lint.SeverityWarning, Rules: map[string]lint.RuleConfig{}, } diff --git a/vendor/github.com/mgechev/revive/formatter/friendly.go b/vendor/github.com/mgechev/revive/formatter/friendly.go index d0a3099f8..f1637e446 100644 --- a/vendor/github.com/mgechev/revive/formatter/friendly.go +++ b/vendor/github.com/mgechev/revive/formatter/friendly.go @@ -49,11 +49,11 @@ func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (str sev := severity(config, failure) f.printFriendlyFailure(failure, sev) if sev == lint.SeverityWarning { - warningMap[failure.RuleName] = warningMap[failure.RuleName] + 1 + warningMap[failure.RuleName]++ totalWarnings++ } if sev == lint.SeverityError { - errorMap[failure.RuleName] = errorMap[failure.RuleName] + 1 + errorMap[failure.RuleName]++ totalErrors++ } } diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go index 7396f2859..0de921aeb 100644 --- a/vendor/github.com/mgechev/revive/lint/file.go +++ b/vendor/github.com/mgechev/revive/lint/file.go @@ -47,7 +47,7 @@ func (f *File) ToPosition(pos token.Pos) token.Position { return 
f.Pkg.fset.Position(pos) } -// Render renters a node. +// Render renders a node. func (f *File) Render(x interface{}) string { var buf bytes.Buffer if err := printer.Fprint(&buf, f.Pkg.fset, x); err != nil { @@ -74,7 +74,7 @@ var basicTypeKinds = map[types.BasicKind]string{ // and indicates what its default type is. // scope may be nil. func (f *File) IsUntypedConst(expr ast.Expr) (defType string, ok bool) { - // Re-evaluate expr outside of its context to see if it's untyped. + // Re-evaluate expr outside its context to see if it's untyped. // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) exprStr := f.Render(expr) tv, err := types.Eval(f.Pkg.fset, f.Pkg.TypesPkg, expr.Pos(), exprStr) @@ -206,9 +206,9 @@ func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, fa if len(match) == 0 { continue } - ruleNames := []string{} tempNames := strings.Split(match[rulesPos], ",") + for _, name := range tempNames { name = strings.Trim(name, "\n") if len(name) > 0 { diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go index cdca84fb5..9a194f088 100644 --- a/vendor/github.com/mgechev/revive/lint/linter.go +++ b/vendor/github.com/mgechev/revive/lint/linter.go @@ -6,6 +6,8 @@ import ( "fmt" "go/token" "os" + "regexp" + "strconv" "sync" ) @@ -16,12 +18,34 @@ type disabledIntervalsMap = map[string][]DisabledInterval // Linter is used for linting set of files. type Linter struct { - reader ReadFile + reader ReadFile + fileReadTokens chan struct{} } // New creates a new Linter -func New(reader ReadFile) Linter { - return Linter{reader: reader} +func New(reader ReadFile, maxOpenFiles int) Linter { + var fileReadTokens chan struct{} + if maxOpenFiles > 0 { + fileReadTokens = make(chan struct{}, maxOpenFiles) + } + return Linter{ + reader: reader, + fileReadTokens: fileReadTokens, + } +} + +func (l Linter) readFile(path string) (result []byte, err error) { + if l.fileReadTokens != nil { + // "take" a token by writing to the channel. + // It will block if no more space in the channel's buffer + l.fileReadTokens <- struct{}{} + defer func() { + // "free" a token by reading from the channel + <-l.fileReadTokens + }() + } + + return l.reader(path) } var ( @@ -60,17 +84,18 @@ func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config, mu: sync.Mutex{}, } for _, filename := range filenames { - content, err := l.reader(filename) + content, err := l.readFile(filename) if err != nil { return err } - if isGenerated(content) && !config.IgnoreGeneratedHeader { + if !config.IgnoreGeneratedHeader && isGenerated(content) { continue } file, err := NewFile(filename, content, pkg) if err != nil { - return err + addInvalidFileFailure(filename, err.Error(), failures) + continue } pkg.files[filename] = file } @@ -97,3 +122,42 @@ func isGenerated(src []byte) bool { } return false } + +// addInvalidFileFailure adds a failure for an invalid formatted file +func addInvalidFileFailure(filename, errStr string, failures chan Failure) { + position := getPositionInvalidFile(filename, errStr) + failures <- Failure{ + Confidence: 1, + Failure: fmt.Sprintf("invalid file %s: %v", filename, errStr), + Category: "validity", + Position: position, + } +} + +// errPosRegexp matches with an NewFile error message +// i.e. 
: corrupted.go:10:4: expected '}', found 'EOF +// first group matches the line and the second group, the column +var errPosRegexp = regexp.MustCompile(".*:(\\d*):(\\d*):.*$") + +// getPositionInvalidFile gets the position of the error in an invalid file +func getPositionInvalidFile(filename, s string) FailurePosition { + pos := errPosRegexp.FindStringSubmatch(s) + if len(pos) < 3 { + return FailurePosition{} + } + line, err := strconv.Atoi(pos[1]) + if err != nil { + return FailurePosition{} + } + column, err := strconv.Atoi(pos[2]) + if err != nil { + return FailurePosition{} + } + + return FailurePosition{ + Start: token.Position{ + Filename: filename, + Line: line, + Column: column, + }} +} diff --git a/vendor/github.com/mgechev/revive/lint/rule.go b/vendor/github.com/mgechev/revive/lint/rule.go index 815abfdd8..c10b9e86b 100644 --- a/vendor/github.com/mgechev/revive/lint/rule.go +++ b/vendor/github.com/mgechev/revive/lint/rule.go @@ -11,7 +11,7 @@ type DisabledInterval struct { RuleName string } -// Rule defines an abstract rule interaface +// Rule defines an abstract rule interface type Rule interface { Name() string Apply(*File, Arguments) []Failure diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go index 4d1579053..69bf92069 100644 --- a/vendor/github.com/mgechev/revive/rule/add-constant.go +++ b/vendor/github.com/mgechev/revive/rule/add-constant.go @@ -30,48 +30,53 @@ func (wl whiteList) add(kind string, list string) { } // AddConstantRule lints unused params in functions. -type AddConstantRule struct{} +type AddConstantRule struct { + whiteList whiteList + strLitLimit int +} // Apply applies the rule to given file. func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - strLitLimit := defaultStrLitLimit - var whiteList = newWhiteList() - if len(arguments) > 0 { - args, ok := arguments[0].(map[string]interface{}) - if !ok { - panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0])) - } - for k, v := range args { - kind := "" - switch k { - case "allowFloats": - kind = kindFLOAT - fallthrough - case "allowInts": - if kind == "" { - kind = kindINT - } - fallthrough - case "allowStrs": - if kind == "" { - kind = kindSTRING - } - list, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v)) - } - whiteList.add(kind, list) - case "maxLitCount": - sl, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v)) - } + if r.whiteList == nil { + r.strLitLimit = defaultStrLitLimit + r.whiteList = newWhiteList() + if len(arguments) > 0 { + args, ok := arguments[0].(map[string]interface{}) + if !ok { + panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0])) + } + for k, v := range args { + kind := "" + switch k { + case "allowFloats": + kind = kindFLOAT + fallthrough + case "allowInts": + if kind == "" { + kind = kindINT + } + fallthrough + case "allowStrs": + if kind == "" { + kind = kindSTRING + } + list, ok := v.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. 
Got '%v' (%T)", v, v)) + } + r.whiteList.add(kind, list) + case "maxLitCount": + sl, ok := v.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v)) + } - limit, err := strconv.Atoi(sl) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)) + limit, err := strconv.Atoi(sl) + if err != nil { + panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)) + } + r.strLitLimit = limit } - strLitLimit = limit } } } @@ -82,7 +87,7 @@ func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lin failures = append(failures, failure) } - w := lintAddConstantRule{onFailure: onFailure, strLits: make(map[string]int), strLitLimit: strLitLimit, whiteLst: whiteList} + w := lintAddConstantRule{onFailure: onFailure, strLits: make(map[string]int), strLitLimit: r.strLitLimit, whiteLst: r.whiteList} ast.Walk(w, file.AST) diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go index 03bfa7f3d..b30df3be6 100644 --- a/vendor/github.com/mgechev/revive/rule/argument-limit.go +++ b/vendor/github.com/mgechev/revive/rule/argument-limit.go @@ -8,21 +8,26 @@ import ( ) // ArgumentsLimitRule lints given else constructs. -type ArgumentsLimitRule struct{} +type ArgumentsLimitRule struct { + total int +} // Apply applies the rule to given file. func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) + if r.total == 0 { + checkNumberOfArguments(1, arguments, r.Name()) - total, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "argument-list" rule`) + total, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(`invalid value passed as argument number to the "argument-limit" rule`) + } + r.total = int(total) } var failures []lint.Failure walker := lintArgsNum{ - total: int(total), + total: r.total, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, diff --git a/vendor/github.com/mgechev/revive/rule/banned-characters.go b/vendor/github.com/mgechev/revive/rule/banned-characters.go new file mode 100644 index 000000000..6911574a4 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/banned-characters.go @@ -0,0 +1,81 @@ +package rule + +import ( + "fmt" + "go/ast" + "strings" + + "github.com/mgechev/revive/lint" +) + +// BannedCharsRule checks if a file contains banned characters. +type BannedCharsRule struct { + bannedCharList []string +} + +const bannedCharsRuleName = "banned-characters" + +// Apply applied the rule to the given file. 
+func (r *BannedCharsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + if r.bannedCharList == nil { + checkNumberOfArguments(1, arguments, bannedCharsRuleName) + r.bannedCharList = r.getBannedCharsList(arguments) + } + + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintBannedCharsRule{ + bannedChars: r.bannedCharList, + onFailure: onFailure, + } + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name +func (r *BannedCharsRule) Name() string { + return bannedCharsRuleName +} + +// getBannedCharsList converts arguments into the banned characters list +func (r *BannedCharsRule) getBannedCharsList(args lint.Arguments) []string { + var bannedChars []string + for _, char := range args { + charStr, ok := char.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), char)) + } + bannedChars = append(bannedChars, charStr) + } + + return bannedChars +} + +type lintBannedCharsRule struct { + bannedChars []string + onFailure func(lint.Failure) +} + +// Visit checks for each node if an identifier contains banned characters +func (w lintBannedCharsRule) Visit(node ast.Node) ast.Visitor { + n, ok := node.(*ast.Ident) + if !ok { + return w + } + for _, c := range w.bannedChars { + ok := strings.Contains(n.Name, c) + if ok { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("banned character found: %s", c), + RuleName: bannedCharsRuleName, + Node: n, + }) + } + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/bare-return.go b/vendor/github.com/mgechev/revive/rule/bare-return.go index 3ee4c4adc..b7780cd6b 100644 --- a/vendor/github.com/mgechev/revive/rule/bare-return.go +++ b/vendor/github.com/mgechev/revive/rule/bare-return.go @@ -61,7 +61,7 @@ func (w bareReturnFinder) Visit(node ast.Node) ast.Visitor { _, ok := node.(*ast.FuncLit) if ok { // skip analysing function literals - // they will analyzed by the lintBareReturnRule.Visit method + // they will be analysed by the lintBareReturnRule.Visit method return nil } diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank-imports.go index 9e8b8fc00..be706c369 100644 --- a/vendor/github.com/mgechev/revive/rule/blank-imports.go +++ b/vendor/github.com/mgechev/revive/rule/blank-imports.go @@ -43,7 +43,7 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu prev := file.AST.Imports[i-1] prevPos := file.ToPosition(prev.Pos()) - isSubsequentBlancInAGroup := isBlank(prev.Name) && prevPos.Line+1 == pos.Line && prev.Path.Value != embedImportPath + isSubsequentBlancInAGroup := prevPos.Line+1 == pos.Line && prev.Path.Value != embedImportPath && isBlank(prev.Name) if isSubsequentBlancInAGroup { continue } diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go index 7176c9957..ed7481fc8 100644 --- a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go +++ b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go @@ -10,22 +10,26 @@ import ( ) // CognitiveComplexityRule lints given else constructs. -type CognitiveComplexityRule struct{} +type CognitiveComplexityRule struct { + maxComplexity int +} // Apply applies the rule to given file. 
func (r *CognitiveComplexityRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) + if r.maxComplexity == 0 { + checkNumberOfArguments(1, arguments, r.Name()) - complexity, ok := arguments[0].(int64) - if !ok { - panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) + complexity, ok := arguments[0].(int64) + if !ok { + panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) + } + r.maxComplexity = int(complexity) } var failures []lint.Failure - linter := cognitiveComplexityLinter{ file: file, - maxComplexity: int(complexity), + maxComplexity: r.maxComplexity, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go index 6502a07be..a737cecc2 100644 --- a/vendor/github.com/mgechev/revive/rule/context-as-argument.go +++ b/vendor/github.com/mgechev/revive/rule/context-as-argument.go @@ -1,28 +1,34 @@ package rule import ( + "fmt" "go/ast" + "strings" "github.com/mgechev/revive/lint" ) // ContextAsArgumentRule lints given else constructs. -type ContextAsArgumentRule struct{} +type ContextAsArgumentRule struct { + allowTypesLUT map[string]struct{} +} // Apply applies the rule to given file. -func (r *ContextAsArgumentRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure +func (r *ContextAsArgumentRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - fileAst := file.AST + if r.allowTypesLUT == nil { + r.allowTypesLUT = getAllowTypesFromArguments(args) + } + + var failures []lint.Failure walker := lintContextArguments{ - file: file, - fileAst: fileAst, + allowTypesLUT: r.allowTypesLUT, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, } - ast.Walk(walker, fileAst) + ast.Walk(walker, file.AST) return failures } @@ -33,9 +39,8 @@ func (r *ContextAsArgumentRule) Name() string { } type lintContextArguments struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) + allowTypesLUT map[string]struct{} + onFailure func(lint.Failure) } func (w lintContextArguments) Visit(n ast.Node) ast.Visitor { @@ -43,12 +48,15 @@ func (w lintContextArguments) Visit(n ast.Node) ast.Visitor { if !ok || len(fn.Type.Params.List) <= 1 { return w } + + fnArgs := fn.Type.Params.List + // A context.Context should be the first parameter of a function. // Flag any that show up after the first. 
- previousArgIsCtx := isPkgDot(fn.Type.Params.List[0].Type, "context", "Context") - for _, arg := range fn.Type.Params.List[1:] { + isCtxStillAllowed := true + for _, arg := range fnArgs { argIsCtx := isPkgDot(arg.Type, "context", "Context") - if argIsCtx && !previousArgIsCtx { + if argIsCtx && !isCtxStillAllowed { w.onFailure(lint.Failure{ Node: arg, Category: "arg-order", @@ -57,7 +65,41 @@ func (w lintContextArguments) Visit(n ast.Node) ast.Visitor { }) break // only flag one } - previousArgIsCtx = argIsCtx + + typeName := gofmt(arg.Type) + // a parameter of type context.Context is still allowed if the current arg type is in the LUT + _, isCtxStillAllowed = w.allowTypesLUT[typeName] } - return w + + return nil // avoid visiting the function body +} + +func getAllowTypesFromArguments(args lint.Arguments) map[string]struct{} { + allowTypesBefore := []string{} + if len(args) >= 1 { + argKV, ok := args[0].(map[string]interface{}) + if !ok { + panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Expecting a k,v map, got %T", args[0])) + } + for k, v := range argKV { + switch k { + case "allowTypesBefore": + typesBefore, ok := v.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the context-as-argument.allowTypesBefore rule. Expecting a string, got %T", v)) + } + allowTypesBefore = append(allowTypesBefore, strings.Split(typesBefore, ",")...) + default: + panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Unrecognized key %s", k)) + } + } + } + + result := make(map[string]struct{}, len(allowTypesBefore)) + for _, v := range allowTypesBefore { + result[v] = struct{}{} + } + + result["context.Context"] = struct{}{} // context.Context is always allowed before another context.Context + return result } diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go index f597909c7..bdda5f598 100644 --- a/vendor/github.com/mgechev/revive/rule/cyclomatic.go +++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go @@ -11,23 +11,27 @@ import ( // Based on https://github.com/fzipp/gocyclo // CyclomaticRule lints given else constructs. -type CyclomaticRule struct{} +type CyclomaticRule struct { + maxComplexity int +} // Apply applies the rule to given file. func (r *CyclomaticRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) + if r.maxComplexity == 0 { + checkNumberOfArguments(1, arguments, r.Name()) - complexity, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic("invalid argument for cyclomatic complexity") + complexity, ok := arguments[0].(int64) // Alt. 
non panicking version + if !ok { + panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0])) + } + r.maxComplexity = int(complexity) } var failures []lint.Failure - fileAst := file.AST walker := lintCyclomatic{ file: file, - complexity: int(complexity), + complexity: r.maxComplexity, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, @@ -58,8 +62,9 @@ func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor { w.onFailure(lint.Failure{ Confidence: 1, Category: "maintenance", - Failure: fmt.Sprintf("function %s has cyclomatic complexity %d", funcName(fn), c), - Node: fn, + Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)", + funcName(fn), c, w.complexity), + Node: fn, }) } } diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go index 2ec7ef47c..101127f76 100644 --- a/vendor/github.com/mgechev/revive/rule/defer.go +++ b/vendor/github.com/mgechev/revive/rule/defer.go @@ -8,18 +8,21 @@ import ( ) // DeferRule lints unused params in functions. -type DeferRule struct{} +type DeferRule struct { + allow map[string]bool +} // Apply applies the rule to given file. func (r *DeferRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - allow := r.allowFromArgs(arguments) - + if r.allow == nil { + r.allow = r.allowFromArgs(arguments) + } var failures []lint.Failure onFailure := func(failure lint.Failure) { failures = append(failures, failure) } - w := lintDeferRule{onFailure: onFailure, allow: allow} + w := lintDeferRule{onFailure: onFailure, allow: r.allow} ast.Walk(w, file.AST) @@ -85,7 +88,7 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor { w.newFailure("return in a defer function has no effect", n, 1.0, "logic", "return") } case *ast.CallExpr: - if isIdent(n.Fun, "recover") && !w.inADefer { + if !w.inADefer && isIdent(n.Fun, "recover") { // confidence is not 1 because recover can be in a function that is deferred elsewhere w.newFailure("recover must be called inside a deferred function", n, 0.8, "logic", "recover") } diff --git a/vendor/github.com/mgechev/revive/rule/error-strings.go b/vendor/github.com/mgechev/revive/rule/error-strings.go index b8a5b7ed7..f0739d9c7 100644 --- a/vendor/github.com/mgechev/revive/rule/error-strings.go +++ b/vendor/github.com/mgechev/revive/rule/error-strings.go @@ -17,10 +17,25 @@ type ErrorStringsRule struct{} func (r *ErrorStringsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { var failures []lint.Failure + var errorFunctions = map[string]map[string]struct{}{ + "fmt": { + "Errorf": {}, + }, + "errors": { + "Errorf": {}, + "WithMessage": {}, + "Wrap": {}, + "New": {}, + "WithMessagef": {}, + "Wrapf": {}, + }, + } + fileAst := file.AST walker := lintErrorStrings{ - file: file, - fileAst: fileAst, + file: file, + fileAst: fileAst, + errorFunctions: errorFunctions, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, @@ -37,24 +52,31 @@ func (r *ErrorStringsRule) Name() string { } type lintErrorStrings struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) + file *lint.File + fileAst *ast.File + errorFunctions map[string]map[string]struct{} + onFailure func(lint.Failure) } +// Visit browses the AST func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor { ce, ok := n.(*ast.CallExpr) if !ok { return w } - if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { - return w - } + if len(ce.Args) < 1 { 
return w } - str, ok := ce.Args[0].(*ast.BasicLit) - if !ok || str.Kind != token.STRING { + + // expression matches the known pkg.function + ok = w.match(ce) + if !ok { + return w + } + + str, ok := w.getMessage(ce) + if !ok { return w } s, _ := strconv.Unquote(str.Value) // can assume well-formed Go @@ -65,7 +87,6 @@ func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor { if clean { return w } - w.onFailure(lint.Failure{ Node: str, Confidence: conf, @@ -75,6 +96,55 @@ func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor { return w } +// match returns true if the expression corresponds to the known pkg.function +// i.e.: errors.Wrap +func (w lintErrorStrings) match(expr *ast.CallExpr) bool { + sel, ok := expr.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + // retrieve the package + id, ok := sel.X.(*ast.Ident) + if !ok { + return false + } + functions, ok := w.errorFunctions[id.Name] + if !ok { + return false + } + // retrieve the function + _, ok = functions[sel.Sel.Name] + return ok +} + +// getMessage returns the message depending on its position +// returns false if the cast is unsuccessful +func (w lintErrorStrings) getMessage(expr *ast.CallExpr) (s *ast.BasicLit, success bool) { + str, ok := w.checkArg(expr, 0) + if ok { + return str, true + } + if len(expr.Args) < 2 { + return s, false + } + str, ok = w.checkArg(expr, 1) + if !ok { + return s, false + } + return str, true +} + +func (lintErrorStrings) checkArg(expr *ast.CallExpr, arg int) (s *ast.BasicLit, success bool) { + str, ok := expr.Args[arg].(*ast.BasicLit) + if !ok { + return s, false + } + if str.Kind != token.STRING { + return s, false + } + return str, true +} + func lintErrorString(s string) (isClean bool, conf float64) { const basicConfidence = 0.8 const capConfidence = basicConfidence - 0.2 diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go index 3dab1bbaf..d81be3ac5 100644 --- a/vendor/github.com/mgechev/revive/rule/exported.go +++ b/vendor/github.com/mgechev/revive/rule/exported.go @@ -12,7 +12,12 @@ import ( ) // ExportedRule lints given else constructs. -type ExportedRule struct{} +type ExportedRule struct { + configured bool + checkPrivateReceivers bool + disableStutteringCheck bool + stuttersMsg string +} // Apply applies the rule to given file. 
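The new fields cache the rule's configuration: checkPrivateReceivers and disableStutteringCheck toggle two of its checks, and stuttersMsg holds either "stutters" or "is repetitive" depending on the sayRepetitiveInsteadOfStutters option. As a reminder of what the rule reports, a hedged sketch (the package name and types below are invented, and the quoted messages are paraphrased):

```go
package cache

// Cache is a fixed-size in-memory cache.
type Cache struct{}

// The type below has no doc comment, so the rule reports something like
// "exported type Entry should have comment or be unexported".

type Entry struct{}

// CacheOptions configures a Cache. Because the name repeats the package name,
// the stuttering check reports it (roughly "type name will be used as
// cache.CacheOptions by other packages, and that stutters"), with "stutters"
// replaced by "is repetitive" when sayRepetitiveInsteadOfStutters is set;
// disableStutteringCheck turns this check off entirely.
type CacheOptions struct{}
```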
func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { @@ -22,11 +27,15 @@ func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failur return failures } - checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters := r.getConf(args) + if !r.configured { + var sayRepetitiveInsteadOfStutters bool + r.checkPrivateReceivers, r.disableStutteringCheck, sayRepetitiveInsteadOfStutters = r.getConf(args) + r.stuttersMsg = "stutters" + if sayRepetitiveInsteadOfStutters { + r.stuttersMsg = "is repetitive" + } - stuttersMsg := "stutters" - if sayRepetitiveInsteadOfStutters { - stuttersMsg = "is repetitive" + r.configured = true } fileAst := file.AST @@ -37,9 +46,9 @@ func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failur failures = append(failures, failure) }, genDeclMissingComments: make(map[*ast.GenDecl]bool), - checkPrivateReceivers: checkPrivateReceivers, - disableStutteringCheck: disableStutteringCheck, - stuttersMsg: stuttersMsg, + checkPrivateReceivers: r.checkPrivateReceivers, + disableStutteringCheck: r.disableStutteringCheck, + stuttersMsg: r.stuttersMsg, } ast.Walk(&walker, fileAst) @@ -100,7 +109,7 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { // method kind = "method" recv := receiverType(fn) - if !ast.IsExported(recv) && !w.checkPrivateReceivers { + if !w.checkPrivateReceivers && !ast.IsExported(recv) { // receiver is unexported return } @@ -250,7 +259,7 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD return } // If this GenDecl has parens and a comment, we don't check its comment form. - if gd.Lparen.IsValid() && gd.Doc != nil { + if gd.Doc != nil && gd.Lparen.IsValid() { return } // The relevant text to check will be on either vs.Doc or gd.Doc. diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go index 8fc89e84e..17fd9ff58 100644 --- a/vendor/github.com/mgechev/revive/rule/file-header.go +++ b/vendor/github.com/mgechev/revive/rule/file-header.go @@ -1,13 +1,16 @@ package rule import ( + "fmt" "regexp" "github.com/mgechev/revive/lint" ) // FileHeaderRule lints given else constructs. -type FileHeaderRule struct{} +type FileHeaderRule struct { + header string +} var ( multiRegexp = regexp.MustCompile("^/\\*") @@ -16,11 +19,13 @@ var ( // Apply applies the rule to given file. 
func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) - - header, ok := arguments[0].(string) - if !ok { - panic(`invalid argument for "file-header" rule: first argument should be a string`) + if r.header == "" { + checkNumberOfArguments(1, arguments, r.Name()) + var ok bool + r.header, ok = arguments[0].(string) + if !ok { + panic(fmt.Sprintf("invalid argument for \"file-header\" rule: first argument should be a string, got %T", arguments[0])) + } } failure := []lint.Failure{ @@ -50,7 +55,7 @@ func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint comment += text } - regex, err := regexp.Compile(header) + regex, err := regexp.Compile(r.header) if err != nil { panic(err.Error()) } diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go index e1cee21cf..2cdb84c91 100644 --- a/vendor/github.com/mgechev/revive/rule/function-length.go +++ b/vendor/github.com/mgechev/revive/rule/function-length.go @@ -9,18 +9,25 @@ import ( ) // FunctionLength lint. -type FunctionLength struct{} +type FunctionLength struct { + maxStmt int + maxLines int +} // Apply applies the rule to given file. func (r *FunctionLength) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - maxStmt, maxLines := r.parseArguments(arguments) + if r.maxLines == 0 { + maxStmt, maxLines := r.parseArguments(arguments) + r.maxStmt = int(maxStmt) + r.maxLines = int(maxLines) + } var failures []lint.Failure walker := lintFuncLength{ file: file, - maxStmt: int(maxStmt), - maxLines: int(maxLines), + maxStmt: r.maxStmt, + maxLines: r.maxLines, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go index 51a4713f0..5f715ddc5 100644 --- a/vendor/github.com/mgechev/revive/rule/function-result-limit.go +++ b/vendor/github.com/mgechev/revive/rule/function-result-limit.go @@ -8,24 +8,29 @@ import ( ) // FunctionResultsLimitRule lints given else constructs. -type FunctionResultsLimitRule struct{} +type FunctionResultsLimitRule struct { + max int +} // Apply applies the rule to given file. func (r *FunctionResultsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) + if r.max == 0 { + checkNumberOfArguments(1, arguments, r.Name()) - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) - } - if max < 0 { - panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) + max, ok := arguments[0].(int64) // Alt. 
non panicking version + if !ok { + panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) + } + if max < 0 { + panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) + } + r.max = int(max) } var failures []lint.Failure walker := lintFunctionResultsNum{ - max: int(max), + max: r.max, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, diff --git a/vendor/github.com/mgechev/revive/rule/imports-blacklist.go b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go index 31ef901e5..68beb73ac 100644 --- a/vendor/github.com/mgechev/revive/rule/imports-blacklist.go +++ b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go @@ -7,7 +7,9 @@ import ( ) // ImportsBlacklistRule lints given else constructs. -type ImportsBlacklistRule struct{} +type ImportsBlacklistRule struct { + blacklist map[string]bool +} // Apply applies the rule to given file. func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { @@ -17,23 +19,25 @@ func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) return failures // skip, test file } - blacklist := make(map[string]bool, len(arguments)) + if r.blacklist == nil { + r.blacklist = make(map[string]bool, len(arguments)) - for _, arg := range arguments { - argStr, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg)) + for _, arg := range arguments { + argStr, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg)) + } + // we add quotes if not present, because when parsed, the value of the AST node, will be quoted + if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' { + argStr = fmt.Sprintf(`%q`, argStr) + } + r.blacklist[argStr] = true } - // we add quotes if not present, because when parsed, the value of the AST node, will be quoted - if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' { - argStr = fmt.Sprintf(`"%s"`, argStr) - } - blacklist[argStr] = true } for _, is := range file.AST.Imports { path := is.Path - if path != nil && blacklist[path.Value] { + if path != nil && r.blacklist[path.Value] { failures = append(failures, lint.Failure{ Confidence: 1, Failure: "should not use the following blacklisted import: " + path.Value, diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go index 939ef227f..c51773201 100644 --- a/vendor/github.com/mgechev/revive/rule/line-length-limit.go +++ b/vendor/github.com/mgechev/revive/rule/line-length-limit.go @@ -12,20 +12,26 @@ import ( ) // LineLengthLimitRule lints given else constructs. -type LineLengthLimitRule struct{} +type LineLengthLimitRule struct { + max int +} // Apply applies the rule to given file. func (r *LineLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) + if r.max == 0 { + checkNumberOfArguments(1, arguments, r.Name()) - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok || max < 0 { - panic(`invalid value passed as argument number to the "line-length-limit" rule`) + max, ok := arguments[0].(int64) // Alt. 
non panicking version + if !ok || max < 0 { + panic(`invalid value passed as argument number to the "line-length-limit" rule`) + } + + r.max = int(max) } var failures []lint.Failure checker := lintLineLengthNum{ - max: int(max), + max: r.max, file: file, onFailure: func(failure lint.Failure) { failures = append(failures, failure) @@ -55,7 +61,7 @@ func (r lintLineLengthNum) check() { s := bufio.NewScanner(f) for s.Scan() { t := s.Text() - t = strings.Replace(t, "\t", spaces, -1) + t = strings.ReplaceAll(t, "\t", spaces) c := utf8.RuneCountInString(t) if c > r.max { r.onFailure(lint.Failure{ diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go index b38c8b745..5934d0b0b 100644 --- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go +++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go @@ -9,15 +9,20 @@ import ( ) // MaxPublicStructsRule lints given else constructs. -type MaxPublicStructsRule struct{} +type MaxPublicStructsRule struct { + max int64 +} // Apply applies the rule to given file. func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - checkNumberOfArguments(1, arguments, r.Name()) + if r.max < 1 { + checkNumberOfArguments(1, arguments, r.Name()) - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "max-public-structs" rule`) + max, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(`invalid value passed as argument number to the "max-public-structs" rule`) + } + r.max = max } var failures []lint.Failure @@ -32,7 +37,7 @@ func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments) ast.Walk(walker, fileAst) - if walker.current > max { + if walker.current > r.max { walker.onFailure(lint.Failure{ Failure: "you have exceeded the maximum number of public struct declarations", Confidence: 1, diff --git a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go b/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go new file mode 100644 index 000000000..ccb8a3279 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go @@ -0,0 +1,77 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// OptimizeOperandsOrderRule lints given else constructs. +type OptimizeOperandsOrderRule struct{} + +// Apply applies the rule to given file. +func (r *OptimizeOperandsOrderRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + w := lintOptimizeOperandsOrderlExpr{ + onFailure: onFailure, + } + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *OptimizeOperandsOrderRule) Name() string { + return "optimize-operands-order" +} + +type lintOptimizeOperandsOrderlExpr struct { + onFailure func(failure lint.Failure) +} + +// Visit checks boolean AND and OR expressions to determine +// if swapping their operands may result in an execution speedup. 
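Before the Visit implementation, a concrete example of the pattern this new rule targets may help; the names below are invented, and the suggested rewrite mirrors the failure message that Visit builds with gofmt:

```go
package example

func isExpensive(s string) bool { return len(s) > 20 } // stands in for a costly call

func demo(s string, cached bool) bool {
	// The left operand of && contains a function call and the right one does not,
	// so the rule reports (confidence 0.3) that the expression might be rewritten
	// as "cached && isExpensive(s)", letting the cheap operand short-circuit first.
	return isExpensive(s) && cached
}
```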
+func (w lintOptimizeOperandsOrderlExpr) Visit(node ast.Node) ast.Visitor { + binExpr, ok := node.(*ast.BinaryExpr) + if !ok { + return w + } + + switch binExpr.Op { + case token.LAND, token.LOR: + default: + return w + } + + isCaller := func(n ast.Node) bool { + _, ok := n.(*ast.CallExpr) + return ok + } + + // check if the left sub-expression contains a function call + nodes := pick(binExpr.X, isCaller, nil) + if len(nodes) < 1 { + return w + } + + // check if the right sub-expression does not contain a function call + nodes = pick(binExpr.Y, isCaller, nil) + if len(nodes) > 0 { + return w + } + + newExpr := ast.BinaryExpr{X: binExpr.Y, Y: binExpr.X, Op: binExpr.Op} + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("for better performance '%v' might be rewritten as '%v'", gofmt(binExpr), gofmt(&newExpr)), + Node: node, + Category: "optimization", + Confidence: 0.3, + }) + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go index 857787be3..0547b8d93 100644 --- a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go +++ b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go @@ -87,15 +87,25 @@ func (w rangeValInClosure) Visit(node ast.Node) ast.Visitor { if !ok { return w } + if lit.Type == nil { // Not referring to a variable (e.g. struct field name) return w } - ast.Inspect(lit.Body, func(n ast.Node) bool { + + var inspector func(n ast.Node) bool + inspector = func(n ast.Node) bool { + kv, ok := n.(*ast.KeyValueExpr) + if ok { + // do not check identifiers acting as key in key-value expressions (see issue #637) + ast.Inspect(kv.Value, inspector) + return false + } id, ok := n.(*ast.Ident) if !ok || id.Obj == nil { return true } + for _, v := range vars { if v.Obj == id.Obj { w.onFailure(lint.Failure{ @@ -106,6 +116,7 @@ func (w rangeValInClosure) Visit(node ast.Node) ast.Visitor { } } return true - }) + } + ast.Inspect(lit.Body, inspector) return w } diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go index 6017c4180..73557c43d 100644 --- a/vendor/github.com/mgechev/revive/rule/string-format.go +++ b/vendor/github.com/mgechev/revive/rule/string-format.go @@ -119,7 +119,7 @@ func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) ( } // Validate scope and regex length - if len(rule[0]) == 0 { + if rule[0] == "" { w.configError("empty scope provided", ruleNum, 0) } else if len(rule[1]) < 2 { w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1) diff --git a/vendor/github.com/mgechev/revive/rule/string-of-int.go b/vendor/github.com/mgechev/revive/rule/string-of-int.go index 38f453a4a..f9fe5a450 100644 --- a/vendor/github.com/mgechev/revive/rule/string-of-int.go +++ b/vendor/github.com/mgechev/revive/rule/string-of-int.go @@ -54,7 +54,7 @@ func (w *lintStringInt) Visit(node ast.Node) ast.Visitor { w.onFailure(lint.Failure{ Confidence: 1, Node: ce, - Failure: "dubious convertion of an integer into a string, use strconv.Itoa", + Failure: "dubious conversion of an integer into a string, use strconv.Itoa", }) return w diff --git a/vendor/github.com/mgechev/revive/rule/time-equal.go b/vendor/github.com/mgechev/revive/rule/time-equal.go new file mode 100644 index 000000000..72ecf26fe --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/time-equal.go @@ -0,0 +1,76 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + 
"github.com/mgechev/revive/lint" +) + +// TimeEqualRule shows where "==" and "!=" used for equality check time.Time +type TimeEqualRule struct{} + +// Apply applies the rule to given file. +func (*TimeEqualRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := &lintTimeEqual{file, onFailure} + if w.file.Pkg.TypeCheck() != nil { + return nil + } + + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (*TimeEqualRule) Name() string { + return "time-equal" +} + +type lintTimeEqual struct { + file *lint.File + onFailure func(lint.Failure) +} + +func (l *lintTimeEqual) Visit(node ast.Node) ast.Visitor { + expr, ok := node.(*ast.BinaryExpr) + if !ok { + return l + } + + switch expr.Op { + case token.EQL, token.NEQ: + default: + return l + } + + xtyp := l.file.Pkg.TypeOf(expr.X) + ytyp := l.file.Pkg.TypeOf(expr.Y) + + if !isNamedType(xtyp, "time", "Time") || !isNamedType(ytyp, "time", "Time") { + return l + } + + var failure string + switch expr.Op { + case token.EQL: + failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", expr.X, expr.Y, expr.Op) + case token.NEQ: + failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", expr.X, expr.Y, expr.Op) + } + + l.onFailure(lint.Failure{ + Category: "time", + Confidence: 1, + Node: node, + Failure: failure, + }) + + return l +} diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go index d4da01574..930b486a2 100644 --- a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go +++ b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go @@ -61,8 +61,10 @@ func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor { case *ast.FuncDecl: var rec *ast.Ident switch { - case n.Recv == nil || n.Recv.NumFields() < 1 || len(n.Recv.List[0].Names) < 1: + case n.Recv == nil: rec = nil + case n.Recv.NumFields() < 1 || len(n.Recv.List[0].Names) < 1: + rec = &ast.Ident{Name: "_"} default: rec = n.Recv.List[0].Names[0] } diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go index 0e2f62875..432171430 100644 --- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go +++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go @@ -9,27 +9,31 @@ import ( ) // UnhandledErrorRule lints given else constructs. -type UnhandledErrorRule struct{} +type UnhandledErrorRule struct { + ignoreList ignoreListType +} type ignoreListType map[string]struct{} // Apply applies the rule to given file. func (r *UnhandledErrorRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - var failures []lint.Failure + if r.ignoreList == nil { + r.ignoreList = make(ignoreListType, len(args)) - ignoreList := make(ignoreListType, len(args)) + for _, arg := range args { + argStr, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg)) + } - for _, arg := range args { - argStr, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. 
Expecting a string, got %T", arg)) + r.ignoreList[argStr] = struct{}{} } - - ignoreList[argStr] = struct{}{} } + var failures []lint.Failure + walker := &lintUnhandledErrors{ - ignoreList: ignoreList, + ignoreList: r.ignoreList, pkg: file.Pkg, onFailure: func(failure lint.Failure) { failures = append(failures, failure) diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go index 0d5744846..8b0f556b9 100644 --- a/vendor/github.com/mgechev/revive/rule/utils.go +++ b/vendor/github.com/mgechev/revive/rule/utils.go @@ -92,6 +92,7 @@ func validType(T types.Type) bool { !strings.Contains(T.String(), "invalid type") // good but not foolproof } +// isPkgDot checks if the expression is . func isPkgDot(expr ast.Expr, pkg, name string) bool { sel, ok := expr.(*ast.SelectorExpr) return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) @@ -111,7 +112,7 @@ func srcLine(src []byte, p token.Position) string { // pick yields a list of nodes by picking them from a sub-ast with root node n. // Nodes are selected by applying the fselect function -// f function is applied to each selected node before inseting it in the final result. +// f function is applied to each selected node before inserting it in the final result. // If f==nil then it defaults to the identity function (ie it returns the node itself) func pick(n ast.Node, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node { var result []ast.Node @@ -132,14 +133,6 @@ func pick(n ast.Node, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.No return result } -func pickFromExpList(l []ast.Expr, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node { - result := make([]ast.Node, 0) - for _, e := range l { - result = append(result, pick(e, fselect, f)...) - } - return result -} - type picker struct { fselect func(n ast.Node) bool onSelect func(n ast.Node) diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go index 768f65b96..1de860817 100644 --- a/vendor/github.com/mgechev/revive/rule/var-naming.go +++ b/vendor/github.com/mgechev/revive/rule/var-naming.go @@ -10,29 +10,33 @@ import ( ) // VarNamingRule lints given else constructs. -type VarNamingRule struct{} +type VarNamingRule struct { + configured bool + whitelist []string + blacklist []string +} // Apply applies the rule to given file. 
func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { var failures []lint.Failure - var whitelist []string - var blacklist []string + if !r.configured { + if len(arguments) >= 1 { + r.whitelist = getList(arguments[0], "whitelist") + } - if len(arguments) >= 1 { - whitelist = getList(arguments[0], "whitelist") - } - - if len(arguments) >= 2 { - blacklist = getList(arguments[1], "blacklist") + if len(arguments) >= 2 { + r.blacklist = getList(arguments[1], "blacklist") + } + r.configured = true } fileAst := file.AST walker := lintNames{ file: file, fileAst: fileAst, - whitelist: whitelist, - blacklist: blacklist, + whitelist: r.whitelist, + blacklist: r.blacklist, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, @@ -141,7 +145,12 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor { } } case *ast.FuncDecl: - if w.file.IsTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + funcName := v.Name.Name + if w.file.IsTest() && + (strings.HasPrefix(funcName, "Example") || + strings.HasPrefix(funcName, "Test") || + strings.HasPrefix(funcName, "Benchmark") || + strings.HasPrefix(funcName, "Fuzz")) { return w } @@ -184,7 +193,7 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor { } case *ast.InterfaceType: // Do not check interface method names. - // They are often constrainted by the method names of concrete types. + // They are often constrained by the method names of concrete types. for _, x := range v.Methods.List { ft, ok := x.Type.(*ast.FuncType) if !ok { // might be an embedded interface name diff --git a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md index 9fe803a5e..38a099162 100644 --- a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md +++ b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md @@ -1,3 +1,7 @@ +## 1.4.3 + +* Fix cases where `json.Number` didn't decode properly [GH-261] + ## 1.4.2 * Custom name matchers to support any sort of casing, formatting, etc. 
for diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go index dcee0f2d6..6b81b0067 100644 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure.go +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure.go @@ -684,16 +684,12 @@ func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) e } case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": jn := data.(json.Number) - i, err := jn.Int64() + i, err := strconv.ParseUint(string(jn), 0, 64) if err != nil { return fmt.Errorf( "error decoding json.Number into %s: %s", name, err) } - if i < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot parse '%s', %d overflows uint", - name, i) - } - val.SetUint(uint64(i)) + val.SetUint(i) default: return fmt.Errorf( "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", diff --git a/vendor/github.com/nishanths/exhaustive/.gitignore b/vendor/github.com/nishanths/exhaustive/.gitignore index 24bde5301..10acec6e1 100644 --- a/vendor/github.com/nishanths/exhaustive/.gitignore +++ b/vendor/github.com/nishanths/exhaustive/.gitignore @@ -5,3 +5,6 @@ tags # binary cmd/exhaustive/exhaustive exhaustive + +# testing artifacts +coverage.out diff --git a/vendor/github.com/nishanths/exhaustive/.travis.yml b/vendor/github.com/nishanths/exhaustive/.travis.yml deleted file mode 100644 index bd342f558..000000000 --- a/vendor/github.com/nishanths/exhaustive/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: go - -go: - - 1.x - - master - -# Only clone the most recent commit. -git: - depth: 1 - -notifications: - email: false diff --git a/vendor/github.com/nishanths/exhaustive/Makefile b/vendor/github.com/nishanths/exhaustive/Makefile new file mode 100644 index 000000000..981a7ebe9 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/Makefile @@ -0,0 +1,28 @@ +.PHONY: default +default: build + +.PHONY: build +build: + go build ./... + +.PHONY: test +test: + go test -cover ./... + +.PHONY: install-vet +install-vet: + go install github.com/nishanths/exhaustive/cmd/exhaustive@latest + go install github.com/gordonklaus/ineffassign@latest + go install github.com/kisielk/errcheck@latest + +.PHONY: vet +vet: + go vet ./... + exhaustive ./... + ineffassign ./... + errcheck ./... + +.PHONY: upgrade-deps +upgrade-deps: + go get golang.org/x/tools + go mod tidy diff --git a/vendor/github.com/nishanths/exhaustive/README.md b/vendor/github.com/nishanths/exhaustive/README.md index 633e19f01..b843ee862 100644 --- a/vendor/github.com/nishanths/exhaustive/README.md +++ b/vendor/github.com/nishanths/exhaustive/README.md @@ -1,33 +1,25 @@ -# exhaustive +## exhaustive [![Godoc][2]][1] -[![Godoc](https://godoc.org/github.com/nishanths/exhaustive?status.svg)](https://godoc.org/github.com/nishanths/exhaustive) - -[![Build Status](https://travis-ci.org/nishanths/exhaustive.svg?branch=master)](https://travis-ci.org/nishanths/exhaustive) - -The `exhaustive` package and command line program can be used to detect -enum switch statements that are not exhaustive. - -An enum switch statement is exhaustive if it has cases for each of the enum's members. See godoc for the definition of enum used by the program. - -The `exhaustive` package provides an `Analyzer` that follows the guidelines -described in the [go/analysis](https://godoc.org/golang.org/x/tools/go/analysis) package; this makes -it possible to integrate into existing analysis driver programs. 
- -## Install +Check exhaustiveness of enum switch statements in Go source code. ``` -go get github.com/nishanths/exhaustive/... +go install github.com/nishanths/exhaustive/cmd/exhaustive@latest ``` -## Docs +For docs on the flags, the definition of enum, and the definition of +exhaustiveness, see [godocs.io][4]. -https://godoc.org/github.com/nishanths/exhaustive +For the changelog, see [CHANGELOG][changelog] in the wiki. + +The package provides an `Analyzer` that follows the guidelines in the +[`go/analysis`][3] package; this should make it possible to integrate +exhaustive with your own analysis driver program. ## Example -Given the code: +Given the enum -```diff +```go package token type Token int @@ -36,35 +28,41 @@ const ( Add Token = iota Subtract Multiply -+ Quotient -+ Remainder + Quotient + Remainder ) ``` -``` + +and the switch statement + +```go package calc import "token" -func processToken(t token.Token) { +func f(t token.Token) { switch t { case token.Add: - ... case token.Subtract: - ... case token.Multiply: - ... + default: } } ``` -Running the `exhaustive` command will print: +running exhaustive will print ``` calc.go:6:2: missing cases in switch of type token.Token: Quotient, Remainder ``` -Enums can also be defined using explicit constant values instead of `iota`. +## Contributing -## License +Issues and pull requests are welcome. Before making a substantial +change, please discuss it in an issue. -BSD 2-Clause +[1]: https://godocs.io/github.com/nishanths/exhaustive +[2]: https://godocs.io/github.com/nishanths/exhaustive?status.svg +[3]: https://pkg.go.dev/golang.org/x/tools/go/analysis +[4]: https://godocs.io/github.com/nishanths/exhaustive +[changelog]: https://github.com/nishanths/exhaustive/wiki/CHANGELOG diff --git a/vendor/github.com/nishanths/exhaustive/comment.go b/vendor/github.com/nishanths/exhaustive/comment.go new file mode 100644 index 000000000..ea184d8e8 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/comment.go @@ -0,0 +1,65 @@ +package exhaustive + +import ( + "go/ast" + "regexp" + "strings" +) + +// Generated file definition +// http://golang.org/s/generatedcode +// +// To convey to humans and machine tools that code is generated, generated +// source should have a line that matches the following regular expression (in +// Go syntax): +// +// ^// Code generated .* DO NOT EDIT\.$ +// +// This line must appear before the first non-comment, non-blank +// text in the file. + +func isGeneratedFile(file *ast.File) bool { + // NOTE: file.Comments includes file.Doc as well, so no need + // to separately check file.Doc. + + for _, c := range file.Comments { + for _, cc := range c.List { + // This check is intended to handle "must appear before the + // first non-comment, non-blank text in the file". + // TODO: Is this check fully correct? Seems correct based + // on https://golang.org/ref/spec#Source_file_organization. + if c.Pos() >= file.Package { + return false + } + // According to the docs: + // '\r' has been removed. + // '\n' has been removed for //-style comments, which is what we care about. + // Also manually verified. + if isGeneratedFileComment(cc.Text) { + return true + } + } + } + + return false +} + +var generatedCodeRx = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) + +func isGeneratedFileComment(s string) bool { + return generatedCodeRx.MatchString(s) +} + +// ignoreDirective is used to exclude checking of specific switch statements. 
+const ignoreDirective = "//exhaustive:ignore" + +func containsIgnoreDirective(comments []*ast.CommentGroup) bool { + for _, c := range comments { + for _, cc := range c.List { + if strings.HasPrefix(cc.Text, ignoreDirective) { + return true + } + } + } + return false +} diff --git a/vendor/github.com/nishanths/exhaustive/enum.go b/vendor/github.com/nishanths/exhaustive/enum.go index ed0df642b..2b287e39a 100644 --- a/vendor/github.com/nishanths/exhaustive/enum.go +++ b/vendor/github.com/nishanths/exhaustive/enum.go @@ -1,146 +1,171 @@ package exhaustive import ( + "fmt" "go/ast" "go/token" "go/types" + "strings" - "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/inspector" ) -type enums map[string]*enumMembers // enum type name -> enum members +// constantValue is a constant.Value.ExactString(). +type constantValue string +// Represents an enum type (or a potential enum type). +// It is a defined (named) type's name. +type enumType struct{ *types.TypeName } + +func (et enumType) String() string { return et.TypeName.String() } // for debugging +func (et enumType) scope() *types.Scope { return et.TypeName.Parent() } // scope that the type is declared in +func (et enumType) factObject() types.Object { return et.TypeName } // types.Object for fact export + +// enumMembers is the members for a single enum type. +// The zero value is ready to use. type enumMembers struct { - // Names in the order encountered in the AST. - OrderedNames []string - - // Maps name -> (constant.Value).ExactString(). - // If a name is missing in the map, it means that it does not have a - // corresponding constant.Value defined in the AST. - NameToValue map[string]string - - // Maps (constant.Value).ExactString() -> names. - // Names that don't have a constant.Value defined in the AST (e.g., some - // iota constants) will not have a corresponding entry in this map. 
- ValueToNames map[string][]string + Names []string // enum member names, AST order + NameToValue map[string]constantValue // enum member name -> constant value + ValueToNames map[constantValue][]string // constant value -> enum member names } -func (em *enumMembers) add(name string, constVal *string) { - em.OrderedNames = append(em.OrderedNames, name) - - if constVal != nil { - if em.NameToValue == nil { - em.NameToValue = make(map[string]string) - } - em.NameToValue[name] = *constVal - - if em.ValueToNames == nil { - em.ValueToNames = make(map[string][]string) - } - em.ValueToNames[*constVal] = append(em.ValueToNames[*constVal], name) +func (em *enumMembers) add(name string, val constantValue) { + if em.NameToValue == nil { + em.NameToValue = make(map[string]constantValue) } + if em.ValueToNames == nil { + em.ValueToNames = make(map[constantValue][]string) + } + + em.Names = append(em.Names, name) + em.NameToValue[name] = val + em.ValueToNames[val] = append(em.ValueToNames[val], name) } -func (em *enumMembers) numMembers() int { - return len(em.OrderedNames) +func (em enumMembers) String() string { return em.factString() } // for debugging + +func (em enumMembers) factString() string { + var buf strings.Builder + for j, vv := range em.Names { + buf.WriteString(vv) + // add comma separator between each enum member + if j != len(em.Names)-1 { + buf.WriteString(",") + } + } + return buf.String() } -func findEnums(pass *analysis.Pass) enums { - pkgEnums := make(enums) +func findEnums(pkgScopeOnly bool, pkg *types.Package, inspect *inspector.Inspector, info *types.Info) map[enumType]enumMembers { + result := make(map[enumType]enumMembers) - // Gather enum types. - for _, f := range pass.Files { - for _, decl := range f.Decls { - gen, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - if gen.Tok != token.TYPE { - continue - } - for _, s := range gen.Specs { - // Must be TypeSpec since we've filtered on token.TYPE. - t, ok := s.(*ast.TypeSpec) - obj := pass.TypesInfo.Defs[t.Name] - if obj == nil { - continue - } - - named, ok := obj.Type().(*types.Named) + inspect.Preorder([]ast.Node{&ast.GenDecl{}}, func(n ast.Node) { + gen := n.(*ast.GenDecl) + if gen.Tok != token.CONST { + return + } + for _, s := range gen.Specs { + for _, name := range s.(*ast.ValueSpec).Names { + enumTyp, memberName, val, ok := possibleEnumMember(name, info) if !ok { continue } - basic, ok := named.Underlying().(*types.Basic) - if !ok { + if pkgScopeOnly && enumTyp.scope() != pkg.Scope() { continue } - - switch i := basic.Info(); { - case i&types.IsInteger != 0: - pkgEnums[named.Obj().Name()] = &enumMembers{} - case i&types.IsFloat != 0: - pkgEnums[named.Obj().Name()] = &enumMembers{} - case i&types.IsString != 0: - pkgEnums[named.Obj().Name()] = &enumMembers{} - } + v := result[enumTyp] + v.add(memberName, val) + result[enumTyp] = v } } - } + }) - // Gather enum members. - for _, f := range pass.Files { - for _, decl := range f.Decls { - gen, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - if gen.Tok != token.CONST && gen.Tok != token.VAR { - continue - } - for _, s := range gen.Specs { - // Must be ValueSpec since we've filtered on token.CONST, token.VAR. - v := s.(*ast.ValueSpec) - for i, name := range v.Names { - obj := pass.TypesInfo.Defs[name] - if obj == nil { - continue - } - - named, ok := obj.Type().(*types.Named) - if !ok { - continue - } - - // Get the constant.Value representation, if any. 
- var constVal *string - if len(v.Values) > i { - value := v.Values[i] - if con, ok := pass.TypesInfo.Types[value]; ok && con.Value != nil { - str := con.Value.ExactString() // temp var to be able to take address - constVal = &str - } - } - - em, ok := pkgEnums[named.Obj().Name()] - if !ok { - continue - } - em.add(obj.Name(), constVal) - pkgEnums[named.Obj().Name()] = em - } - } - } - } - - // Delete member-less enum types. - // We can't call these enums, since we can't be sure without - // the existence of members. (The type may just be a named type, - // for instance.) - for k, v := range pkgEnums { - if v.numMembers() == 0 { - delete(pkgEnums, k) - } - } - - return pkgEnums + return result +} + +func possibleEnumMember(constName *ast.Ident, info *types.Info) (et enumType, name string, val constantValue, ok bool) { + obj := info.Defs[constName] + if obj == nil { + panic(fmt.Sprintf("info.Defs[%s] == nil", constName)) + } + if _, ok = obj.(*types.Const); !ok { + panic(fmt.Sprintf("obj must be *types.Const, got %T", obj)) + } + if isBlankIdentifier(obj) { + // These objects have a nil parent scope. + // Also, we have no real purpose to record them. + return enumType{}, "", "", false + } + + /* + NOTE: + + type T int + const A T = iota // obj.Type() is T + + type R T + const B R = iota // obj.Type() is R + + type T2 int + type T1 = T2 + const C T1 = iota // obj.Type() is T2 + + type T3 = T4 + type T4 int + type T5 = T3 + const D T5 = iota // obj.Type() is T4 + + // And, in all these cases, validNamedBasic(obj.Type()) == true. + */ + + if !validNamedBasic(obj.Type()) { + return enumType{}, "", "", false + } + + named := obj.Type().(*types.Named) // guaranteed by validNamedBasic() + tn := named.Obj() + + // Enum type's scope and enum member's scope must be the same. If they're + // not, don't consider the const a member. Additionally, the enum type and + // the enum member must be in the same package (the scope check accounts for + // this, too). + if tn.Parent() != obj.Parent() { + return enumType{}, "", "", false + } + + return enumType{tn}, obj.Name(), determineConstVal(constName, info), true +} + +func determineConstVal(name *ast.Ident, info *types.Info) constantValue { + c := info.ObjectOf(name).(*types.Const) + return constantValue(c.Val().ExactString()) +} + +func isBlankIdentifier(obj types.Object) bool { + return obj.Name() == "_" // NOTE: go/types/decl.go does a direct comparison like this +} + +func validBasic(basic *types.Basic) bool { + switch i := basic.Info(); { + case i&types.IsInteger != 0, i&types.IsFloat != 0, i&types.IsString != 0: + return true + } + return false +} + +// validNamedBasic returns whether the type t is a named type whose underlying +// type is a valid basic type to form an enum. +// A type that passes this check meets the definition of an enum type. +// Note that +// validNamedBasic(t) == true => t.(*types.Named) +func validNamedBasic(t types.Type) bool { + named, ok := t.(*types.Named) + if !ok { + return false + } + basic, ok := named.Underlying().(*types.Basic) + if !ok || !validBasic(basic) { + return false + } + return true } diff --git a/vendor/github.com/nishanths/exhaustive/exhaustive.go b/vendor/github.com/nishanths/exhaustive/exhaustive.go index bee01b108..5e12a6577 100644 --- a/vendor/github.com/nishanths/exhaustive/exhaustive.go +++ b/vendor/github.com/nishanths/exhaustive/exhaustive.go @@ -1,133 +1,236 @@ -// Package exhaustive provides an analyzer that checks exhaustiveness of enum -// switch statements. 
The analyzer also provides fixes to make the offending -// switch statements exhaustive (see "Fixes" section). -// -// See "cmd/exhaustive" subpackage for the related command line program. -// -// Definition of enum -// -// The Go language spec does not provide an explicit definition for enums. -// For the purpose of this program, an enum type is a package-level named type -// whose underlying type is an integer (includes byte and rune), a float, or -// a string type. An enum type must have associated with it one or more -// package-level variables of the named type in the package. These variables -// constitute the enum's members. -// -// In the code snippet below, Biome is an enum type with 3 members. (You may -// also use iota instead of explicitly specifying values.) -// -// type Biome int -// -// const ( -// Tundra Biome = 1 -// Savanna Biome = 2 -// Desert Biome = 3 -// ) -// -// Switch statement exhaustiveness -// -// An enum switch statement is exhaustive if it has cases for each of the enum's members. -// -// For an enum type defined in the same package as the switch statement, both -// exported and unexported enum members must be present in order to consider -// the switch exhaustive. On the other hand, for an enum type defined -// in an external package it is sufficient for just exported enum members -// to be present in order to consider the switch exhaustive. -// -// Flags -// -// The analyzer accepts 4 flags. -// -// The -default-signifies-exhaustive boolean flag indicates to the analyzer -// whether switch statements are to be considered exhaustive as long as a -// 'default' case is present (even if all enum members aren't listed in the -// switch statements cases). The default value is false. -// -// The -check-generated boolean flag indicates whether to check switch -// statements in generated Go source files. The default value is false. -// -// The -ignore-pattern flag specifies a regular expression. Member names -// in enum definitions that match the regular expression do not require a case -// clause to satisfy exhaustiveness. The regular expression is matched against -// enum member names inclusive of the import path, e.g. of the -// form: github.com/foo/bar.Tundra, where the import path is github.com/foo/bar -// and the enum member name is Tundra. -// -// The behavior of the -fix flag is described in the next section. -// -// Fixes -// -// The analyzer suggests fixes for a switch statement if it is not exhaustive. -// The suggested fix always adds a single case clause for the missing enum members. -// -// case MissingA, MissingB, MissingC: -// panic(fmt.Sprintf("unhandled value: %v", v)) -// -// where v is the expression in the switch statement's tag (in other words, the -// value being switched upon). If the switch statement's tag is a function or a -// method call the analyzer does not suggest a fix, as reusing the call expression -// in the panic/fmt.Sprintf call could be mutative. -// -// The rationale for the fix using panic is that it might be better to fail loudly on -// existing unhandled or impossible cases than to let them slip by quietly unnoticed. -// An even better fix may, of course, be to manually inspect the sites reported -// by the package and handle the missing cases if necessary. -// -// Imports will be adjusted automatically to account for the "fmt" dependency. 
-// -// Skipping analysis -// -// If the following directive comment: -// -// //exhaustive:ignore -// -// is associated with a switch statement, the analyzer skips -// checking of the switch statement and no diagnostics are reported. -// -// No diagnostics are reported for switch statements in -// generated files (see https://golang.org/s/generatedcode for definition of -// generated file), unless the -check-generated flag is enabled. -// -// Additionally, see the -ignore-pattern flag. +/* +Package exhaustive provides an analyzer that checks exhaustiveness of enum +switch statements in Go source code. + +Definition of enum + +The Go language spec does not provide an explicit definition for an enum. For +the purpose of this analyzer, an enum type is any named type (a.k.a. defined +type) whose underlying type is an integer (includes byte and rune), a float, or +a string type. An enum type has associated with it constants of this named type; +these constants constitute the enum members. + +In the example below, Biome is an enum type with 3 members. + + type Biome int + + const ( + Tundra Biome = 1 + Savanna Biome = 2 + Desert Biome = 3 + ) + +For a constant to be an enum member for an enum type, the constant must be +declared in the same scope as the enum type. Note that the scope requirement +implies that only constants declared in the same package as the enum type's +package can constitute the enum members for the enum type. + +Enum member constants for a given enum type don't necessarily have to all be +declared in the same const block. Constant values may be specified using iota, +using explicit values, or by any means of declaring a valid Go const. It is +allowed for multiple enum member constants for a given enum type to have the +same constant value. + +Definition of exhaustiveness + +A switch statement that switches on a value of an enum type is exhaustive if all +of the enum type's members are listed in the switch statement's cases. If +multiple enum member constants have the same constant value, it is sufficient +for any one of these same-valued members to be listed. + +For an enum type defined in the same package as the switch statement, both +exported and unexported enum members must be listed to satisfy exhaustiveness. +For an enum type defined in an external package, it is sufficient that only +exported enum members are listed. + +Only identifiers denoting constants (e.g. Tundra) and qualified identifiers +denoting constants (e.g. somepkg.Grassland) listed in a switch statement's cases +can contribute towards satisfying exhaustiveness. Literal values, struct fields, +re-assignable variables, etc. will not. + +Type aliases + +The analyzer handles type aliases for an enum type in the following manner. +Consider the example below. T2 is a enum type, and T1 is an alias for T2. Note +that we don't term T1 itself an enum type; it is only an alias for an enum +type. + + package pkg + type T1 = newpkg.T2 + const ( + A = newpkg.A + B = newpkg.B + ) + + package newpkg + type T2 int + const ( + A T2 = 1 + B T2 = 2 + ) + +Then a switch statement that switches on a value of type T1 (which, in reality, +is just an alternate spelling for type T2) is exhaustive if all of T2's enum +members are listed in the switch statement's cases. The same conditions +described in the previous section for same-valued enum members and for +exported/unexported enum members apply here too. 
+ +It is worth noting that, though T1 and T2 are identical types, only constants +declared in the same scope as type T2's scope can be T2's enum members. In the +example, newpkg.A and newpkg.B are T2's enum members. + +The analyzer guarantees that introducing a type alias (such as type T1 = +newpkg.T2) will never result in new diagnostics from the analyzer, as long as +the set of enum member constant values of the new RHS type (newpkg.T2) is a +subset of the set of enum member constant values of the old LHS type (T1). + +Advanced notes + +Non-enum member constants in a switch statement's cases: Recall from an earlier +section that a constant must be declared in the same scope as the enum type to +be an enum member. It is valid, however, both to the Go type checker and to this +analyzer, for any constant of the right type to be listed in the cases of an +enum switch statement (it does not necessarily have to be an enum member +constant declared in the same scope/package as the enum type's scope/package). +This is particularly useful when a type alias is involved: A forwarding constant +declaration (such as pkg.A, in type T1's package) can take the place of the +actual enum member constant (newpkg.A, in type T2's package) in the switch +statement's cases to satisfy exhaustiveness. + + var v pkg.T1 = pkg.ReturnsT1() // v is effectively of type newpkg.T2 due to alias + switch v { + case pkg.A: // valid substitute for newpkg.A (same constant value) + case pkg.B: // valid substitute for newpkg.B (same constant value) + } + +Flags + +Notable flags supported by the analyzer are described below. +All of these flags are optional. + + flag type default value + + -check-generated bool false + -default-signifies-exhaustive bool false + -ignore-enum-members string (none) + -package-scope-only bool false + +If the -check-generated flag is enabled, switch statements in generated Go +source files are also checked. Otherwise, by default, switch statements in +generated files are not checked. See https://golang.org/s/generatedcode for the +definition of generated file. + +If the -default-signifies-exhaustive flag is enabled, the presence of a +'default' case in a switch statement always satisfies exhaustiveness, even if +all enum members are not listed. It is not recommended that you enable this +flag; enabling it generally defeats the purpose of exhaustiveness checking. + +The -ignore-enum-members flag specifies a regular expression in Go syntax. Enum +members matching the regular expression don't have to be listed in switch +statement cases to satisfy exhaustiveness. The specified regular expression is +matched against an enum member name inclusive of the enum package import path: +for example, if the enum package import path is "example.com/pkg" and the member +name is "Tundra", the specified regular expression will be matched against the +string "example.com/pkg.Tundra". + +If the -package-scope-only flag is enabled, the analyzer only finds enums +defined in package scopes, and consequently only switch statements that switch +on package-scoped enums will be checked for exhaustiveness. By default, the +analyzer finds enums defined in all scopes, and checks switch statements that +switch on all these enums. + +Skip analysis + +To skip checking of a specific switch statement, associate the comment shown in +the example below with the switch statement. Note the lack of whitespace between +the comment marker ("//") and the comment text ("exhaustive:ignore"). + + //exhaustive:ignore + switch v { ... 
} + +To ignore specific enum members, see the -ignore-enum-members flag. + +Switch statements in generated Go source files are not checked by default. +Use the -check-generated flag to change this behavior. +*/ package exhaustive import ( - "go/ast" - "go/types" - "sort" - "strings" + "flag" + "regexp" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" ) +var _ flag.Value = (*regexpFlag)(nil) + +// regexpFlag implements the flag.Value interface for parsing +// regular expression flag values. +type regexpFlag struct{ r *regexp.Regexp } + +func (v *regexpFlag) String() string { + if v == nil || v.r == nil { + return "" + } + return v.r.String() +} + +func (v *regexpFlag) Set(expr string) error { + if expr == "" { + v.r = nil + return nil + } + + r, err := regexp.Compile(expr) + if err != nil { + return err + } + + v.r = r + return nil +} + +func (v *regexpFlag) value() *regexp.Regexp { return v.r } + +func init() { + Analyzer.Flags.BoolVar(&fCheckGeneratedFiles, CheckGeneratedFlag, false, "check switch statements in generated files") + Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "presence of \"default\" case in switch statements satisfies exhaustiveness, even if all enum members are not listed") + Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "enum members matching `regex` do not have to be listed in switch statements to satisfy exhaustiveness") + Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "consider enums only in package scopes, not in inner scopes") + + var unused string + Analyzer.Flags.StringVar(&unused, IgnorePatternFlag, "", "no effect (deprecated); see -"+IgnoreEnumMembersFlag+" instead") + Analyzer.Flags.StringVar(&unused, CheckingStrategyFlag, "", "no effect (deprecated)") +} + // Flag names used by the analyzer. They are exported for use by analyzer // driver programs. const ( - DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive" CheckGeneratedFlag = "check-generated" - IgnorePatternFlag = "ignore-pattern" + DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive" + IgnoreEnumMembersFlag = "ignore-enum-members" + PackageScopeOnlyFlag = "package-scope-only" + + IgnorePatternFlag = "ignore-pattern" // Deprecated: see IgnoreEnumMembersFlag instead. + CheckingStrategyFlag = "checking-strategy" // Deprecated. ) var ( - fDefaultSignifiesExhaustive bool fCheckGeneratedFiles bool - fIgnorePattern regexpFlag + fDefaultSignifiesExhaustive bool + fIgnoreEnumMembers regexpFlag + fPackageScopeOnly bool ) -func init() { - Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "indicates that switch statements are to be considered exhaustive if a 'default' case is present, even if all enum members aren't listed in the switch") - Analyzer.Flags.BoolVar(&fCheckGeneratedFiles, CheckGeneratedFlag, false, "check switch statements in generated files also") - Analyzer.Flags.Var(&fIgnorePattern, IgnorePatternFlag, "do not require a case clause to satisfy exhaustiveness for enum member names that match the provided regular expression pattern") -} - // resetFlags resets the flag variables to their default values. // Useful in tests. 
func resetFlags() { - fDefaultSignifiesExhaustive = false fCheckGeneratedFiles = false - fIgnorePattern = regexpFlag{} + fDefaultSignifiesExhaustive = false + fIgnoreEnumMembers = regexpFlag{} + fPackageScopeOnly = false } var Analyzer = &analysis.Analyzer{ @@ -135,73 +238,21 @@ var Analyzer = &analysis.Analyzer{ Doc: "check exhaustiveness of enum switch statements", Run: run, Requires: []*analysis.Analyzer{inspect.Analyzer}, - FactTypes: []analysis.Fact{&enumsFact{}}, + FactTypes: []analysis.Fact{&enumMembersFact{}}, } func run(pass *analysis.Pass) (interface{}, error) { - e := findEnums(pass) - if len(e) != 0 { - pass.ExportPackageFact(&enumsFact{Enums: e}) - } - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - err := checkSwitchStatements(pass, inspect) - return nil, err -} -// IgnoreDirectivePrefix is used to exclude checking of specific switch statements. -// See package comment for details. -const IgnoreDirectivePrefix = "//exhaustive:ignore" - -func containsIgnoreDirective(comments []*ast.Comment) bool { - for _, c := range comments { - if strings.HasPrefix(c.Text, IgnoreDirectivePrefix) { - return true - } + for typ, members := range findEnums(fPackageScopeOnly, pass.Pkg, inspect, pass.TypesInfo) { + exportFact(pass, typ, members) } - return false -} -type enumsFact struct { - Enums enums -} - -var _ analysis.Fact = (*enumsFact)(nil) - -func (e *enumsFact) AFact() {} - -func (e *enumsFact) String() string { - // sort for stability (required for testing) - var sortedKeys []string - for k := range e.Enums { - sortedKeys = append(sortedKeys, k) + cfg := config{ + defaultSignifiesExhaustive: fDefaultSignifiesExhaustive, + checkGeneratedFiles: fCheckGeneratedFiles, + ignoreEnumMembers: fIgnoreEnumMembers.value(), } - sort.Strings(sortedKeys) - - var buf strings.Builder - for i, k := range sortedKeys { - v := e.Enums[k] - buf.WriteString(k) - buf.WriteString(":") - - for j, vv := range v.OrderedNames { - buf.WriteString(vv) - // add comma separator between each enum member in an enum type - if j != len(v.OrderedNames)-1 { - buf.WriteString(",") - } - } - // add semicolon separator between each enum type - if i != len(sortedKeys)-1 { - buf.WriteString("; ") - } - } - return buf.String() -} - -func enumTypeName(e *types.Named, samePkg bool) string { - if samePkg { - return e.Obj().Name() - } - return e.Obj().Pkg().Name() + "." + e.Obj().Name() + checkSwitchStatements(pass, inspect, cfg) + return nil, nil } diff --git a/vendor/github.com/nishanths/exhaustive/fact.go b/vendor/github.com/nishanths/exhaustive/fact.go new file mode 100644 index 000000000..5fc09beff --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/fact.go @@ -0,0 +1,29 @@ +package exhaustive + +import "golang.org/x/tools/go/analysis" + +// NOTE: Fact types must remain gob-coding compatible. +// See TestFactsGob. + +var _ analysis.Fact = (*enumMembersFact)(nil) + +type enumMembersFact struct{ Members enumMembers } + +func (f *enumMembersFact) AFact() {} +func (f *enumMembersFact) String() string { return f.Members.factString() } + +// exportFact exports the enum members for the given enum type. +func exportFact(pass *analysis.Pass, enumTyp enumType, members enumMembers) { + pass.ExportObjectFact(enumTyp.factObject(), &enumMembersFact{members}) +} + +// importFact imports the enum members for the given possible enum type. +// An (_, false) return indicates that the enum type is not a known one. 
+func importFact(pass *analysis.Pass, possibleEnumType enumType) (enumMembers, bool) { + var f enumMembersFact + ok := pass.ImportObjectFact(possibleEnumType.factObject(), &f) + if !ok { + return enumMembers{}, false + } + return f.Members, true +} diff --git a/vendor/github.com/nishanths/exhaustive/generated.go b/vendor/github.com/nishanths/exhaustive/generated.go deleted file mode 100644 index 19b4fb12b..000000000 --- a/vendor/github.com/nishanths/exhaustive/generated.go +++ /dev/null @@ -1,34 +0,0 @@ -package exhaustive - -import ( - "go/ast" - "strings" -) - -// Adapated from https://gotools.org/dmitri.shuralyov.com/go/generated - -func isGeneratedFile(file *ast.File) bool { - for _, c := range file.Comments { - for _, cc := range c.List { - s := cc.Text // "\n" already removed (see doc comment) - if len(s) >= 1 && s[len(s)-1] == '\r' { - s = s[:len(s)-1] // Trim "\r". - } - if containsGeneratedComment(s) { - return true - } - } - } - - return false -} - -func containsGeneratedComment(s string) bool { - return strings.HasPrefix(s, genCommentPrefix) && - strings.HasSuffix(s, genCommentSuffix) -} - -const ( - genCommentPrefix = "// Code generated " - genCommentSuffix = " DO NOT EDIT." -) diff --git a/vendor/github.com/nishanths/exhaustive/go.mod b/vendor/github.com/nishanths/exhaustive/go.mod index 4db5aeb01..03f91feb8 100644 --- a/vendor/github.com/nishanths/exhaustive/go.mod +++ b/vendor/github.com/nishanths/exhaustive/go.mod @@ -2,7 +2,4 @@ module github.com/nishanths/exhaustive go 1.14 -require ( - golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect - golang.org/x/tools v0.1.4 -) +require golang.org/x/tools v0.1.7 diff --git a/vendor/github.com/nishanths/exhaustive/go.sum b/vendor/github.com/nishanths/exhaustive/go.sum index 20d958ec4..13ba81505 100644 --- a/vendor/github.com/nishanths/exhaustive/go.sum +++ b/vendor/github.com/nishanths/exhaustive/go.sum @@ -1,27 +1,26 @@ -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.4 h1:cVngSRcfgyZCzys3KYOpCFa+4dqX/Oub9tAq00ttGVs= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= diff --git a/vendor/github.com/nishanths/exhaustive/regexp_flag.go b/vendor/github.com/nishanths/exhaustive/regexp_flag.go deleted file mode 100644 index 3a9ef7353..000000000 --- a/vendor/github.com/nishanths/exhaustive/regexp_flag.go +++ /dev/null @@ -1,35 +0,0 @@ -package exhaustive - -import ( - "regexp" -) - -type regexpFlag struct { - r *regexp.Regexp -} - -func (v *regexpFlag) String() string { - if v.r != nil { - return v.r.String() - } - return "" -} - -func (v *regexpFlag) Set(expr string) error { - if expr == "" { - v.r = nil - return nil - } - - r, err := regexp.Compile(expr) - if err != nil { - return err - } - - v.r = r - return nil -} - -func (v *regexpFlag) Get() interface{} { - return v.r -} diff --git a/vendor/github.com/nishanths/exhaustive/switch.go b/vendor/github.com/nishanths/exhaustive/switch.go index 1a88eec0c..22d74a4f4 100644 --- a/vendor/github.com/nishanths/exhaustive/switch.go +++ b/vendor/github.com/nishanths/exhaustive/switch.go @@ -1,15 +1,11 @@ package exhaustive import ( - "bytes" "fmt" "go/ast" - "go/printer" - "go/token" "go/types" "regexp" "sort" - "strconv" "strings" "golang.org/x/tools/go/analysis" @@ -17,428 +13,338 @@ import ( "golang.org/x/tools/go/ast/inspector" ) -func isDefaultCase(c *ast.CaseClause) bool { - return c.List == nil // see doc comment on field -} +// nodeVisitor is like the visitor function used by Inspector.WithStack, +// except that it returns an additional value: a short description of +// the result of this node visit. +// +// The result is typically useful in debugging or in unit tests to check +// that the nodeVisitor function took the expected code path. 
+type nodeVisitor func(n ast.Node, push bool, stack []ast.Node) (proceed bool, result string) -func checkSwitchStatements( - pass *analysis.Pass, - inspect *inspector.Inspector, -) error { - comments := make(map[*ast.File]ast.CommentMap) // CommentMap per package file, lazily populated by reference - generated := make(map[*ast.File]bool) - return checkSwitchStatements_(pass, inspect, comments, generated) -} +// Result values returned by a node visitor constructed via switchStmtChecker. +const ( + resultNotPush = "not push" + resultGeneratedFile = "generated file" + resultNoSwitchTag = "no switch tag" + resultTagNotValue = "switch tag not value type" + resultTagNotNamed = "switch tag not named type" + resultTagNoPkg = "switch tag does not belong to regular package" + resultTagNotEnum = "switch tag not known enum type" + resultSwitchIgnoreComment = "switch statement has ignore comment" + resultEnumMembersAccounted = "requisite enum members accounted for" + resultDefaultCaseSuffices = "default case presence satisfies exhaustiveness" + resultReportedDiagnostic = "reported diagnostic" +) -func checkSwitchStatements_( - pass *analysis.Pass, - inspect *inspector.Inspector, - comments map[*ast.File]ast.CommentMap, - generated map[*ast.File]bool, -) error { - inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, func(n ast.Node, push bool, stack []ast.Node) bool { +// switchStmtChecker returns a node visitor that checks exhaustiveness +// of enum switch statements for the supplied pass, and reports diagnostics for +// switch statements that are non-exhaustive. +func switchStmtChecker(pass *analysis.Pass, cfg config) nodeVisitor { + generated := make(map[*ast.File]bool) // cached results + comments := make(map[*ast.File]ast.CommentMap) // cached results + + return func(n ast.Node, push bool, stack []ast.Node) (bool, string) { if !push { - return true + // The proceed return value should not matter; it is ignored by + // inspector package for pop calls. + // Nevertheless, return true to be on the safe side for the future. + return true, resultNotPush } file := stack[0].(*ast.File) - // Determine if file is a generated file, based on https://golang.org/s/generatedcode. - // If generated, don't check this file. - var isGenerated bool - if gen, ok := generated[file]; ok { - isGenerated = gen - } else { - isGenerated = isGeneratedFile(file) - generated[file] = isGenerated + // Determine if the file is a generated file, and save the result. + // If it is a generated file, don't check the file. + if _, ok := generated[file]; !ok { + generated[file] = isGeneratedFile(file) } - if isGenerated && !fCheckGeneratedFiles { - // don't check - return true + if generated[file] && !cfg.checkGeneratedFiles { + // Don't check this file. + // Return false because the children nodes of node `n` don't have to be checked. + return false, resultGeneratedFile } sw := n.(*ast.SwitchStmt) - if sw.Tag == nil { - return true + + if _, ok := comments[file]; !ok { + comments[file] = ast.NewCommentMap(pass.Fset, file, file.Comments) } + if containsIgnoreDirective(comments[file].Filter(sw).Comments()) { + // Skip checking of this switch statement due to ignore directive comment. + // Still return true because there may be nested switch statements + // that are not to be ignored. 
+ return true, resultSwitchIgnoreComment + } + + if sw.Tag == nil { + return true, resultNoSwitchTag + } + t := pass.TypesInfo.Types[sw.Tag] if !t.IsValue() { - return true + return true, resultTagNotValue } + tagType, ok := t.Type.(*types.Named) if !ok { - return true + return true, resultTagNotNamed } tagPkg := tagType.Obj().Pkg() if tagPkg == nil { - // Doc comment: nil for labels and objects in the Universe scope. + // The Go documentation says: nil for labels and objects in the Universe scope. // This happens for the `error` type, for example. - // Continuing would mean that ImportPackageFact panics. - return true + return true, resultTagNoPkg } - var enums enumsFact - if !pass.ImportPackageFact(tagPkg, &enums) { - // Can't do anything further. - return true + enumTyp := enumType{tagType.Obj()} + members, ok := importFact(pass, enumTyp) + if !ok { + // switch tag's type is not a known enum type. + return true, resultTagNotEnum } - em, isEnum := enums.Enums[tagType.Obj().Name()] - if !isEnum { - // Tag's type is not a known enum. - return true + samePkg := tagPkg == pass.Pkg // do the switch statement and the switch tag type (i.e. enum type) live in the same package? + checkUnexported := samePkg // we want to include unexported members in the exhaustiveness check only if we're in the same package + checklist := makeChecklist(members, tagPkg, checkUnexported, cfg.ignoreEnumMembers) + + hasDefaultCase := analyzeSwitchClauses(sw, tagPkg, members.NameToValue, pass.TypesInfo, func(val constantValue) { + checklist.found(val) + }) + + if len(checklist.remaining()) == 0 { + // All enum members accounted for. + // Nothing to report. + return true, resultEnumMembersAccounted } - - // Get comment map. - var allComments ast.CommentMap - if cm, ok := comments[file]; ok { - allComments = cm - } else { - allComments = ast.NewCommentMap(pass.Fset, file, file.Comments) - comments[file] = allComments + if hasDefaultCase && cfg.defaultSignifiesExhaustive { + // Though enum members are not accounted for, + // the existence of the default case signifies exhaustiveness. + // So don't report. + return true, resultDefaultCaseSuffices } + pass.Report(makeDiagnostic(sw, samePkg, enumTyp, members, checklist.remaining())) + return true, resultReportedDiagnostic + } +} - specificComments := allComments.Filter(sw) - for _, group := range specificComments.Comments() { - if containsIgnoreDirective(group.List) { - return true // skip checking due to ignore directive - } - } +// config is configuration for checkSwitchStatements. +type config struct { + defaultSignifiesExhaustive bool + checkGeneratedFiles bool + ignoreEnumMembers *regexp.Regexp // can be nil +} - samePkg := tagPkg == pass.Pkg - checkUnexported := samePkg +// checkSwitchStatements checks exhaustiveness of enum switch statements for the supplied +// pass. It reports switch statements that are not exhaustive via pass.Report. 
+func checkSwitchStatements(pass *analysis.Pass, inspect *inspector.Inspector, cfg config) { + f := switchStmtChecker(pass, cfg) - hitlist := hitlistFromEnumMembers(em, tagPkg, checkUnexported, fIgnorePattern.Get().(*regexp.Regexp)) - if len(hitlist) == 0 { - return true - } - - var defaultCase *ast.CaseClause - for _, stmt := range sw.Body.List { - caseCl := stmt.(*ast.CaseClause) - if isDefaultCase(caseCl) { - defaultCase = caseCl - continue // nothing more to do if it's the default case - } - for _, e := range caseCl.List { - e = astutil.Unparen(e) - if samePkg { - ident, ok := e.(*ast.Ident) - if !ok { - continue - } - updateHitlist(hitlist, em, ident.Name) - } else { - selExpr, ok := e.(*ast.SelectorExpr) - if !ok { - continue - } - - // ensure X is package identifier - ident, ok := selExpr.X.(*ast.Ident) - if !ok { - continue - } - if !isPackageNameIdentifier(pass, ident) { - continue - } - - updateHitlist(hitlist, em, selExpr.Sel.Name) - } - } - } - - defaultSuffices := fDefaultSignifiesExhaustive && defaultCase != nil - shouldReport := len(hitlist) > 0 && !defaultSuffices - - if shouldReport { - reportSwitch(pass, sw, defaultCase, samePkg, tagType, em, hitlist, file) - } - return true + inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, func(n ast.Node, push bool, stack []ast.Node) bool { + proceed, _ := f(n, push, stack) + return proceed }) - - return nil } -func updateHitlist(hitlist map[string]struct{}, em *enumMembers, foundName string) { - constVal, ok := em.NameToValue[foundName] - if !ok { - // only delete the name alone from hitlist - delete(hitlist, foundName) - return - } - - // delete all of the same-valued names from hitlist - namesToDelete := em.ValueToNames[constVal] - for _, n := range namesToDelete { - delete(hitlist, n) - } +func isDefaultCase(c *ast.CaseClause) bool { + return c.List == nil // see doc comment on List field } -func isPackageNameIdentifier(pass *analysis.Pass, ident *ast.Ident) bool { - obj := pass.TypesInfo.ObjectOf(ident) +func denotesPackage(ident *ast.Ident, info *types.Info) (*types.Package, bool) { + obj := info.ObjectOf(ident) if obj == nil { - return false + return nil, false } - _, ok := obj.(*types.PkgName) - return ok -} - -func hitlistFromEnumMembers(em *enumMembers, enumPkg *types.Package, checkUnexported bool, ignorePattern *regexp.Regexp) map[string]struct{} { - hitlist := make(map[string]struct{}) - for _, name := range em.OrderedNames { - if name == "_" { - // blank identifier is often used to skip entries in iota lists - continue - } - if ignorePattern != nil && ignorePattern.MatchString(enumPkg.Path()+"."+name) { - continue - } - if !ast.IsExported(name) && !checkUnexported { - continue - } - hitlist[name] = struct{}{} - } - return hitlist -} - -func determineMissingOutput(missingMembers map[string]struct{}, em *enumMembers) []string { - constValMembers := make(map[string][]string) // value -> names - var otherMembers []string // non-constant value names - - for m := range missingMembers { - if constVal, ok := em.NameToValue[m]; ok { - constValMembers[constVal] = append(constValMembers[constVal], m) - } else { - otherMembers = append(otherMembers, m) - } - } - - missingOutput := make([]string, 0, len(constValMembers)+len(otherMembers)) - for _, names := range constValMembers { - sort.Strings(names) - missingOutput = append(missingOutput, strings.Join(names, "|")) - } - missingOutput = append(missingOutput, otherMembers...) 
- sort.Strings(missingOutput) - return missingOutput -} - -func reportSwitch( - pass *analysis.Pass, - sw *ast.SwitchStmt, - defaultCase *ast.CaseClause, - samePkg bool, - enumType *types.Named, - em *enumMembers, - missingMembers map[string]struct{}, - f *ast.File, -) { - missingOutput := determineMissingOutput(missingMembers, em) - - var fixes []analysis.SuggestedFix - if fix, ok := computeFix(pass, pass.Fset, f, sw, defaultCase, enumType, samePkg, missingMembers); ok { - fixes = append(fixes, fix) - } - - pass.Report(analysis.Diagnostic{ - Pos: sw.Pos(), - End: sw.End(), - Message: fmt.Sprintf("missing cases in switch of type %s: %s", enumTypeName(enumType, samePkg), strings.Join(missingOutput, ", ")), - SuggestedFixes: fixes, - }) -} - -func computeFix(pass *analysis.Pass, fset *token.FileSet, f *ast.File, sw *ast.SwitchStmt, defaultCase *ast.CaseClause, enumType *types.Named, samePkg bool, missingMembers map[string]struct{}) (analysis.SuggestedFix, bool) { - // Function and method calls may be mutative, so we don't want to reuse the - // call expression in the about-to-be-inserted case clause body. So we just - // don't suggest a fix in such situations. - // - // However, we need to make an exception for type conversions, which are - // also call expressions in the AST. - // - // We'll need to lookup type information for this, and can't rely solely - // on the AST. - if containsFuncCall(pass, sw.Tag) { - return analysis.SuggestedFix{}, false - } - - textEdits := []analysis.TextEdit{missingCasesTextEdit(fset, f, samePkg, sw, defaultCase, enumType, missingMembers)} - - // need to add "fmt" import if "fmt" import doesn't already exist - if !hasImportWithPath(fset, f, `"fmt"`) { - textEdits = append(textEdits, fmtImportTextEdit(fset, f)) - } - - missing := make([]string, 0, len(missingMembers)) - for m := range missingMembers { - missing = append(missing, m) - } - sort.Strings(missing) - - return analysis.SuggestedFix{ - Message: fmt.Sprintf("add case clause for: %s", strings.Join(missing, ", ")), - TextEdits: textEdits, - }, true -} - -func containsFuncCall(pass *analysis.Pass, e ast.Expr) bool { - e = astutil.Unparen(e) - c, ok := e.(*ast.CallExpr) + n, ok := obj.(*types.PkgName) if !ok { - return false + return nil, false } - if _, isFunc := pass.TypesInfo.TypeOf(c.Fun).Underlying().(*types.Signature); isFunc { - return true - } - for _, a := range c.Args { - if containsFuncCall(pass, a) { - return true - } - } - return false + return n.Imported(), true } -func firstImportDecl(fset *token.FileSet, f *ast.File) *ast.GenDecl { - for _, decl := range f.Decls { - genDecl, ok := decl.(*ast.GenDecl) - if ok && genDecl.Tok == token.IMPORT { - // first IMPORT GenDecl - return genDecl +// analyzeSwitchClauses analyzes the clauses in the supplied switch statement. +// +// tagPkg is the package of the switch statement's tag value's type. +// The info param should typically be pass.TypesInfo. The found function is +// called for each enum member name found in the switch statement. +// +// The hasDefaultCase return value indicates whether the switch statement has a +// default clause. 
+func analyzeSwitchClauses(sw *ast.SwitchStmt, tagPkg *types.Package, members map[string]constantValue, info *types.Info, found func(val constantValue)) (hasDefaultCase bool) { + for _, stmt := range sw.Body.List { + caseCl := stmt.(*ast.CaseClause) + if isDefaultCase(caseCl) { + hasDefaultCase = true + continue // nothing more to do if it's the default case + } + for _, expr := range caseCl.List { + analyzeCaseClauseExpr(expr, tagPkg, members, info, found) } } - return nil + return hasDefaultCase } -// copies an GenDecl in a manner such that appending to the returned GenDecl's Specs field -// doesn't mutate the original GenDecl -func copyGenDecl(im *ast.GenDecl) *ast.GenDecl { - imCopy := *im - imCopy.Specs = make([]ast.Spec, len(im.Specs)) - for i := range im.Specs { - imCopy.Specs[i] = im.Specs[i] - } - return &imCopy -} - -func hasImportWithPath(fset *token.FileSet, f *ast.File, pathLiteral string) bool { - igroups := astutil.Imports(fset, f) - for _, igroup := range igroups { - for _, importSpec := range igroup { - if importSpec.Path.Value == pathLiteral { - return true - } +func analyzeCaseClauseExpr(e ast.Expr, tagPkg *types.Package, members map[string]constantValue, info *types.Info, found func(val constantValue)) { + handleIdent := func(ident *ast.Ident) { + obj := info.Uses[ident] + if obj == nil { + return } - } - return false -} - -func fmtImportTextEdit(fset *token.FileSet, f *ast.File) analysis.TextEdit { - firstDecl := firstImportDecl(fset, f) - - if firstDecl == nil { - // file has no import declarations - // insert "fmt" import spec after package statement - return analysis.TextEdit{ - Pos: f.Name.End() + 1, // end of package name + 1 - End: f.Name.End() + 1, - NewText: []byte(`import ( - "fmt" - )`), + if _, ok := obj.(*types.Const); !ok { + return } + + // There are two scenarios. + // See related test cases in typealias/quux/quux.go. + // + // ### Scenario 1 + // + // Tag package and constant package are the same. + // + // For example: + // var mode fs.FileMode + // switch mode { + // case fs.ModeDir: + // } + // + // This is simple: we just use fs.ModeDir's value. + // + // ### Scenario 2 + // + // Tag package and constant package are different. + // + // For example: + // var mode fs.FileMode + // switch mode { + // case os.ModeDir: + // } + // + // Or equivalently: + // var mode os.FileMode // in effect, fs.FileMode because of type alias in package os + // switch mode { + // case os.ModeDir: + // } + // + // In this scenario, too, we accept the case clause expr constant + // value, as is. If the Go type checker is okay with the + // name being listed in the case clause, we don't care much further. + // + found(determineConstVal(ident, info)) } - // copy because we'll be mutating its Specs field - firstDeclCopy := copyGenDecl(firstDecl) + e = astutil.Unparen(e) + switch e := e.(type) { + case *ast.Ident: + handleIdent(e) - // find insertion index for "fmt" import spec - var i int - for ; i < len(firstDeclCopy.Specs); i++ { - im := firstDeclCopy.Specs[i].(*ast.ImportSpec) - if v, _ := strconv.Unquote(im.Path.Value); v > "fmt" { - break + case *ast.SelectorExpr: + x := astutil.Unparen(e.X) + // Ensure we only see the form `pkg.Const`, and not e.g. `structVal.f` + // or `structVal.inner.f`. + // Check that X, which is everything except the rightmost *ast.Ident (or + // Sel), is also an *ast.Ident. 
+ xIdent, ok := x.(*ast.Ident) + if !ok { + return } - } - - // insert "fmt" import spec at the index - fmtSpec := &ast.ImportSpec{ - Path: &ast.BasicLit{ - // NOTE: Pos field doesn't seem to be required for our - // purposes here. - Kind: token.STRING, - Value: `"fmt"`, - }, - } - s := firstDeclCopy.Specs // local var for easier comprehension of next line - s = append(s[:i], append([]ast.Spec{fmtSpec}, s[i:]...)...) - firstDeclCopy.Specs = s - - // create the text edit - var buf bytes.Buffer - printer.Fprint(&buf, fset, firstDeclCopy) - - return analysis.TextEdit{ - Pos: firstDecl.Pos(), - End: firstDecl.End(), - NewText: buf.Bytes(), + // Doesn't matter which package, just that it denotes a package. + if _, ok := denotesPackage(xIdent, info); !ok { + return + } + handleIdent(e.Sel) } } -func missingCasesTextEdit(fset *token.FileSet, f *ast.File, samePkg bool, sw *ast.SwitchStmt, defaultCase *ast.CaseClause, enumType *types.Named, missingMembers map[string]struct{}) analysis.TextEdit { - // ... Construct insertion text for case clause and its body ... - - var tag bytes.Buffer - printer.Fprint(&tag, fset, sw.Tag) - - // If possible and if necessary, determine the package identifier based on - // the AST of other `case` clauses. - var pkgIdent *ast.Ident - if !samePkg { - for _, stmt := range sw.Body.List { - caseCl := stmt.(*ast.CaseClause) - if len(caseCl.List) != 0 { // guard against default case - if sel, ok := caseCl.List[0].(*ast.SelectorExpr); ok { - pkgIdent = sel.X.(*ast.Ident) - break - } - } - } - } - - missing := make([]string, 0, len(missingMembers)) +// diagnosticMissingMembers constructs the list of missing enum members, +// suitable for use in a reported diagnostic message. +func diagnosticMissingMembers(missingMembers map[string]struct{}, em enumMembers) []string { + missingByConstVal := make(map[constantValue][]string) // missing members, keyed by constant value. for m := range missingMembers { - if !samePkg { - if pkgIdent != nil { - // we were able to determine package identifier - missing = append(missing, pkgIdent.Name+"."+m) - } else { - // use the package name (may not be correct always) - // - // TODO: May need to also add import if the package isn't imported - // elsewhere. This (ie, a switch with zero case clauses) should - // happen rarely, so don't implement this for now. - missing = append(missing, enumType.Obj().Pkg().Name()+"."+m) - } - } else { - missing = append(missing, m) - } - } - sort.Strings(missing) - - insert := `case ` + strings.Join(missing, ", ") + `: - panic(fmt.Sprintf("unhandled value: %v",` + tag.String() + `))` - - // ... Create the text edit ... - - pos := sw.Body.Rbrace - 1 // put it as last case - if defaultCase != nil { - pos = defaultCase.Case - 2 // put it before the default case (why -2?) + val := em.NameToValue[m] + missingByConstVal[val] = append(missingByConstVal[val], m) } - return analysis.TextEdit{ - Pos: pos, - End: pos, - NewText: []byte(insert), + var out []string + for _, names := range missingByConstVal { + sort.Strings(names) + out = append(out, strings.Join(names, "|")) + } + sort.Strings(out) + return out +} + +// diagnosticEnumTypeName returns a string representation of an enum type for +// use in reported diagnostics. +func diagnosticEnumTypeName(enumType *types.TypeName, samePkg bool) string { + if samePkg { + return enumType.Name() + } + return enumType.Pkg().Name() + "." 
+ enumType.Name() } +func makeDiagnostic(sw *ast.SwitchStmt, samePkg bool, enumTyp enumType, allMembers enumMembers, missingMembers map[string]struct{}) analysis.Diagnostic { + message := fmt.Sprintf("missing cases in switch of type %s: %s", + diagnosticEnumTypeName(enumTyp.TypeName, samePkg), + strings.Join(diagnosticMissingMembers(missingMembers, allMembers), ", ")) + + return analysis.Diagnostic{ + Pos: sw.Pos(), + End: sw.End(), + Message: message, + } +} + +// A checklist holds a set of enum member names that have to be +// accounted for to satisfy exhaustiveness in an enum switch statement. +// +// The found method checks off member names from the set, based on +// constant value, when a constant value is encountered in the switch +// statement's cases. +// +// The remaining method returns the member names not accounted for. +// +type checklist struct { + em enumMembers + names map[string]struct{} +} + +func makeChecklist(em enumMembers, enumPkg *types.Package, includeUnexported bool, ignore *regexp.Regexp) *checklist { + names := make(map[string]struct{}) + + add := func(memberName string) { + if memberName == "_" { + // Blank identifier is often used to skip entries in iota lists. + // Also, it can't be referenced anywhere (including in a switch + // statement's cases), so it doesn't make sense to include it + // as a required member to satisfy exhaustiveness. + return + } + if !ast.IsExported(memberName) && !includeUnexported { + return + } + if ignore != nil && ignore.MatchString(enumPkg.Path()+"."+memberName) { + return + } + names[memberName] = struct{}{} + } + + for _, name := range em.Names { + add(name) + } + + return &checklist{ + em: em, + names: names, + } +} + +func (c *checklist) found(val constantValue) { + // Delete all of the same-valued names.
+ for _, name := range c.em.ValueToNames[val] { + delete(c.names, name) + } +} + +func (c *checklist) remaining() map[string]struct{} { + return c.names +} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go index 9b35388ef..cb8fc642d 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go @@ -3,7 +3,6 @@ package errorlint import ( "fmt" "go/ast" - "go/types" ) var allowedErrors = []struct { @@ -14,6 +13,8 @@ var allowedErrors = []struct { {err: "io.EOF", fun: "(*tar.Reader).Next"}, {err: "io.EOF", fun: "(*tar.Reader).Read"}, // pkg/bufio + {err: "io.EOF", fun: "(*bufio.Reader).Discard"}, + {err: "io.EOF", fun: "(*bufio.Reader).Peek"}, {err: "io.EOF", fun: "(*bufio.Reader).Read"}, {err: "io.EOF", fun: "(*bufio.Reader).ReadByte"}, {err: "io.EOF", fun: "(*bufio.Reader).ReadBytes"}, @@ -40,6 +41,15 @@ var allowedErrors = []struct { {err: "io.ErrShortBuffer", fun: "io.ReadAtLeast"}, {err: "io.ErrUnexpectedEOF", fun: "io.ReadAtLeast"}, {err: "io.ErrUnexpectedEOF", fun: "io.ReadFull"}, + // pkg/net/http + {err: "http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServe"}, + {err: "http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServeTLS"}, + {err: "http.ErrServerClosed", fun: "(*net/http.Server).Serve"}, + {err: "http.ErrServerClosed", fun: "(*net/http.Server).ServeTLS"}, + {err: "http.ErrServerClosed", fun: "http.ListenAndServe"}, + {err: "http.ErrServerClosed", fun: "http.ListenAndServeTLS"}, + {err: "http.ErrServerClosed", fun: "http.Serve"}, + {err: "http.ErrServerClosed", fun: "http.ServeTLS"}, // pkg/os {err: "io.EOF", fun: "(*os.File).Read"}, {err: "io.EOF", fun: "(*os.File).ReadAt"}, @@ -53,9 +63,18 @@ var allowedErrors = []struct { {err: "io.EOF", fun: "(*strings.Reader).ReadRune"}, } -func isAllowedErrorComparison(info types.Info, binExpr *ast.BinaryExpr) bool { +func isAllowedErrAndFunc(err, fun string) bool { + for _, allow := range allowedErrors { + if allow.fun == fun && allow.err == err { + return true + } + } + return false +} + +func isAllowedErrorComparison(info *TypesInfoExt, binExpr *ast.BinaryExpr) bool { var errName string // `.`, e.g. `io.EOF` - var callExpr *ast.CallExpr + var callExprs []*ast.CallExpr // Figure out which half of the expression is the returned error and which // half is the presumed error declaration. @@ -68,71 +87,95 @@ func isAllowedErrorComparison(info types.Info, binExpr *ast.BinaryExpr) bool { case *ast.Ident: // Identifier, most likely to be the `err` variable or whatever // produces it. - callExpr = assigningCallExpr(info, t) + callExprs = assigningCallExprs(info, t) case *ast.CallExpr: - callExpr = t + callExprs = append(callExprs, t) } } // Unimplemented or not sure, disallow the expression. - if errName == "" || callExpr == nil { + if errName == "" || len(callExprs) == 0 { return false } - // Find the expression that last assigned the subject identifier. - functionSelector, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - // If the function is not a selector it is not an Std function that is - // allowed. - return false - } - var functionName string - if sel, ok := info.Selections[functionSelector]; ok { - functionName = fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) - } else { - // If there is no selection, assume it is a package. 
- functionName = selectorToString(callExpr.Fun.(*ast.SelectorExpr)) - } - - for _, w := range allowedErrors { - if w.fun == functionName && w.err == errName { - return true + // Map call expressions to the function name format of the allow list. + functionNames := make([]string, len(callExprs)) + for i, callExpr := range callExprs { + functionSelector, ok := callExpr.Fun.(*ast.SelectorExpr) + if !ok { + // If the function is not a selector it is not an Std function that is + // allowed. + return false + } + if sel, ok := info.Selections[functionSelector]; ok { + functionNames[i] = fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) + } else { + // If there is no selection, assume it is a package. + functionNames[i] = selectorToString(callExpr.Fun.(*ast.SelectorExpr)) } } - return false + + // All assignments done must be allowed. + for _, funcName := range functionNames { + if !isAllowedErrAndFunc(errName, funcName) { + return false + } + } + return true } -func assigningCallExpr(info types.Info, subject *ast.Ident) *ast.CallExpr { +// assigningCallExprs finds all *ast.CallExpr nodes that are part of an +// *ast.AssignStmt that assign to the subject identifier. +func assigningCallExprs(info *TypesInfoExt, subject *ast.Ident) []*ast.CallExpr { if subject.Obj == nil { return nil } - switch declT := subject.Obj.Decl.(type) { - case *ast.AssignStmt: - // The identifier is LHS of an assignment. - assignment := declT - assigningExpr := assignment.Rhs[0] - // If the assignment is comprised of multiple expressions, find out - // which LHS expression we should use by finding its index in the LHS. - if len(assignment.Rhs) > 1 { - for i, lhs := range assignment.Lhs { - if subject.Name == lhs.(*ast.Ident).Name { - assigningExpr = assignment.Rhs[i] - break - } - } - } - - switch assignT := assigningExpr.(type) { - case *ast.CallExpr: - // Found the function call. - return assignT - case *ast.Ident: - // The subject was the result of assigning from another identifier. - return assigningCallExpr(info, assignT) + // Find other identifiers that reference this same object. Make sure to + // exclude the subject identifier as it will cause an infinite recursion + // and is being used in a read operation anyway. + sobj := info.ObjectOf(subject) + identifiers := []*ast.Ident{} + for _, ident := range info.IdentifiersForObject[sobj] { + if subject.Pos() != ident.Pos() { + identifiers = append(identifiers, ident) } } - return nil + + // Find out whether the identifiers are part of an assignment statement. + var callExprs []*ast.CallExpr + for _, ident := range identifiers { + parent := info.NodeParent[ident] + switch declT := parent.(type) { + case *ast.AssignStmt: + // The identifier is LHS of an assignment. + assignment := declT + + assigningExpr := assignment.Rhs[0] + // If the assignment is comprised of multiple expressions, find out + // which LHS expression we should use by finding its index in the LHS. + if len(assignment.Rhs) > 1 { + for i, lhs := range assignment.Lhs { + if subject.Name == lhs.(*ast.Ident).Name { + assigningExpr = assignment.Rhs[i] + break + } + } + } + + switch assignT := assigningExpr.(type) { + case *ast.CallExpr: + // Found the function call. + callExprs = append(callExprs, assignT) + case *ast.Ident: + // The subject was the result of assigning from another identifier. + callExprs = append(callExprs, assigningCallExprs(info, assignT)...) + default: + // TODO: inconclusive? 
+ } + } + return callExprs } func selectorToString(selExpr *ast.SelectorExpr) string { diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go index e2449f8f9..58ddb2632 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go @@ -2,6 +2,8 @@ package errorlint import ( "flag" + "go/ast" + "go/types" "sort" "golang.org/x/tools/go/analysis" @@ -31,8 +33,9 @@ func init() { func run(pass *analysis.Pass) (interface{}, error) { lints := []Lint{} + extInfo := newTypesInfoExt(pass.TypesInfo) if checkComparison { - l := LintErrorComparisons(pass.Fset, *pass.TypesInfo) + l := LintErrorComparisons(pass.Fset, extInfo) lints = append(lints, l...) } if checkAsserts { @@ -50,3 +53,47 @@ func run(pass *analysis.Pass) (interface{}, error) { } return nil, nil } + +type TypesInfoExt struct { + types.Info + + // Maps AST nodes back to the node they are contained within. + NodeParent map[ast.Node]ast.Node + + // Maps an object back to all identifiers that refer to it. + IdentifiersForObject map[types.Object][]*ast.Ident +} + +func newTypesInfoExt(info *types.Info) *TypesInfoExt { + nodeParent := map[ast.Node]ast.Node{} + for node := range info.Scopes { + file, ok := node.(*ast.File) + if !ok { + continue + } + stack := []ast.Node{file} + ast.Inspect(file, func(n ast.Node) bool { + nodeParent[n] = stack[len(stack)-1] + if n == nil { + stack = stack[:len(stack)-1] + } else { + stack = append(stack, n) + } + return true + }) + } + + identifiersForObject := map[types.Object][]*ast.Ident{} + for node, obj := range info.Defs { + identifiersForObject[obj] = append(identifiersForObject[obj], node) + } + for node, obj := range info.Uses { + identifiersForObject[obj] = append(identifiersForObject[obj], node) + } + + return &TypesInfoExt{ + Info: *info, + NodeParent: nodeParent, + IdentifiersForObject: identifiersForObject, + } +} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go index 3d11946a0..3d239f625 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go @@ -48,7 +48,7 @@ func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { var lintArg ast.Expr args := call.Args[1:] for i := 0; i < len(args) && i < len(formatVerbs); i++ { - if info.Types[args[i]].Type.String() != "error" && !isErrorStringCall(info, args[i]) { + if !implementsError(info.Types[args[i]].Type) && !isErrorStringCall(info, args[i]) { continue } @@ -121,7 +121,7 @@ func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { return nil, false } -func LintErrorComparisons(fset *token.FileSet, info types.Info) []Lint { +func LintErrorComparisons(fset *token.FileSet, info *TypesInfoExt) []Lint { lints := []Lint{} for expr := range info.Types { @@ -138,7 +138,7 @@ func LintErrorComparisons(fset *token.FileSet, info *TypesInfoExt) []Lint { continue } // Find comparisons of which one side is a of type error.
- if !isErrorComparison(info, binExpr) { + if !isErrorComparison(info.Info, binExpr) { continue } @@ -247,3 +247,20 @@ func isErrorTypeAssertion(info types.Info, typeAssert *ast.TypeAssertExpr) bool t := info.Types[typeAssert.X] return t.Type.String() == "error" } + +func implementsError(t types.Type) bool { + mset := types.NewMethodSet(t) + + for i := 0; i < mset.Len(); i++ { + if mset.At(i).Kind() != types.MethodVal { + continue + } + + obj := mset.At(i).Obj() + if obj.Name() == "Error" && obj.Type().String() == "func() string" { + return true + } + } + + return false +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/collector.go b/vendor/github.com/prometheus/client_golang/prometheus/collector.go index 1e839650d..ac1ca3cf5 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/collector.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/collector.go @@ -118,3 +118,11 @@ func (c *selfCollector) Describe(ch chan<- *Desc) { func (c *selfCollector) Collect(ch chan<- Metric) { ch <- c.self } + +// collectorMetric is a metric that is also a collector. +// Because of selfCollector, most (if not all) Metrics in +// this package are also collectors. +type collectorMetric interface { + Metric + Collector +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go index d53474243..d43bdcdda 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go @@ -20,6 +20,7 @@ import ( "math" "runtime" "runtime/metrics" + "strings" "sync" //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. @@ -31,10 +32,14 @@ import ( type goCollector struct { base baseGoCollector + // mu protects updates to all fields ensuring a consistent + // snapshot is always produced by Collect. + mu sync.Mutex + // rm... fields all pertain to the runtime/metrics package. rmSampleBuf []metrics.Sample rmSampleMap map[string]*metrics.Sample - rmMetrics []Metric + rmMetrics []collectorMetric // With Go 1.17, the runtime/metrics package was introduced. // From that point on, metric names produced by the runtime/metrics @@ -52,13 +57,24 @@ type goCollector struct { // Deprecated: Use collectors.NewGoCollector instead. func NewGoCollector() Collector { descriptions := metrics.All() - descMap := make(map[string]*metrics.Description) - for i := range descriptions { - descMap[descriptions[i].Name] = &descriptions[i] + + // Collect all histogram samples so that we can get their buckets. + // The API guarantees that the buckets are always fixed for the lifetime + // of the process. + var histograms []metrics.Sample + for _, d := range descriptions { + if d.Kind == metrics.KindFloat64Histogram { + histograms = append(histograms, metrics.Sample{Name: d.Name}) + } + } + metrics.Read(histograms) + bucketsMap := make(map[string][]float64) + for i := range histograms { + bucketsMap[histograms[i].Name] = histograms[i].Value.Float64Histogram().Buckets } // Generate a Desc and ValueType for each runtime/metrics metric. 
- metricSet := make([]Metric, 0, len(descriptions)) + metricSet := make([]collectorMetric, 0, len(descriptions)) sampleBuf := make([]metrics.Sample, 0, len(descriptions)) sampleMap := make(map[string]*metrics.Sample, len(descriptions)) for i := range descriptions { @@ -76,9 +92,10 @@ func NewGoCollector() Collector { sampleBuf = append(sampleBuf, metrics.Sample{Name: d.Name}) sampleMap[d.Name] = &sampleBuf[len(sampleBuf)-1] - var m Metric + var m collectorMetric if d.Kind == metrics.KindFloat64Histogram { _, hasSum := rmExactSumMap[d.Name] + unit := d.Name[strings.IndexRune(d.Name, ':')+1:] m = newBatchHistogram( NewDesc( BuildFQName(namespace, subsystem, name), @@ -86,6 +103,7 @@ func NewGoCollector() Collector { nil, nil, ), + internal.RuntimeMetricsBucketsForUnit(bucketsMap[d.Name], unit), hasSum, ) } else if d.Cumulative { @@ -130,9 +148,25 @@ func (c *goCollector) Collect(ch chan<- Metric) { // Collect base non-memory metrics. c.base.Collect(ch) + // Collect must be thread-safe, so prevent concurrent use of + // rmSampleBuf. Just read into rmSampleBuf but write all the data + // we get into our Metrics or MemStats. + // + // This lock also ensures that the Metrics we send out are all from + // the same updates, ensuring their mutual consistency insofar as + // is guaranteed by the runtime/metrics package. + // + // N.B. This locking is heavy-handed, but Collect is expected to be called + // relatively infrequently. Also the core operation here, metrics.Read, + // is fast (O(tens of microseconds)) so contention should certainly be + // low, though channel operations and any allocations may add to that. + c.mu.Lock() + defer c.mu.Unlock() + // Populate runtime/metrics sample buffer. metrics.Read(c.rmSampleBuf) + // Update all our metrics from rmSampleBuf. for i, sample := range c.rmSampleBuf { // N.B. switch on concrete type because it's significantly more efficient // than checking for the Counter and Gauge interface implementations. In @@ -157,7 +191,6 @@ func (c *goCollector) Collect(ch chan<- Metric) { panic("unexpected metric type") } } - // ms is a dummy MemStats that we populate ourselves so that we can // populate the old metrics from it. var ms runtime.MemStats @@ -280,13 +313,27 @@ type batchHistogram struct { // but Write calls may operate concurrently with updates. // Contention between these two sources should be rare. mu sync.Mutex - buckets []float64 // Inclusive lower bounds. + buckets []float64 // Inclusive lower bounds, like runtime/metrics. counts []uint64 sum float64 // Used if hasSum is true. } -func newBatchHistogram(desc *Desc, hasSum bool) *batchHistogram { - h := &batchHistogram{desc: desc, hasSum: hasSum} +// newBatchHistogram creates a new batch histogram value with the given +// Desc, buckets, and whether or not it has an exact sum available. +// +// buckets must always be from the runtime/metrics package, following +// the same conventions. +func newBatchHistogram(desc *Desc, buckets []float64, hasSum bool) *batchHistogram { + h := &batchHistogram{ + desc: desc, + buckets: buckets, + // Because buckets follows runtime/metrics conventions, there's + // 1 more value in the buckets list than there are buckets represented, + // because in runtime/metrics, the bucket values represent *boundaries*, + // and non-Inf boundaries are inclusive lower bounds for that bucket. 
+ counts: make([]uint64, len(buckets)-1), + hasSum: hasSum, + } h.init(h) return h } @@ -294,28 +341,25 @@ func newBatchHistogram(desc *Desc, hasSum bool) *batchHistogram { // update updates the batchHistogram from a runtime/metrics histogram. // // sum must be provided if the batchHistogram was created to have an exact sum. +// h.buckets must be a strict subset of his.Buckets. func (h *batchHistogram) update(his *metrics.Float64Histogram, sum float64) { counts, buckets := his.Counts, his.Buckets - // Skip a -Inf bucket altogether. It's not clear how to represent that. - if math.IsInf(buckets[0], -1) { - buckets = buckets[1:] - counts = counts[1:] - } h.mu.Lock() defer h.mu.Unlock() - // Check if we're initialized. - if h.buckets == nil { - // Make copies of counts and buckets. It's really important - // that we don't retain his.Counts or his.Buckets anywhere since - // it's going to get reused. - h.buckets = make([]float64, len(buckets)) - copy(h.buckets, buckets) - - h.counts = make([]uint64, len(counts)) + // Clear buckets. + for i := range h.counts { + h.counts[i] = 0 + } + // Copy and reduce buckets. + var j int + for i, count := range counts { + h.counts[j] += count + if buckets[i+1] == h.buckets[j+1] { + j++ + } } - copy(h.counts, counts) if h.hasSum { h.sum = sum } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go index afc8dff49..fe0a52180 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go @@ -17,6 +17,7 @@ package internal import ( + "math" "path" "runtime/metrics" "strings" @@ -75,3 +76,67 @@ func RuntimeMetricsToProm(d *metrics.Description) (string, string, string, bool) } return namespace, subsystem, name, valid } + +// RuntimeMetricsBucketsForUnit takes a set of buckets obtained for a runtime/metrics histogram +// type (so, lower-bound inclusive) and a unit from a runtime/metrics name, and produces +// a reduced set of buckets. This function always removes any -Inf bucket as it's represented +// as the bottom-most upper-bound inclusive bucket in Prometheus. +func RuntimeMetricsBucketsForUnit(buckets []float64, unit string) []float64 { + switch unit { + case "bytes": + // Rebucket as powers of 2. + return rebucketExp(buckets, 2) + case "seconds": + // Rebucket as powers of 10 and then merge all buckets greater + // than 1 second into the +Inf bucket. + b := rebucketExp(buckets, 10) + for i := range b { + if b[i] <= 1 { + continue + } + b[i] = math.Inf(1) + b = b[:i+1] + break + } + return b + } + return buckets +} + +// rebucketExp takes a list of bucket boundaries (lower bound inclusive) and +// downsamples the buckets to those a multiple of base apart. The end result +// is a roughly exponential (in many cases, perfectly exponential) bucketing +// scheme. +func rebucketExp(buckets []float64, base float64) []float64 { + bucket := buckets[0] + var newBuckets []float64 + // We may see a -Inf here, in which case, add it and skip it + // since we risk producing NaNs otherwise. + // + // We need to preserve -Inf values to maintain runtime/metrics + // conventions. We'll strip it out later. 
+ if bucket == math.Inf(-1) { + newBuckets = append(newBuckets, bucket) + buckets = buckets[1:] + bucket = buckets[0] + } + // From now on, bucket should always have a non-Inf value because + // Infs are only ever at the ends of the bucket lists, so + // arithmetic operations on it are non-NaN. + for i := 1; i < len(buckets); i++ { + if bucket >= 0 && buckets[i] < bucket*base { + // The next bucket we want to include is at least bucket*base. + continue + } else if bucket < 0 && buckets[i] < bucket/base { + // In this case the bucket we're targeting is negative, and since + // we're ascending through buckets here, we need to divide to get + // closer to zero exponentially. + continue + } + // The +Inf bucket will always be the last one, and we'll always + // end up including it here because bucket + newBuckets = append(newBuckets, bucket) + bucket = buckets[i] + } + return append(newBuckets, bucket) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/LICENSE index f0381fb49..558f81ff2 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/LICENSE +++ b/vendor/github.com/quasilyte/go-ruleguard/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2019, Iskander (Alex) Sharipov / quasilyte +Copyright (c) 2022, Iskander (Alex) Sharipov / quasilyte All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go index e3d7ea70b..c52a5a822 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go @@ -3,6 +3,8 @@ package ruleguard import ( "go/ast" "go/constant" + + "github.com/quasilyte/gogrep/nodetag" ) type astWalker struct { @@ -10,10 +12,10 @@ type astWalker struct { filterParams *filterParams - visit func(ast.Node) + visit func(ast.Node, nodetag.Value) } -func (w *astWalker) Walk(root ast.Node, visit func(ast.Node)) { +func (w *astWalker) Walk(root ast.Node, visit func(ast.Node, nodetag.Value)) { w.visit = visit w.walk(root) } @@ -46,8 +48,6 @@ func (w *astWalker) walk(n ast.Node) { w.nodePath.Push(n) defer w.nodePath.Pop() - w.visit(n) - switch n := n.(type) { case *ast.Field: // TODO: handle field types. 
@@ -61,32 +61,39 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.Ellipsis: + w.visit(n, nodetag.Ellipsis) if n.Elt != nil { w.walk(n.Elt) } case *ast.FuncLit: + w.visit(n, nodetag.FuncLit) w.walk(n.Type) w.walk(n.Body) case *ast.CompositeLit: + w.visit(n, nodetag.CompositeLit) if n.Type != nil { w.walk(n.Type) } w.walkExprList(n.Elts) case *ast.ParenExpr: + w.visit(n, nodetag.ParenExpr) w.walk(n.X) case *ast.SelectorExpr: + w.visit(n, nodetag.SelectorExpr) w.walk(n.X) w.walk(n.Sel) case *ast.IndexExpr: + w.visit(n, nodetag.IndexExpr) w.walk(n.X) w.walk(n.Index) case *ast.SliceExpr: + w.visit(n, nodetag.SliceExpr) w.walk(n.X) if n.Low != nil { w.walk(n.Low) @@ -99,39 +106,48 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.TypeAssertExpr: + w.visit(n, nodetag.TypeAssertExpr) w.walk(n.X) if n.Type != nil { w.walk(n.Type) } case *ast.CallExpr: + w.visit(n, nodetag.CallExpr) w.walk(n.Fun) w.walkExprList(n.Args) case *ast.StarExpr: + w.visit(n, nodetag.StarExpr) w.walk(n.X) case *ast.UnaryExpr: + w.visit(n, nodetag.UnaryExpr) w.walk(n.X) case *ast.BinaryExpr: + w.visit(n, nodetag.BinaryExpr) w.walk(n.X) w.walk(n.Y) case *ast.KeyValueExpr: + w.visit(n, nodetag.KeyValueExpr) w.walk(n.Key) w.walk(n.Value) case *ast.ArrayType: + w.visit(n, nodetag.ArrayType) if n.Len != nil { w.walk(n.Len) } w.walk(n.Elt) case *ast.StructType: + w.visit(n, nodetag.StructType) w.walk(n.Fields) case *ast.FuncType: + w.visit(n, nodetag.FuncType) if n.Params != nil { w.walk(n.Params) } @@ -140,54 +156,69 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.InterfaceType: + w.visit(n, nodetag.InterfaceType) w.walk(n.Methods) case *ast.MapType: + w.visit(n, nodetag.MapType) w.walk(n.Key) w.walk(n.Value) case *ast.ChanType: + w.visit(n, nodetag.ChanType) w.walk(n.Value) case *ast.DeclStmt: + w.visit(n, nodetag.DeclStmt) w.walk(n.Decl) case *ast.LabeledStmt: + w.visit(n, nodetag.LabeledStmt) w.walk(n.Label) w.walk(n.Stmt) case *ast.ExprStmt: + w.visit(n, nodetag.ExprStmt) w.walk(n.X) case *ast.SendStmt: + w.visit(n, nodetag.SendStmt) w.walk(n.Chan) w.walk(n.Value) case *ast.IncDecStmt: + w.visit(n, nodetag.IncDecStmt) w.walk(n.X) case *ast.AssignStmt: + w.visit(n, nodetag.AssignStmt) w.walkExprList(n.Lhs) w.walkExprList(n.Rhs) case *ast.GoStmt: + w.visit(n, nodetag.GoStmt) w.walk(n.Call) case *ast.DeferStmt: + w.visit(n, nodetag.DeferStmt) w.walk(n.Call) case *ast.ReturnStmt: + w.visit(n, nodetag.ReturnStmt) w.walkExprList(n.Results) case *ast.BranchStmt: + w.visit(n, nodetag.BranchStmt) if n.Label != nil { w.walk(n.Label) } case *ast.BlockStmt: + w.visit(n, nodetag.BlockStmt) w.walkStmtList(n.List) case *ast.IfStmt: + w.visit(n, nodetag.IfStmt) if n.Init != nil { w.walk(n.Init) } @@ -212,10 +243,12 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.CaseClause: + w.visit(n, nodetag.CaseClause) w.walkExprList(n.List) w.walkStmtList(n.Body) case *ast.SwitchStmt: + w.visit(n, nodetag.SwitchStmt) if n.Init != nil { w.walk(n.Init) } @@ -225,6 +258,7 @@ func (w *astWalker) walk(n ast.Node) { w.walk(n.Body) case *ast.TypeSwitchStmt: + w.visit(n, nodetag.TypeSwitchStmt) if n.Init != nil { w.walk(n.Init) } @@ -232,15 +266,18 @@ func (w *astWalker) walk(n ast.Node) { w.walk(n.Body) case *ast.CommClause: + w.visit(n, nodetag.CommClause) if n.Comm != nil { w.walk(n.Comm) } w.walkStmtList(n.Body) case *ast.SelectStmt: + w.visit(n, nodetag.SelectStmt) w.walk(n.Body) case *ast.ForStmt: + w.visit(n, nodetag.ForStmt) if n.Init != nil { w.walk(n.Init) } @@ -253,6 +290,7 @@ func (w *astWalker) walk(n ast.Node) { w.walk(n.Body) 
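// A standalone sketch of the node tags the walker now hands to its callback: a tag is a
// cheap, precomputable classification of the ast.Node's dynamic type, which lets the
// caller index rules per tag instead of re-deriving it for every visited node. It assumes
// the nodetag package exports FromNode, mirroring the helper the old runner used; the
// demo source is invented.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"github.com/quasilyte/gogrep/nodetag"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", `package p; func f(a int) int { return a + a }`, 0)
	if err != nil {
		panic(err)
	}
	perTag := map[nodetag.Value]int{}
	ast.Inspect(f, func(n ast.Node) bool {
		if n != nil {
			perTag[nodetag.FromNode(n)]++
		}
		return true
	})
	fmt.Println(perTag[nodetag.BinaryExpr], perTag[nodetag.FuncDecl]) // 1 1
}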
case *ast.RangeStmt: + w.visit(n, nodetag.RangeStmt) if n.Key != nil { w.walk(n.Key) } @@ -263,6 +301,7 @@ func (w *astWalker) walk(n ast.Node) { w.walk(n.Body) case *ast.ImportSpec: + w.visit(n, nodetag.ImportSpec) if n.Name != nil { w.walk(n.Name) } @@ -272,6 +311,7 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.ValueSpec: + w.visit(n, nodetag.ValueSpec) if n.Doc != nil { w.walk(n.Doc) } @@ -285,6 +325,7 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.TypeSpec: + w.visit(n, nodetag.TypeSpec) if n.Doc != nil { w.walk(n.Doc) } @@ -295,6 +336,7 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.GenDecl: + w.visit(n, nodetag.GenDecl) if n.Doc != nil { w.walk(n.Doc) } @@ -303,6 +345,9 @@ func (w *astWalker) walk(n ast.Node) { } case *ast.FuncDecl: + w.visit(n, nodetag.FuncDecl) + prevFunc := w.filterParams.currentFunc + w.filterParams.currentFunc = n if n.Doc != nil { w.walk(n.Doc) } @@ -314,8 +359,10 @@ func (w *astWalker) walk(n ast.Node) { if n.Body != nil { w.walk(n.Body) } + w.filterParams.currentFunc = prevFunc case *ast.File: + w.visit(n, nodetag.File) w.walk(n.Name) w.walkDeclList(n.Decls) } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go index 9bf50dab8..848d2d93a 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go @@ -7,12 +7,12 @@ import ( "go/types" "path/filepath" - "github.com/quasilyte/go-ruleguard/internal/gogrep" "github.com/quasilyte/go-ruleguard/internal/xtypes" - "github.com/quasilyte/go-ruleguard/nodetag" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" "github.com/quasilyte/go-ruleguard/ruleguard/textmatch" "github.com/quasilyte/go-ruleguard/ruleguard/typematch" + "github.com/quasilyte/gogrep" + "github.com/quasilyte/gogrep/nodetag" ) const filterSuccess = matchFilterResult("") @@ -189,6 +189,63 @@ func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filte } } +func makeTypeHasPointersFilter(src, varname string) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if typeHasPointers(typ) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTypeIsIntUintFilter(src, varname string, underlying bool, kind types.BasicKind) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if underlying { + typ = typ.Underlying() + } + if basicType, ok := typ.(*types.Basic); ok { + first := kind + last := kind + 4 + if basicType.Kind() >= first && basicType.Kind() <= last { + return filterSuccess + } + } + return filterFailure(src) + } +} + +func makeTypeIsSignedFilter(src, varname string, underlying bool) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if underlying { + typ = typ.Underlying() + } + if basicType, ok := typ.(*types.Basic); ok { + if basicType.Info()&types.IsInteger != 0 && basicType.Info()&types.IsUnsigned == 0 { + return filterSuccess + } + } + return filterFailure(src) + } +} + +func makeTypeOfKindFilter(src, varname string, underlying bool, kind types.BasicInfo) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if underlying { + typ = typ.Underlying() + } + if basicType, ok := typ.(*types.Basic); ok { + if basicType.Info()&kind != 0 { + return 
filterSuccess + } + } + return filterFailure(src) + } +} + func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc { if underlying { return func(params *filterParams) matchFilterResult { @@ -197,7 +254,7 @@ func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Patte return pat.MatchIdentical(params.typeofNode(x).Underlying()) }) } - typ := params.typeofNode(params.subExpr(varname)).Underlying() + typ := params.typeofNode(params.subNode(varname)).Underlying() if pat.MatchIdentical(typ) { return filterSuccess } @@ -211,7 +268,7 @@ func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Patte return pat.MatchIdentical(params.typeofNode(x)) }) } - typ := params.typeofNode(params.subExpr(varname)) + typ := params.typeofNode(params.subNode(varname)) if pat.MatchIdentical(typ) { return filterSuccess } @@ -484,3 +541,31 @@ func nodeIs(n ast.Node, tag nodetag.Value) bool { } return matched } + +func typeHasPointers(typ types.Type) bool { + switch typ := typ.(type) { + case *types.Basic: + switch typ.Kind() { + case types.UnsafePointer, types.String, types.UntypedNil, types.UntypedString: + return true + } + return false + + case *types.Named: + return typeHasPointers(typ.Underlying()) + + case *types.Struct: + for i := 0; i < typ.NumFields(); i++ { + if typeHasPointers(typ.Field(i).Type()) { + return true + } + } + return false + + case *types.Array: + return typeHasPointers(typ.Elem()) + + default: + return true + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go index cfc7b70d6..011a82cec 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go @@ -6,9 +6,9 @@ import ( "go/types" "regexp" - "github.com/quasilyte/go-ruleguard/internal/gogrep" - "github.com/quasilyte/go-ruleguard/nodetag" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" + "github.com/quasilyte/gogrep" + "github.com/quasilyte/gogrep/nodetag" ) type goRuleSet struct { @@ -67,6 +67,8 @@ type filterParams struct { deadcode bool + currentFunc *ast.FuncDecl + // varname is set only for custom filters before bytecode function is called. 
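// A hedged sketch of how the two new type filters built above are meant to be used from
// a ruleguard rules file: typeHasPointers backs a Type.HasPointers() check, and
// makeTypeOfKindFilter / makeTypeIsSignedFilter / makeTypeIsIntUintFilter back
// Type.OfKind(...) checks. It assumes the dsl package exposes those methods, matching
// the IR op comments; the matched patterns themselves are only illustrative.
//go:build ignore
// +build ignore

package gorules

import "github.com/quasilyte/go-ruleguard/dsl"

func syncPoolValue(m dsl.Matcher) {
	// Putting a pointer-free value into a sync.Pool boxes it into interface{},
	// which is exactly the allocation the pool was meant to avoid.
	m.Match(`$pool.Put($x)`).
		Where(m["pool"].Type.Is(`*sync.Pool`) && !m["x"].Type.HasPointers()).
		Report(`non-pointer values in sync.Pool involve an extra allocation`)
}

func unsignedUnderflow(m dsl.Matcher) {
	// "unsigned" is one of the supported kind strings (alongside "signed", "int",
	// "uint", "integer", "float", "complex", "untyped" and "numeric").
	m.Match(`$x - 1`).
		Where(m["x"].Pure && m["x"].Type.Underlying().OfKind("unsigned")).
		Report(`$x - 1 wraps around to a huge value when $x is 0`)
}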
varname string } @@ -89,12 +91,16 @@ func (params *filterParams) subExpr(name string) ast.Expr { } func (params *filterParams) typeofNode(n ast.Node) types.Type { - if e, ok := n.(ast.Expr); ok { - if typ := params.ctx.Types.TypeOf(e); typ != nil { - return typ - } + var e ast.Expr + switch n := n.(type) { + case ast.Expr: + e = n + case *ast.Field: + e = n.Type + } + if typ := params.ctx.Types.TypeOf(e); typ != nil { + return typ } - return types.Typ[types.Invalid] } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go index 46feaf0b5..c4a07bfc6 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go @@ -64,161 +64,179 @@ const ( // $Value type: string FilterVarTypeSizeOp FilterOp = 17 + // m[$Value].Type.HasPointers() + // $Value type: string + FilterVarTypeHasPointersOp FilterOp = 18 + // m[$Value].Filter($Args[0]) // $Value type: string - FilterVarFilterOp FilterOp = 18 + FilterVarFilterOp FilterOp = 19 // m[$Value].Node.Is($Args[0]) // $Value type: string - FilterVarNodeIsOp FilterOp = 19 + FilterVarNodeIsOp FilterOp = 20 // m[$Value].Object.Is($Args[0]) // $Value type: string - FilterVarObjectIsOp FilterOp = 20 + FilterVarObjectIsOp FilterOp = 21 // m[$Value].Type.Is($Args[0]) // $Value type: string - FilterVarTypeIsOp FilterOp = 21 + FilterVarTypeIsOp FilterOp = 22 // m[$Value].Type.Underlying().Is($Args[0]) // $Value type: string - FilterVarTypeUnderlyingIsOp FilterOp = 22 + FilterVarTypeUnderlyingIsOp FilterOp = 23 + + // m[$Value].Type.OfKind($Args[0]) + // $Value type: string + FilterVarTypeOfKindOp FilterOp = 24 + + // m[$Value].Type.Underlying().OfKind($Args[0]) + // $Value type: string + FilterVarTypeUnderlyingOfKindOp FilterOp = 25 // m[$Value].Type.ConvertibleTo($Args[0]) // $Value type: string - FilterVarTypeConvertibleToOp FilterOp = 23 + FilterVarTypeConvertibleToOp FilterOp = 26 // m[$Value].Type.AssignableTo($Args[0]) // $Value type: string - FilterVarTypeAssignableToOp FilterOp = 24 + FilterVarTypeAssignableToOp FilterOp = 27 // m[$Value].Type.Implements($Args[0]) // $Value type: string - FilterVarTypeImplementsOp FilterOp = 25 + FilterVarTypeImplementsOp FilterOp = 28 // m[$Value].Text.Matches($Args[0]) // $Value type: string - FilterVarTextMatchesOp FilterOp = 26 + FilterVarTextMatchesOp FilterOp = 29 // m.Deadcode() - FilterDeadcodeOp FilterOp = 27 + FilterDeadcodeOp FilterOp = 30 // m.GoVersion().Eq($Value) // $Value type: string - FilterGoVersionEqOp FilterOp = 28 + FilterGoVersionEqOp FilterOp = 31 // m.GoVersion().LessThan($Value) // $Value type: string - FilterGoVersionLessThanOp FilterOp = 29 + FilterGoVersionLessThanOp FilterOp = 32 // m.GoVersion().GreaterThan($Value) // $Value type: string - FilterGoVersionGreaterThanOp FilterOp = 30 + FilterGoVersionGreaterThanOp FilterOp = 33 // m.GoVersion().LessEqThan($Value) // $Value type: string - FilterGoVersionLessEqThanOp FilterOp = 31 + FilterGoVersionLessEqThanOp FilterOp = 34 // m.GoVersion().GreaterEqThan($Value) // $Value type: string - FilterGoVersionGreaterEqThanOp FilterOp = 32 + FilterGoVersionGreaterEqThanOp FilterOp = 35 // m.File.Imports($Value) // $Value type: string - FilterFileImportsOp FilterOp = 33 + FilterFileImportsOp FilterOp = 36 // m.File.PkgPath.Matches($Value) // $Value type: string - FilterFilePkgPathMatchesOp FilterOp = 34 + FilterFilePkgPathMatchesOp FilterOp = 37 // 
m.File.Name.Matches($Value) // $Value type: string - FilterFileNameMatchesOp FilterOp = 35 + FilterFileNameMatchesOp FilterOp = 38 // $Value holds a function name // $Value type: string - FilterFilterFuncRefOp FilterOp = 36 + FilterFilterFuncRefOp FilterOp = 39 // $Value holds a string constant // $Value type: string - FilterStringOp FilterOp = 37 + FilterStringOp FilterOp = 40 // $Value holds an int64 constant // $Value type: int64 - FilterIntOp FilterOp = 38 + FilterIntOp FilterOp = 41 // m[`$$`].Node.Parent().Is($Args[0]) - FilterRootNodeParentIsOp FilterOp = 39 + FilterRootNodeParentIsOp FilterOp = 42 ) var filterOpNames = map[FilterOp]string{ - FilterInvalidOp: `Invalid`, - FilterNotOp: `Not`, - FilterAndOp: `And`, - FilterOrOp: `Or`, - FilterEqOp: `Eq`, - FilterNeqOp: `Neq`, - FilterGtOp: `Gt`, - FilterLtOp: `Lt`, - FilterGtEqOp: `GtEq`, - FilterLtEqOp: `LtEq`, - FilterVarAddressableOp: `VarAddressable`, - FilterVarPureOp: `VarPure`, - FilterVarConstOp: `VarConst`, - FilterVarConstSliceOp: `VarConstSlice`, - FilterVarTextOp: `VarText`, - FilterVarLineOp: `VarLine`, - FilterVarValueIntOp: `VarValueInt`, - FilterVarTypeSizeOp: `VarTypeSize`, - FilterVarFilterOp: `VarFilter`, - FilterVarNodeIsOp: `VarNodeIs`, - FilterVarObjectIsOp: `VarObjectIs`, - FilterVarTypeIsOp: `VarTypeIs`, - FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`, - FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`, - FilterVarTypeAssignableToOp: `VarTypeAssignableTo`, - FilterVarTypeImplementsOp: `VarTypeImplements`, - FilterVarTextMatchesOp: `VarTextMatches`, - FilterDeadcodeOp: `Deadcode`, - FilterGoVersionEqOp: `GoVersionEq`, - FilterGoVersionLessThanOp: `GoVersionLessThan`, - FilterGoVersionGreaterThanOp: `GoVersionGreaterThan`, - FilterGoVersionLessEqThanOp: `GoVersionLessEqThan`, - FilterGoVersionGreaterEqThanOp: `GoVersionGreaterEqThan`, - FilterFileImportsOp: `FileImports`, - FilterFilePkgPathMatchesOp: `FilePkgPathMatches`, - FilterFileNameMatchesOp: `FileNameMatches`, - FilterFilterFuncRefOp: `FilterFuncRef`, - FilterStringOp: `String`, - FilterIntOp: `Int`, - FilterRootNodeParentIsOp: `RootNodeParentIs`, + FilterInvalidOp: `Invalid`, + FilterNotOp: `Not`, + FilterAndOp: `And`, + FilterOrOp: `Or`, + FilterEqOp: `Eq`, + FilterNeqOp: `Neq`, + FilterGtOp: `Gt`, + FilterLtOp: `Lt`, + FilterGtEqOp: `GtEq`, + FilterLtEqOp: `LtEq`, + FilterVarAddressableOp: `VarAddressable`, + FilterVarPureOp: `VarPure`, + FilterVarConstOp: `VarConst`, + FilterVarConstSliceOp: `VarConstSlice`, + FilterVarTextOp: `VarText`, + FilterVarLineOp: `VarLine`, + FilterVarValueIntOp: `VarValueInt`, + FilterVarTypeSizeOp: `VarTypeSize`, + FilterVarTypeHasPointersOp: `VarTypeHasPointers`, + FilterVarFilterOp: `VarFilter`, + FilterVarNodeIsOp: `VarNodeIs`, + FilterVarObjectIsOp: `VarObjectIs`, + FilterVarTypeIsOp: `VarTypeIs`, + FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`, + FilterVarTypeOfKindOp: `VarTypeOfKind`, + FilterVarTypeUnderlyingOfKindOp: `VarTypeUnderlyingOfKind`, + FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`, + FilterVarTypeAssignableToOp: `VarTypeAssignableTo`, + FilterVarTypeImplementsOp: `VarTypeImplements`, + FilterVarTextMatchesOp: `VarTextMatches`, + FilterDeadcodeOp: `Deadcode`, + FilterGoVersionEqOp: `GoVersionEq`, + FilterGoVersionLessThanOp: `GoVersionLessThan`, + FilterGoVersionGreaterThanOp: `GoVersionGreaterThan`, + FilterGoVersionLessEqThanOp: `GoVersionLessEqThan`, + FilterGoVersionGreaterEqThanOp: `GoVersionGreaterEqThan`, + FilterFileImportsOp: `FileImports`, + FilterFilePkgPathMatchesOp: 
`FilePkgPathMatches`, + FilterFileNameMatchesOp: `FileNameMatches`, + FilterFilterFuncRefOp: `FilterFuncRef`, + FilterStringOp: `String`, + FilterIntOp: `Int`, + FilterRootNodeParentIsOp: `RootNodeParentIs`, } var filterOpFlags = map[FilterOp]uint64{ - FilterAndOp: flagIsBinaryExpr, - FilterOrOp: flagIsBinaryExpr, - FilterEqOp: flagIsBinaryExpr, - FilterNeqOp: flagIsBinaryExpr, - FilterGtOp: flagIsBinaryExpr, - FilterLtOp: flagIsBinaryExpr, - FilterGtEqOp: flagIsBinaryExpr, - FilterLtEqOp: flagIsBinaryExpr, - FilterVarAddressableOp: flagHasVar, - FilterVarPureOp: flagHasVar, - FilterVarConstOp: flagHasVar, - FilterVarConstSliceOp: flagHasVar, - FilterVarTextOp: flagHasVar, - FilterVarLineOp: flagHasVar, - FilterVarValueIntOp: flagHasVar, - FilterVarTypeSizeOp: flagHasVar, - FilterVarFilterOp: flagHasVar, - FilterVarNodeIsOp: flagHasVar, - FilterVarObjectIsOp: flagHasVar, - FilterVarTypeIsOp: flagHasVar, - FilterVarTypeUnderlyingIsOp: flagHasVar, - FilterVarTypeConvertibleToOp: flagHasVar, - FilterVarTypeAssignableToOp: flagHasVar, - FilterVarTypeImplementsOp: flagHasVar, - FilterVarTextMatchesOp: flagHasVar, - FilterStringOp: flagIsBasicLit, - FilterIntOp: flagIsBasicLit, + FilterAndOp: flagIsBinaryExpr, + FilterOrOp: flagIsBinaryExpr, + FilterEqOp: flagIsBinaryExpr, + FilterNeqOp: flagIsBinaryExpr, + FilterGtOp: flagIsBinaryExpr, + FilterLtOp: flagIsBinaryExpr, + FilterGtEqOp: flagIsBinaryExpr, + FilterLtEqOp: flagIsBinaryExpr, + FilterVarAddressableOp: flagHasVar, + FilterVarPureOp: flagHasVar, + FilterVarConstOp: flagHasVar, + FilterVarConstSliceOp: flagHasVar, + FilterVarTextOp: flagHasVar, + FilterVarLineOp: flagHasVar, + FilterVarValueIntOp: flagHasVar, + FilterVarTypeSizeOp: flagHasVar, + FilterVarTypeHasPointersOp: flagHasVar, + FilterVarFilterOp: flagHasVar, + FilterVarNodeIsOp: flagHasVar, + FilterVarObjectIsOp: flagHasVar, + FilterVarTypeIsOp: flagHasVar, + FilterVarTypeUnderlyingIsOp: flagHasVar, + FilterVarTypeOfKindOp: flagHasVar, + FilterVarTypeUnderlyingOfKindOp: flagHasVar, + FilterVarTypeConvertibleToOp: flagHasVar, + FilterVarTypeAssignableToOp: flagHasVar, + FilterVarTypeImplementsOp: flagHasVar, + FilterVarTextMatchesOp: flagHasVar, + FilterStringOp: flagIsBasicLit, + FilterIntOp: flagIsBasicLit, } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go index a5b7b07eb..48b719d94 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go @@ -47,12 +47,15 @@ func main() { {name: "VarLine", comment: "m[$Value].Line", valueType: "string", flags: flagHasVar}, {name: "VarValueInt", comment: "m[$Value].Value.Int()", valueType: "string", flags: flagHasVar}, {name: "VarTypeSize", comment: "m[$Value].Type.Size", valueType: "string", flags: flagHasVar}, + {name: "VarTypeHasPointers", comment: "m[$Value].Type.HasPointers()", valueType: "string", flags: flagHasVar}, {name: "VarFilter", comment: "m[$Value].Filter($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarNodeIs", comment: "m[$Value].Node.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarObjectIs", comment: "m[$Value].Object.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeIs", comment: "m[$Value].Type.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeUnderlyingIs", comment: "m[$Value].Type.Underlying().Is($Args[0])", valueType: "string", flags: 
flagHasVar}, + {name: "VarTypeOfKind", comment: "m[$Value].Type.OfKind($Args[0])", valueType: "string", flags: flagHasVar}, + {name: "VarTypeUnderlyingOfKind", comment: "m[$Value].Type.Underlying().OfKind($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeConvertibleTo", comment: "m[$Value].Type.ConvertibleTo($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeAssignableTo", comment: "m[$Value].Type.AssignableTo($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeImplements", comment: "m[$Value].Type.Implements($Args[0])", valueType: "string", flags: flagHasVar}, diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go index 272ab7fe2..7e29c81b0 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go @@ -11,13 +11,13 @@ import ( "io/ioutil" "regexp" - "github.com/quasilyte/go-ruleguard/internal/gogrep" - "github.com/quasilyte/go-ruleguard/nodetag" "github.com/quasilyte/go-ruleguard/ruleguard/goutil" "github.com/quasilyte/go-ruleguard/ruleguard/ir" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" "github.com/quasilyte/go-ruleguard/ruleguard/textmatch" "github.com/quasilyte/go-ruleguard/ruleguard/typematch" + "github.com/quasilyte/gogrep" + "github.com/quasilyte/gogrep/nodetag" ) type irLoaderConfig struct { @@ -253,7 +253,7 @@ func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error { } for _, rule := range group.Rules { - if err := l.loadRule(rule); err != nil { + if err := l.loadRule(group, rule); err != nil { return err } } @@ -261,7 +261,7 @@ func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error { return nil } -func (l *irLoader) loadRule(rule ir.Rule) error { +func (l *irLoader) loadRule(group *ir.RuleGroup, rule ir.Rule) error { proto := goRule{ line: rule.Line, group: l.group, @@ -282,7 +282,7 @@ func (l *irLoader) loadRule(rule ir.Rule) error { } for _, pat := range rule.SyntaxPatterns { - if err := l.loadSyntaxRule(proto, info, rule, pat.Value, pat.Line); err != nil { + if err := l.loadSyntaxRule(group, proto, info, rule, pat.Value, pat.Line); err != nil { return err } } @@ -312,11 +312,26 @@ func (l *irLoader) loadCommentRule(resultProto goRule, rule ir.Rule, src string, return nil } -func (l *irLoader) loadSyntaxRule(resultProto goRule, filterInfo filterInfo, rule ir.Rule, src string, line int) error { +func (l *irLoader) loadSyntaxRule(group *ir.RuleGroup, resultProto goRule, filterInfo filterInfo, rule ir.Rule, src string, line int) error { result := resultProto result.line = line - pat, info, err := gogrep.Compile(l.gogrepFset, src, false) + var imports map[string]string + if len(group.Imports) != 0 { + imports = make(map[string]string) + for _, imported := range group.Imports { + imports[imported.Name] = imported.Path + } + } + + gogrepConfig := gogrep.CompileConfig{ + Fset: l.gogrepFset, + Src: src, + Strict: false, + WithTypes: true, + Imports: imports, + } + pat, info, err := gogrep.Compile(gogrepConfig) if err != nil { return l.errorf(rule.Line, err, "parse match pattern") } @@ -454,6 +469,25 @@ func (l *irLoader) unwrapStringExpr(filter ir.FilterExpr) string { return "" } +func (l *irLoader) stringToBasicKind(s string) types.BasicInfo { + switch s { + case "integer": + return types.IsInteger + case "unsigned": + return types.IsUnsigned + case "float": + return types.IsFloat + case "complex": + return types.IsComplex + case "untyped": + 
return types.IsUnsigned + case "numeric": + return types.IsNumeric + default: + return 0 + } +} + func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilter, error) { if filter.HasVar() { info.Vars[filter.Value.(string)] = struct{}{} @@ -507,6 +541,30 @@ func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilte } result.fn = makeNodeIsFilter(result.src, filter.Value.(string), tag) + case ir.FilterVarTypeHasPointersOp: + result.fn = makeTypeHasPointersFilter(result.src, filter.Value.(string)) + + case ir.FilterVarTypeOfKindOp, ir.FilterVarTypeUnderlyingOfKindOp: + kindString := l.unwrapStringExpr(filter.Args[0]) + if kindString == "" { + return result, l.errorf(filter.Line, nil, "expected a non-empty string argument") + } + underlying := filter.Op == ir.FilterVarTypeUnderlyingOfKindOp + switch kindString { + case "signed": + result.fn = makeTypeIsSignedFilter(result.src, filter.Value.(string), underlying) + case "int": + result.fn = makeTypeIsIntUintFilter(result.src, filter.Value.(string), underlying, types.Int) + case "uint": + result.fn = makeTypeIsIntUintFilter(result.src, filter.Value.(string), underlying, types.Uint) + default: + kind := l.stringToBasicKind(kindString) + if kind == 0 { + return result, l.errorf(filter.Line, nil, "unknown kind %s", kindString) + } + result.fn = makeTypeOfKindFilter(result.src, filter.Value.(string), underlying, kind) + } + case ir.FilterVarTypeIsOp, ir.FilterVarTypeUnderlyingIsOp: typeString := l.unwrapStringExpr(filter.Args[0]) if typeString == "" { diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go index ceb6e816a..7e577a436 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go @@ -10,6 +10,7 @@ import ( "strconv" "strings" + "github.com/go-toolsmith/astcopy" "github.com/quasilyte/go-ruleguard/ruleguard/goutil" "github.com/quasilyte/go-ruleguard/ruleguard/ir" "golang.org/x/tools/go/ast/astutil" @@ -52,13 +53,20 @@ type convError struct { err error } +type localMacroFunc struct { + name string + params []string + template ast.Expr +} + type converter struct { types *types.Info pkg *types.Package fset *token.FileSet src []byte - group *ir.RuleGroup + group *ir.RuleGroup + groupFuncs []localMacroFunc dslPkgname string // The local name of the "ruleguard/dsl" package (usually its just "dsl") } @@ -171,6 +179,7 @@ func (conv *converter) convertRuleGroup(decl *ast.FuncDecl) *ir.RuleGroup { Line: conv.fset.Position(decl.Name.Pos()).Line, } conv.group = result + conv.groupFuncs = conv.groupFuncs[:0] result.Name = decl.Name.String() result.MatcherName = decl.Type.Params.List[0].Names[0].String() @@ -181,6 +190,11 @@ func (conv *converter) convertRuleGroup(decl *ast.FuncDecl) *ir.RuleGroup { seenRules := false for _, stmt := range decl.Body.List { + if assign, ok := stmt.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE { + conv.localDefine(assign) + continue + } + if _, ok := stmt.(*ast.DeclStmt); ok { continue } @@ -208,6 +222,146 @@ func (conv *converter) convertRuleGroup(decl *ast.FuncDecl) *ir.RuleGroup { return result } +func (conv *converter) findLocalMacro(call *ast.CallExpr) *localMacroFunc { + fn, ok := call.Fun.(*ast.Ident) + if !ok { + return nil + } + for i := range conv.groupFuncs { + if conv.groupFuncs[i].name == fn.Name { + return &conv.groupFuncs[i] + } + } + return nil +} + +func (conv *converter) 
expandMacro(macro *localMacroFunc, call *ast.CallExpr) ir.FilterExpr { + // Check that call args are OK. + // Since "function calls" are implemented as a macro expansion here, + // we don't allow arguments that have a non-trivial evaluation. + isSafe := func(arg ast.Expr) bool { + switch arg := astutil.Unparen(arg).(type) { + case *ast.BasicLit, *ast.Ident: + return true + + case *ast.IndexExpr: + mapIdent, ok := astutil.Unparen(arg.X).(*ast.Ident) + if !ok { + return false + } + if mapIdent.Name != conv.group.MatcherName { + return false + } + key, ok := astutil.Unparen(arg.Index).(*ast.BasicLit) + if !ok || key.Kind != token.STRING { + return false + } + return true + + default: + return false + } + } + args := map[string]ast.Expr{} + for i, arg := range call.Args { + paramName := macro.params[i] + if !isSafe(arg) { + panic(conv.errorf(arg, "unsupported/too complex %s argument", paramName)) + } + args[paramName] = astutil.Unparen(arg) + } + + body := astcopy.Expr(macro.template) + expanded := astutil.Apply(body, nil, func(cur *astutil.Cursor) bool { + if ident, ok := cur.Node().(*ast.Ident); ok { + arg, ok := args[ident.Name] + if ok { + cur.Replace(arg) + return true + } + } + // astcopy above will copy the AST tree, but it won't update + // the associated types.Info map of const values. + // We'll try to solve that issue at least partially here. + if lit, ok := cur.Node().(*ast.BasicLit); ok { + switch lit.Kind { + case token.STRING: + val, err := strconv.Unquote(lit.Value) + if err == nil { + conv.types.Types[lit] = types.TypeAndValue{ + Type: types.Typ[types.UntypedString], + Value: constant.MakeString(val), + } + } + case token.INT: + val, err := strconv.ParseInt(lit.Value, 0, 64) + if err == nil { + conv.types.Types[lit] = types.TypeAndValue{ + Type: types.Typ[types.UntypedInt], + Value: constant.MakeInt64(val), + } + } + case token.FLOAT: + val, err := strconv.ParseFloat(lit.Value, 64) + if err == nil { + conv.types.Types[lit] = types.TypeAndValue{ + Type: types.Typ[types.UntypedFloat], + Value: constant.MakeFloat64(val), + } + } + } + } + return true + }) + + return conv.convertFilterExpr(expanded.(ast.Expr)) +} + +func (conv *converter) localDefine(assign *ast.AssignStmt) { + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + panic(conv.errorf(assign, "multi-value := is not supported")) + } + lhs, ok := assign.Lhs[0].(*ast.Ident) + if !ok { + panic(conv.errorf(assign.Lhs[0], "only simple ident lhs is supported")) + } + rhs := assign.Rhs[0] + fn, ok := rhs.(*ast.FuncLit) + if !ok { + panic(conv.errorf(rhs, "only func literals are supported on the rhs")) + } + typ := conv.types.TypeOf(fn).(*types.Signature) + isBoolResult := typ.Results() != nil && + typ.Results().Len() == 1 && + typ.Results().At(0).Type() == types.Typ[types.Bool] + if !isBoolResult { + var loc ast.Node = fn.Type + if fn.Type.Results != nil { + loc = fn.Type.Results + } + panic(conv.errorf(loc, "only funcs returning bool are supported")) + } + if len(fn.Body.List) != 1 { + panic(conv.errorf(fn.Body, "only simple 1 return statement funcs are supported")) + } + stmt, ok := fn.Body.List[0].(*ast.ReturnStmt) + if !ok { + panic(conv.errorf(fn.Body.List[0], "expected a return statement, found %T", fn.Body.List[0])) + } + var params []string + for _, field := range fn.Type.Params.List { + for _, id := range field.Names { + params = append(params, id.Name) + } + } + macro := localMacroFunc{ + name: lhs.Name, + params: params, + template: stmt.Results[0], + } + conv.groupFuncs = append(conv.groupFuncs, macro) +} + func 
(conv *converter) doMatcherImport(call *ast.CallExpr) { pkgPath := conv.parseStringArg(call.Args[0]) pkgName := path.Base(pkgPath) @@ -518,6 +672,10 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { return ir.FilterExpr{Op: ir.FilterVarFilterOp, Value: op.varName, Args: args} } + if macro := conv.findLocalMacro(e); macro != nil { + return conv.expandMacro(macro, e) + } + args := convertExprList(e.Args) switch op.path { case "Value.Int": @@ -534,10 +692,16 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { return ir.FilterExpr{Op: ir.FilterRootNodeParentIsOp, Args: args} case "Object.Is": return ir.FilterExpr{Op: ir.FilterVarObjectIsOp, Value: op.varName, Args: args} + case "Type.HasPointers": + return ir.FilterExpr{Op: ir.FilterVarTypeHasPointersOp, Value: op.varName} case "Type.Is": return ir.FilterExpr{Op: ir.FilterVarTypeIsOp, Value: op.varName, Args: args} case "Type.Underlying.Is": return ir.FilterExpr{Op: ir.FilterVarTypeUnderlyingIsOp, Value: op.varName, Args: args} + case "Type.OfKind": + return ir.FilterExpr{Op: ir.FilterVarTypeOfKindOp, Value: op.varName, Args: args} + case "Type.Underlying.OfKind": + return ir.FilterExpr{Op: ir.FilterVarTypeUnderlyingOfKindOp, Value: op.varName, Args: args} case "Type.ConvertibleTo": return ir.FilterExpr{Op: ir.FilterVarTypeConvertibleToOp, Value: op.varName, Args: args} case "Type.AssignableTo": diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go index c9d64aff7..3bf3bf5a8 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go @@ -3,7 +3,7 @@ package ruleguard import ( "go/ast" - "github.com/quasilyte/go-ruleguard/internal/gogrep" + "github.com/quasilyte/gogrep" ) // matchData is used to handle both regexp and AST match sets in the same way. 
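// A hedged sketch of the group-local helper feature implemented by localDefine and
// expandMacro above: a `name := func(...) bool { return <expr> }` binding inside a rule
// group is expanded like a macro wherever it is called in a Where clause, so only a
// single-return bool literal with simple arguments (identifiers, literals, m["..."]
// lookups) is accepted. The concrete rule below is illustrative only.
//go:build ignore
// +build ignore

package gorules

import "github.com/quasilyte/go-ruleguard/dsl"

func sleepByConstant(m dsl.Matcher) {
	// Local "macro" helper: inlined at every call site, with `v` replaced
	// by the m["..."] argument it is called with.
	isSmallIntConst := func(v dsl.Var) bool { return v.Const && v.Value.Int() < 1000 }

	m.Match(`time.Sleep($d)`).
		Where(isSmallIntConst(m["d"])).
		Report(`time.Sleep($d) is $d nanoseconds; multiply by a time unit?`)
}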
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go new file mode 100644 index 000000000..c5b26e230 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go @@ -0,0 +1,16 @@ +//go:build !pproflabels +// +build !pproflabels + +package profiling + +import ( + "context" +) + +const LabelsEnabled = false + +func EnterWithLabels(origContext context.Context, name string) { +} + +func Leave(origContext context.Context) { +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go new file mode 100644 index 000000000..6a35a13ad --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go @@ -0,0 +1,21 @@ +//go:build pproflabels +// +build pproflabels + +package profiling + +import ( + "context" + "runtime/pprof" +) + +const LabelsEnabled = true + +func EnterWithLabels(origContext context.Context, name string) { + labels := pprof.Labels("rules", name) + ctx := pprof.WithLabels(origContext, labels) + pprof.SetGoroutineLabels(ctx) +} + +func Leave(origContext context.Context) { + pprof.SetGoroutineLabels(origContext) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go index db61b40ee..515843056 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go @@ -11,6 +11,8 @@ import ( "golang.org/x/tools/go/ast/astutil" ) +var voidType = &types.Tuple{} + func compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) { defer func() { if err != nil { @@ -74,10 +76,14 @@ type compileError string func (e compileError) Error() string { return string(e) } func (cl *compiler) compileFunc(fn *ast.FuncDecl) *Func { - if cl.fnType.Results().Len() != 1 { - panic(cl.errorf(fn.Name, "only functions with a single non-void results are supported")) + switch cl.fnType.Results().Len() { + case 0: + cl.retType = voidType + case 1: + cl.retType = cl.fnType.Results().At(0).Type() + default: + panic(cl.errorf(fn.Name, "multi-result functions are not supported")) } - cl.retType = cl.fnType.Results().At(0).Type() if !cl.isSupportedType(cl.retType) { panic(cl.errorUnsupportedType(fn.Name, cl.retType, "function result")) @@ -100,6 +106,10 @@ func (cl *compiler) compileFunc(fn *ast.FuncDecl) *Func { } cl.compileStmt(fn.Body) + if cl.retType == voidType { + cl.emit(opReturn) + } + compiled := &Func{ code: cl.code, constants: cl.constants, @@ -136,6 +146,9 @@ func (cl *compiler) compileStmt(stmt ast.Stmt) { case *ast.BranchStmt: cl.compileBranchStmt(stmt) + case *ast.ExprStmt: + cl.compileExprStmt(stmt) + case *ast.BlockStmt: for i := range stmt.List { cl.compileStmt(stmt.List[i]) @@ -172,6 +185,19 @@ func (cl *compiler) compileBranchStmt(branch *ast.BranchStmt) { } } +func (cl *compiler) compileExprStmt(stmt *ast.ExprStmt) { + if call, ok := stmt.X.(*ast.CallExpr); ok { + sig := cl.ctx.Types.TypeOf(call.Fun).(*types.Signature) + if sig.Results() != nil { + panic(cl.errorf(call, "only void funcs can be used in stmt context")) + } + cl.compileCallExpr(call) + return + } + + panic(cl.errorf(stmt.X, "can't compile this expr stmt yet: %T", stmt.X)) +} + func (cl *compiler) compileForStmt(stmt *ast.ForStmt) { labelBreak := 
cl.newLabel() labelContinue := cl.newLabel() @@ -232,38 +258,43 @@ func (cl *compiler) compileIfStmt(stmt *ast.IfStmt) { } func (cl *compiler) compileAssignStmt(assign *ast.AssignStmt) { - if len(assign.Lhs) != 1 { - panic(cl.errorf(assign, "only single left operand is allowed in assignments")) - } if len(assign.Rhs) != 1 { panic(cl.errorf(assign, "only single right operand is allowed in assignments")) } - lhs := assign.Lhs[0] - rhs := assign.Rhs[0] - varname, ok := lhs.(*ast.Ident) - if !ok { - panic(cl.errorf(lhs, "can assign only to simple variables")) + for _, lhs := range assign.Lhs { + _, ok := lhs.(*ast.Ident) + if !ok { + panic(cl.errorf(lhs, "can assign only to simple variables")) + } } + rhs := assign.Rhs[0] cl.compileExpr(rhs) - typ := cl.ctx.Types.TypeOf(varname) if assign.Tok == token.DEFINE { - if _, ok := cl.locals[varname.String()]; ok { - panic(cl.errorf(lhs, "%s variable shadowing is not allowed", varname)) + for i := len(assign.Lhs) - 1; i >= 0; i-- { + varname := assign.Lhs[i].(*ast.Ident) + typ := cl.ctx.Types.TypeOf(varname) + if _, ok := cl.locals[varname.String()]; ok { + panic(cl.errorf(varname, "%s variable shadowing is not allowed", varname)) + } + if !cl.isSupportedType(typ) { + panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable")) + } + if len(cl.locals) == maxFuncLocals { + panic(cl.errorf(varname, "can't define %s: too many locals", varname)) + } + id := len(cl.locals) + cl.locals[varname.String()] = id + cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) } - if !cl.isSupportedType(typ) { - panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable")) - } - if len(cl.locals) == maxFuncLocals { - panic(cl.errorf(lhs, "can't define %s: too many locals", varname)) - } - id := len(cl.locals) - cl.locals[varname.String()] = id - cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) } else { - id := cl.getLocal(varname, varname.String()) - cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) + for i := len(assign.Lhs) - 1; i >= 0; i-- { + varname := assign.Lhs[i].(*ast.Ident) + typ := cl.ctx.Types.TypeOf(varname) + id := cl.getLocal(varname, varname.String()) + cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) + } } } @@ -279,6 +310,11 @@ func (cl *compiler) getLocal(v ast.Expr, varname string) int { } func (cl *compiler) compileReturnStmt(ret *ast.ReturnStmt) { + if cl.retType == voidType { + cl.emit(opReturn) + return + } + if ret.Results == nil { panic(cl.errorf(ret, "'naked' return statements are not allowed")) } @@ -471,6 +507,20 @@ func (cl *compiler) compileBuiltinCall(fn *ast.Ident, call *ast.CallExpr) { panic(cl.errorf(s, "can't compile len() with non-string argument yet")) } cl.emit(opStringLen) + + case `println`: + if len(call.Args) != 1 { + panic(cl.errorf(call, "only 1-arg form of println() is supported")) + } + funcName := "Print" + if typeIsInt(cl.ctx.Types.TypeOf(call.Args[0])) { + funcName = "PrintInt" + } + key := funcKey{qualifier: "builtin", name: funcName} + if !cl.compileNativeCall(key, 0, nil, call.Args) { + panic(cl.errorf(fn, "builtin.%s native func is not registered", funcName)) + } + default: panic(cl.errorf(fn, "can't compile %s() builtin function call yet", fn)) } @@ -498,19 +548,64 @@ func (cl *compiler) compileCallExpr(call *ast.CallExpr) { } else { key.qualifier = fn.Pkg().Path() } + variadic := 0 + if sig.Variadic() { + variadic = sig.Params().Len() - 1 + } + if !cl.compileNativeCall(key, variadic, expr, call.Args) { + 
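// A hedged sketch of a quasigo-compiled custom filter (the functions referenced through
// m["x"].Filter(...)) exercising what the compiler changes above allow inside such
// functions: an if statement plus the new 1-arg println builtin in statement position.
// It assumes the ruleguard quasigo environment registers the builtin print natives that
// compileBuiltinCall expects, and it uses the VarFilterContext/GetInterface helpers in
// the shape of ruleguard's documented custom-filter examples; treat exact names as
// assumptions.
//go:build ignore
// +build ignore

package gorules

import (
	"github.com/quasilyte/go-ruleguard/dsl"
	"github.com/quasilyte/go-ruleguard/dsl/types"
)

func implementsStringer(ctx *dsl.VarFilterContext) bool {
	stringer := ctx.GetInterface(`fmt.Stringer`)
	ok := types.Implements(ctx.Type, stringer) ||
		types.Implements(types.NewPointer(ctx.Type), stringer)
	if ok {
		println(`stringer match`) // the new 1-arg println builtin, handy when debugging filters
	}
	return ok
}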
panic(cl.errorf(call.Fun, "can't compile a call to %s func", key)) + } +} - if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok { - if expr != nil { - cl.compileExpr(expr) +func (cl *compiler) compileNativeCall(key funcKey, variadic int, expr ast.Expr, args []ast.Expr) bool { + funcID, ok := cl.ctx.Env.nameToNativeFuncID[key] + if !ok { + return false + } + if expr != nil { + cl.compileExpr(expr) + } + if len(args) == 1 { + // Check that it's not a f(g()) call, where g() returns + // a multi-value result; we can't compile that yet. + if call, ok := args[0].(*ast.CallExpr); ok { + results := cl.ctx.Types.TypeOf(call.Fun).(*types.Signature).Results() + if results != nil && results.Len() > 1 { + panic(cl.errorf(args[0], "can't pass tuple as a func argument")) + } } - for _, arg := range call.Args { - cl.compileExpr(arg) - } - cl.emit16(opCallNative, int(funcID)) - return } - panic(cl.errorf(call.Fun, "can't compile a call to %s func", key)) + normalArgs := args + var variadicArgs []ast.Expr + if variadic != 0 { + normalArgs = args[:variadic] + variadicArgs = args[variadic:] + } + + for _, arg := range normalArgs { + cl.compileExpr(arg) + } + if variadic != 0 { + for _, arg := range variadicArgs { + cl.compileExpr(arg) + // int-typed values should appear in the interface{}-typed + // objects slice, so we get all variadic args placed in one place. + if typeIsInt(cl.ctx.Types.TypeOf(arg)) { + cl.emit(opConvIntToIface) + } + } + if len(variadicArgs) > 255 { + panic(cl.errorf(expr, "too many variadic args")) + } + // Even if len(variadicArgs) is 0, we still need to overwrite + // the old variadicLen value, so the variadic func is not confused + // by some unrelated value. + cl.emit8(opSetVariadicLen, len(variadicArgs)) + } + + cl.emit16(opCallNative, int(funcID)) + return true } func (cl *compiler) compileUnaryOp(op opcode, e *ast.UnaryExpr) { @@ -681,6 +776,10 @@ func (cl *compiler) isUncondJump(op opcode) bool { } func (cl *compiler) isSupportedType(typ types.Type) bool { + if typ == voidType { + return true + } + switch typ := typ.Underlying().(type) { case *types.Pointer: // 1. Pointers to structs are supported. 
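// A rough sketch of the consumer side of the variadic protocol compiled above: fixed
// arguments are pushed first, the `...` tail is pushed one by one (ints boxed via
// ConvIntToIface), and SetVariadicLen records how many of them there are, so a native
// function can drain the tail with ValueStack.PopVariadic (added later in this patch).
// The assumption here is that natives receive the *quasigo.ValueStack to pop from and
// push to; the Sprintf binding itself is illustrative.
package quasigosketch

import (
	"fmt"

	"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
)

// sprintfNative sketches what a fmt.Sprintf binding could look like.
func sprintfNative(stack *quasigo.ValueStack) {
	args := stack.PopVariadic()    // the interface{}-typed `...` tail, in call order
	format := stack.Pop().(string) // the fixed argument, pushed before the tail
	stack.Push(fmt.Sprintf(format, args...))
}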
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go index 192cf0710..8859dee9c 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go @@ -47,6 +47,8 @@ func disasm(env *Env, fn *Func) string { index := int(code[pc+1]) arg = index comment = dbg.localNames[index] + case opSetVariadicLen: + arg = int(code[pc+1]) case opPushConst: arg = int(code[pc+1]) comment = fmt.Sprintf("value=%#v", fn.constants[code[pc+1]]) diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go index afc000ea3..da16455f6 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go @@ -99,6 +99,10 @@ func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult { stack.PushInt(fn.intConstants[id]) pc += 2 + case opConvIntToIface: + stack.Push(stack.PopInt()) + pc++ + case opPushTrue: stack.Push(true) pc++ @@ -114,7 +118,12 @@ func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult { return CallResult{value: stack.top()} case opReturnIntTop: return CallResult{scalarValue: uint64(stack.topInt())} + case opReturn: + return CallResult{} + case opSetVariadicLen: + stack.variadicLen = int(code[pc+1]) + pc += 2 case opCallNative: id := decode16(code, pc+1) fn := env.nativeFuncs[id].mappedFunc diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go index fde48b7cd..6f8f26d58 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go @@ -1,3 +1,4 @@ +//go:build main // +build main package main @@ -25,6 +26,8 @@ var opcodePrototypes = []opcodeProto{ {"PushConst", "op constid:u8", "() -> (const)"}, {"PushIntConst", "op constid:u8", "() -> (const:int)"}, + {"ConvIntToIface", "op", "(value:int) -> (value)"}, + {"SetLocal", "op index:u8", "(value) -> ()"}, {"SetIntLocal", "op index:u8", "(value:int) -> ()"}, {"IncLocal", "op index:u8", stackUnchanged}, @@ -34,18 +37,20 @@ var opcodePrototypes = []opcodeProto{ {"ReturnIntTop", "op", "(value) -> (value)"}, {"ReturnFalse", "op", stackUnchanged}, {"ReturnTrue", "op", stackUnchanged}, + {"Return", "op", stackUnchanged}, {"Jump", "op offset:i16", stackUnchanged}, {"JumpFalse", "op offset:i16", "(cond:bool) -> ()"}, {"JumpTrue", "op offset:i16", "(cond:bool) -> ()"}, + {"SetVariadicLen", "op len:u8", stackUnchanged}, {"CallNative", "op funcid:u16", "(args...) 
-> (results...)"}, {"IsNil", "op", "(value) -> (result:bool)"}, {"IsNotNil", "op", "(value) -> (result:bool)"}, {"Not", "op", "(value:bool) -> (result:bool)"}, - + {"EqInt", "op", "(x:int y:int) -> (result:bool)"}, {"NotEqInt", "op", "(x:int y:int) -> (result:bool)"}, {"GtInt", "op", "(x:int y:int) -> (result:bool)"}, diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go index 27dfc1f67..27dfc4947 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go @@ -19,41 +19,44 @@ func _() { _ = x[opPushTrue-8] _ = x[opPushConst-9] _ = x[opPushIntConst-10] - _ = x[opSetLocal-11] - _ = x[opSetIntLocal-12] - _ = x[opIncLocal-13] - _ = x[opDecLocal-14] - _ = x[opReturnTop-15] - _ = x[opReturnIntTop-16] - _ = x[opReturnFalse-17] - _ = x[opReturnTrue-18] - _ = x[opJump-19] - _ = x[opJumpFalse-20] - _ = x[opJumpTrue-21] - _ = x[opCallNative-22] - _ = x[opIsNil-23] - _ = x[opIsNotNil-24] - _ = x[opNot-25] - _ = x[opEqInt-26] - _ = x[opNotEqInt-27] - _ = x[opGtInt-28] - _ = x[opGtEqInt-29] - _ = x[opLtInt-30] - _ = x[opLtEqInt-31] - _ = x[opEqString-32] - _ = x[opNotEqString-33] - _ = x[opConcat-34] - _ = x[opAdd-35] - _ = x[opSub-36] - _ = x[opStringSlice-37] - _ = x[opStringSliceFrom-38] - _ = x[opStringSliceTo-39] - _ = x[opStringLen-40] + _ = x[opConvIntToIface-11] + _ = x[opSetLocal-12] + _ = x[opSetIntLocal-13] + _ = x[opIncLocal-14] + _ = x[opDecLocal-15] + _ = x[opReturnTop-16] + _ = x[opReturnIntTop-17] + _ = x[opReturnFalse-18] + _ = x[opReturnTrue-19] + _ = x[opReturn-20] + _ = x[opJump-21] + _ = x[opJumpFalse-22] + _ = x[opJumpTrue-23] + _ = x[opSetVariadicLen-24] + _ = x[opCallNative-25] + _ = x[opIsNil-26] + _ = x[opIsNotNil-27] + _ = x[opNot-28] + _ = x[opEqInt-29] + _ = x[opNotEqInt-30] + _ = x[opGtInt-31] + _ = x[opGtEqInt-32] + _ = x[opLtInt-33] + _ = x[opLtEqInt-34] + _ = x[opEqString-35] + _ = x[opNotEqString-36] + _ = x[opConcat-37] + _ = x[opAdd-38] + _ = x[opSub-39] + _ = x[opStringSlice-40] + _ = x[opStringSliceFrom-41] + _ = x[opStringSliceTo-42] + _ = x[opStringLen-43] } -const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueJumpJumpFalseJumpTrueCallNativeIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen" +const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstConvIntToIfaceSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueReturnJumpJumpFalseJumpTrueSetVariadicLenCallNativeIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen" -var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 101, 112, 120, 128, 137, 149, 160, 170, 174, 183, 191, 201, 206, 214, 217, 222, 230, 235, 242, 247, 254, 262, 273, 279, 282, 285, 296, 311, 324, 333} +var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 107, 115, 126, 134, 142, 151, 163, 174, 184, 190, 194, 203, 211, 225, 235, 240, 248, 251, 256, 264, 269, 276, 281, 288, 296, 307, 313, 316, 319, 330, 345, 358, 367} func (i opcode) String() string { if i >= opcode(len(_opcode_index)-1) { diff --git 
a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go index 268b42a1e..a12aecd5d 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go @@ -48,125 +48,137 @@ const ( // Stack effect: () -> (const:int) opPushIntConst opcode = 10 - // Encoding: 0x0b index:u8 (width=2) - // Stack effect: (value) -> () - opSetLocal opcode = 11 + // Encoding: 0x0b (width=1) + // Stack effect: (value:int) -> (value) + opConvIntToIface opcode = 11 // Encoding: 0x0c index:u8 (width=2) - // Stack effect: (value:int) -> () - opSetIntLocal opcode = 12 + // Stack effect: (value) -> () + opSetLocal opcode = 12 // Encoding: 0x0d index:u8 (width=2) - // Stack effect: unchanged - opIncLocal opcode = 13 + // Stack effect: (value:int) -> () + opSetIntLocal opcode = 13 // Encoding: 0x0e index:u8 (width=2) // Stack effect: unchanged - opDecLocal opcode = 14 + opIncLocal opcode = 14 - // Encoding: 0x0f (width=1) - // Stack effect: (value) -> (value) - opReturnTop opcode = 15 + // Encoding: 0x0f index:u8 (width=2) + // Stack effect: unchanged + opDecLocal opcode = 15 // Encoding: 0x10 (width=1) // Stack effect: (value) -> (value) - opReturnIntTop opcode = 16 + opReturnTop opcode = 16 // Encoding: 0x11 (width=1) - // Stack effect: unchanged - opReturnFalse opcode = 17 + // Stack effect: (value) -> (value) + opReturnIntTop opcode = 17 // Encoding: 0x12 (width=1) // Stack effect: unchanged - opReturnTrue opcode = 18 + opReturnFalse opcode = 18 - // Encoding: 0x13 offset:i16 (width=3) + // Encoding: 0x13 (width=1) // Stack effect: unchanged - opJump opcode = 19 + opReturnTrue opcode = 19 - // Encoding: 0x14 offset:i16 (width=3) - // Stack effect: (cond:bool) -> () - opJumpFalse opcode = 20 + // Encoding: 0x14 (width=1) + // Stack effect: unchanged + opReturn opcode = 20 // Encoding: 0x15 offset:i16 (width=3) + // Stack effect: unchanged + opJump opcode = 21 + + // Encoding: 0x16 offset:i16 (width=3) // Stack effect: (cond:bool) -> () - opJumpTrue opcode = 21 + opJumpFalse opcode = 22 - // Encoding: 0x16 funcid:u16 (width=3) + // Encoding: 0x17 offset:i16 (width=3) + // Stack effect: (cond:bool) -> () + opJumpTrue opcode = 23 + + // Encoding: 0x18 len:u8 (width=2) + // Stack effect: unchanged + opSetVariadicLen opcode = 24 + + // Encoding: 0x19 funcid:u16 (width=3) // Stack effect: (args...) -> (results...) 
- opCallNative opcode = 22 - - // Encoding: 0x17 (width=1) - // Stack effect: (value) -> (result:bool) - opIsNil opcode = 23 - - // Encoding: 0x18 (width=1) - // Stack effect: (value) -> (result:bool) - opIsNotNil opcode = 24 - - // Encoding: 0x19 (width=1) - // Stack effect: (value:bool) -> (result:bool) - opNot opcode = 25 + opCallNative opcode = 25 // Encoding: 0x1a (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opEqInt opcode = 26 + // Stack effect: (value) -> (result:bool) + opIsNil opcode = 26 // Encoding: 0x1b (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opNotEqInt opcode = 27 + // Stack effect: (value) -> (result:bool) + opIsNotNil opcode = 27 // Encoding: 0x1c (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opGtInt opcode = 28 + // Stack effect: (value:bool) -> (result:bool) + opNot opcode = 28 // Encoding: 0x1d (width=1) // Stack effect: (x:int y:int) -> (result:bool) - opGtEqInt opcode = 29 + opEqInt opcode = 29 // Encoding: 0x1e (width=1) // Stack effect: (x:int y:int) -> (result:bool) - opLtInt opcode = 30 + opNotEqInt opcode = 30 // Encoding: 0x1f (width=1) // Stack effect: (x:int y:int) -> (result:bool) - opLtEqInt opcode = 31 + opGtInt opcode = 31 // Encoding: 0x20 (width=1) - // Stack effect: (x:string y:string) -> (result:bool) - opEqString opcode = 32 + // Stack effect: (x:int y:int) -> (result:bool) + opGtEqInt opcode = 32 // Encoding: 0x21 (width=1) - // Stack effect: (x:string y:string) -> (result:bool) - opNotEqString opcode = 33 + // Stack effect: (x:int y:int) -> (result:bool) + opLtInt opcode = 33 // Encoding: 0x22 (width=1) - // Stack effect: (x:string y:string) -> (result:string) - opConcat opcode = 34 + // Stack effect: (x:int y:int) -> (result:bool) + opLtEqInt opcode = 34 // Encoding: 0x23 (width=1) - // Stack effect: (x:int y:int) -> (result:int) - opAdd opcode = 35 + // Stack effect: (x:string y:string) -> (result:bool) + opEqString opcode = 35 // Encoding: 0x24 (width=1) - // Stack effect: (x:int y:int) -> (result:int) - opSub opcode = 36 + // Stack effect: (x:string y:string) -> (result:bool) + opNotEqString opcode = 36 // Encoding: 0x25 (width=1) - // Stack effect: (s:string from:int to:int) -> (result:string) - opStringSlice opcode = 37 + // Stack effect: (x:string y:string) -> (result:string) + opConcat opcode = 37 // Encoding: 0x26 (width=1) - // Stack effect: (s:string from:int) -> (result:string) - opStringSliceFrom opcode = 38 + // Stack effect: (x:int y:int) -> (result:int) + opAdd opcode = 38 // Encoding: 0x27 (width=1) - // Stack effect: (s:string to:int) -> (result:string) - opStringSliceTo opcode = 39 + // Stack effect: (x:int y:int) -> (result:int) + opSub opcode = 39 // Encoding: 0x28 (width=1) + // Stack effect: (s:string from:int to:int) -> (result:string) + opStringSlice opcode = 40 + + // Encoding: 0x29 (width=1) + // Stack effect: (s:string from:int) -> (result:string) + opStringSliceFrom opcode = 41 + + // Encoding: 0x2a (width=1) + // Stack effect: (s:string to:int) -> (result:string) + opStringSliceTo opcode = 42 + + // Encoding: 0x2b (width=1) // Stack effect: (s:string) -> (result:int) - opStringLen opcode = 40 + opStringLen opcode = 43 ) type opcodeInfo struct { @@ -186,6 +198,7 @@ var opcodeInfoTable = [256]opcodeInfo{ opPushTrue: {width: 1}, opPushConst: {width: 2}, opPushIntConst: {width: 2}, + opConvIntToIface: {width: 1}, opSetLocal: {width: 2}, opSetIntLocal: {width: 2}, opIncLocal: {width: 2}, @@ -194,9 +207,11 @@ var opcodeInfoTable = [256]opcodeInfo{ opReturnIntTop: {width: 
1}, opReturnFalse: {width: 1}, opReturnTrue: {width: 1}, + opReturn: {width: 1}, opJump: {width: 3}, opJumpFalse: {width: 3}, opJumpTrue: {width: 3}, + opSetVariadicLen: {width: 2}, opCallNative: {width: 3}, opIsNil: {width: 1}, opIsNotNil: {width: 1}, diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go index 7d457538d..0f66bde2a 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go @@ -138,8 +138,9 @@ type Func struct { // If int was pushed with PushInt(), it should be retrieved by PopInt(). // It's a bad idea to do a Push() and then PopInt() and vice-versa. type ValueStack struct { - objects []interface{} - ints []int + objects []interface{} + ints []int + variadicLen int } // Pop removes the top stack element and returns it. @@ -157,6 +158,19 @@ func (s *ValueStack) PopInt() int { return x } +// PopVariadic removes the `...` argument and returns it as a slice. +// +// Slice elements are in the order they were passed to the function, +// for example, a call Sprintf("%s:%d", filename, line) returns +// the slice []interface{filename, line}. +func (s *ValueStack) PopVariadic() []interface{} { + to := len(s.objects) + from := to - s.variadicLen + xs := s.objects[from:to] + s.objects = s.objects[:from] + return xs +} + // Push adds x to the stack. // Important: for int-typed values, use PushInt. func (s *ValueStack) Push(x interface{}) { s.objects = append(s.objects, x) } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go index 7e0f40f3f..2642bb7b9 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go @@ -93,15 +93,31 @@ type RunContext struct { DebugImports bool DebugPrint func(string) - Types *types.Info - Sizes types.Sizes - Fset *token.FileSet - Report func(rule GoRuleInfo, n ast.Node, msg string, s *Suggestion) - Pkg *types.Package + Types *types.Info + Sizes types.Sizes + Fset *token.FileSet + Pkg *types.Package + + // Report is a function that is called for every successful ruleguard match. + // The pointer to ReportData is reused, it should not be kept. + // If you want to keep it after Report() returns, make a copy. + Report func(*ReportData) GoVersion GoVersion } +type ReportData struct { + RuleInfo GoRuleInfo + Node ast.Node + Message string + Suggestion *Suggestion + + // Experimental: fields below are part of the experiment. + // They'll probably be removed or changed over time. 
+ + Func *ast.FuncDecl +} + type Suggestion struct { From token.Pos To token.Pos diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go index 09cbd4060..9adeb2b06 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go @@ -2,27 +2,34 @@ package ruleguard import ( "bytes" + "context" "fmt" "go/ast" "go/build" "go/printer" "io/ioutil" "path/filepath" + "reflect" "sort" "strconv" "strings" - "github.com/quasilyte/go-ruleguard/internal/gogrep" - "github.com/quasilyte/go-ruleguard/nodetag" "github.com/quasilyte/go-ruleguard/ruleguard/goutil" + "github.com/quasilyte/go-ruleguard/ruleguard/profiling" + "github.com/quasilyte/gogrep" + "github.com/quasilyte/gogrep/nodetag" ) type rulesRunner struct { state *engineState + bgContext context.Context + ctx *RunContext rules *goRuleSet + reportData ReportData + gogrepState gogrep.MatcherState importer *goImporter @@ -58,6 +65,7 @@ func newRulesRunner(ctx *RunContext, buildContext *build.Context, state *engineS gogrepState := gogrep.NewMatcherState() gogrepState.Types = ctx.Types rr := &rulesRunner{ + bgContext: context.Background(), ctx: ctx, importer: importer, rules: rules, @@ -131,8 +139,8 @@ func (rr *rulesRunner) run(f *ast.File) error { var inspector astWalker inspector.nodePath = &rr.nodePath inspector.filterParams = &rr.filterParams - inspector.Walk(f, func(n ast.Node) { - rr.runRules(n) + inspector.Walk(f, func(n ast.Node, tag nodetag.Value) { + rr.runRules(n, tag) }) } @@ -206,14 +214,26 @@ func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { } } -func (rr *rulesRunner) runRules(n ast.Node) { - tag := nodetag.FromNode(n) +func (rr *rulesRunner) runRules(n ast.Node, tag nodetag.Value) { + // profiling.LabelsEnabled is constant, so labels-related + // code should be a no-op inside normal build. + // To enable labels, use "-tags pproflabels" build tag. + for _, rule := range rr.rules.universal.rulesByTag[tag] { + if profiling.LabelsEnabled { + profiling.EnterWithLabels(rr.bgContext, rule.group.Name) + } + matched := false rule.pat.MatchNode(&rr.gogrepState, n, func(m gogrep.MatchData) { matched = rr.handleMatch(rule, m) }) - if matched { + + if profiling.LabelsEnabled { + profiling.Leave(rr.bgContext) + } + + if matched && !multiMatchTags[tag] { break } } @@ -291,7 +311,12 @@ func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m commentMatchData Group: rule.base.group, Line: rule.base.line, } - rr.ctx.Report(info, node, message, suggestion) + rr.reportData.RuleInfo = info + rr.reportData.Node = node + rr.reportData.Message = message + rr.reportData.Suggestion = suggestion + + rr.ctx.Report(&rr.reportData) return true } @@ -322,7 +347,14 @@ func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool { Group: rule.group, Line: rule.line, } - rr.ctx.Report(info, node, message, suggestion) + rr.reportData.RuleInfo = info + rr.reportData.Node = node + rr.reportData.Message = message + rr.reportData.Suggestion = suggestion + + rr.reportData.Func = rr.filterParams.currentFunc + + rr.ctx.Report(&rr.reportData) return true } @@ -355,20 +387,45 @@ func (rr *rulesRunner) renderMessage(msg string, m matchData, truncate bool) str for _, c := range capture { n := c.Node + // Some captured nodes are typed, but nil. + // We can't really get their text, so skip them here. 
+ // For example, pattern `func $_() $results { $*_ }` may + // match a nil *ast.FieldList for $results if executed + // against a function with no results. + if reflect.ValueOf(n).IsNil() && !gogrep.IsEmptyNodeSlice(n) { + continue + } key := "$" + c.Name if !strings.Contains(msg, key) { continue } buf.Reset() buf.Write(rr.nodeText(n)) - // Don't interpolate strings that are too long. - var replacement string - if truncate && buf.Len() > 60 { - replacement = key - } else { - replacement = buf.String() + replacement := buf.String() + if truncate { + replacement = truncateText(replacement, 60) } msg = strings.ReplaceAll(msg, key, replacement) } return msg } + +func truncateText(s string, maxLen int) string { + const placeholder = "<...>" + if len(s) <= maxLen-len(placeholder) { + return s + } + maxLen -= len(placeholder) + leftLen := maxLen / 2 + rightLen := (maxLen % 2) + leftLen + left := s[:leftLen] + right := s[len(s)-rightLen:] + return left + placeholder + right +} + +var multiMatchTags = [nodetag.NumBuckets]bool{ + nodetag.BlockStmt: true, + nodetag.CaseClause: true, + nodetag.CommClause: true, + nodetag.File: true, +} diff --git a/vendor/github.com/quasilyte/gogrep/.gitignore b/vendor/github.com/quasilyte/gogrep/.gitignore new file mode 100644 index 000000000..ec560f1c9 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/.gitignore @@ -0,0 +1,4 @@ +.idea +.vscode +coverage.txt +bin diff --git a/vendor/github.com/quasilyte/gogrep/.golangci.yml b/vendor/github.com/quasilyte/gogrep/.golangci.yml new file mode 100644 index 000000000..16d03c54d --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/.golangci.yml @@ -0,0 +1,49 @@ +{ + "run": { + # timeout for analysis, e.g. 30s, 5m, default is 1m + "deadline": "3m", + }, + "fast": false, + "linters": { + "enable": [ + "deadcode", + "errcheck", + "gas", + "gocritic", + "gofmt", + "goimports", + "revive", + "govet", + "gosimple", + "ineffassign", + "megacheck", + "misspell", + "nakedret", + "staticcheck", + "structcheck", + "typecheck", + "unconvert", + "unused", + "varcheck", + ], + }, + "disable": [ + "depguard", + "dupl", + "gocyclo", + "interfacer", + "lll", + "maligned", + "prealloc", + ], + "linters-settings": { + "gocritic": { + "enabled-tags": [ + "style", + "diagnostic", + "performance", + "experimental", + ], + }, + }, +} diff --git a/vendor/github.com/quasilyte/gogrep/LICENSE b/vendor/github.com/quasilyte/gogrep/LICENSE new file mode 100644 index 000000000..575b56ae1 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/LICENSE @@ -0,0 +1,33 @@ +BSD 3-Clause License + +Copyright (c) 2021, Iskander (Alex) Sharipov + +Originally based on the Daniel Martí code | Copyright (c) 2017, Daniel Martí. All rights reserved. +See https://github.com/mvdan/gogrep + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/quasilyte/gogrep/Makefile b/vendor/github.com/quasilyte/gogrep/Makefile new file mode 100644 index 000000000..d05331f42 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/Makefile @@ -0,0 +1,19 @@ +GOPATH_DIR=`go env GOPATH` + +test: + go test -count 2 -coverpkg=./... -coverprofile=coverage.txt -covermode=atomic ./... + go test -bench=. ./... + @echo "everything is OK" + +ci-lint: + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOPATH_DIR)/bin v1.43.0 + $(GOPATH_DIR)/bin/golangci-lint run ./... + go install github.com/quasilyte/go-consistent@latest + $(GOPATH_DIR)/bin/go-consistent . ./internal/... ./nodetag/... ./filters/... + @echo "everything is OK" + +lint: + golangci-lint run ./... + @echo "everything is OK" + +.PHONY: ci-lint lint test diff --git a/vendor/github.com/quasilyte/gogrep/README.md b/vendor/github.com/quasilyte/gogrep/README.md new file mode 100644 index 000000000..b6c2c47c1 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/README.md @@ -0,0 +1,41 @@ +![logo](https://github.com/quasilyte/vscode-gogrep/blob/master/docs/logo.png?raw=true) + +![Build Status](https://github.com/quasilyte/gogrep/workflows/Go/badge.svg) +[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/quasilyte/gogrep)](https://pkg.go.dev/github.com/quasilyte/gogrep) +[![Go Report Card](https://goreportcard.com/badge/github.com/quasilyte/gogrep)](https://goreportcard.com/report/github.com/quasilyte/gogrep) +![Code Coverage](https://codecov.io/gh/quasilyte/gogrep/branch/master/graph/badge.svg) + +# gogrep + +This is an attempt to move a modified [gogrep](https://github.com/mvdan/gogrep) from the [go-ruleguard](https://github.com/quasilyte/go-ruleguard) project, so it can be used independently. + +This repository contains two Go modules. One for the gogrep library and the second one for the command-line tool. + +## gogrep as a library + +To get a gogrep library module, install the root Go module. + +```bash +$ go get github.com/quasilyte/gogrep +``` + +## gogrep as a command-line utility + +To get a gogrep command-line tool, install the `cmd/gogrep` Go submodule. + +```bash +$ go install github.com/quasilyte/cmd/gogrep +``` + +See [docs/gogrep_cli.md](_docs/gogrep_cli.md) to learn how to use it. + +## Used by + +A gogrep library is used by: + +* [go-ruleguard](https://github.com/quasilyte/go-ruleguard) +* [gocorpus](https://github.com/quasilyte/gocorpus) + +## Acknowledgements + +The original gogrep is written by the [Daniel Martí](https://github.com/mvdan). 
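
For orientation, here is a minimal, hypothetical sketch (not part of the patch itself) of driving the newly extracted gogrep module directly, based on the `CompileConfig`, `Compile`, `NewMatcherState`, and `Pattern.MatchNode` API visible elsewhere in this change; the file name, source snippet, pattern string, and the `MatchData.Node` field used for reporting are illustrative assumptions rather than anything this patch adds:

```go
// Sketch only: compile one syntactic pattern and report where it matches
// in a single parsed file. Names and the pattern are illustrative.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"github.com/quasilyte/gogrep"
)

func main() {
	fset := token.NewFileSet()
	src := "package p\nfunc f(cond bool) { if cond == true { } }"
	file, err := parser.ParseFile(fset, "example.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Compile a purely syntactic pattern; WithTypes and Imports (see
	// CompileConfig later in this patch) are only needed for type-aware matching.
	pat, _, err := gogrep.Compile(gogrep.CompileConfig{
		Fset: fset,
		Src:  "$x == true",
	})
	if err != nil {
		panic(err)
	}

	state := gogrep.NewMatcherState()
	ast.Inspect(file, func(n ast.Node) bool {
		if n == nil {
			return false
		}
		pat.MatchNode(&state, n, func(m gogrep.MatchData) {
			// MatchData.Node is assumed here to reference the matched node.
			fmt.Println("match at", fset.Position(m.Node.Pos()))
		})
		return true
	})
}
```

This mirrors how the updated ruleguard runner uses the library: patterns are compiled once up front, and a single `MatcherState` is reused across all visited nodes.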
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go b/vendor/github.com/quasilyte/gogrep/compile.go similarity index 95% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go rename to vendor/github.com/quasilyte/gogrep/compile.go index 7e267a530..c79f290ae 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go +++ b/vendor/github.com/quasilyte/gogrep/compile.go @@ -5,7 +5,7 @@ import ( "go/ast" "go/token" - "github.com/quasilyte/go-ruleguard/internal/stdinfo" + "github.com/quasilyte/gogrep/internal/stdinfo" ) type compileError string @@ -13,18 +13,18 @@ type compileError string func (e compileError) Error() string { return string(e) } type compiler struct { + config CompileConfig + prog *program stringIndexes map[string]uint8 ifaceIndexes map[interface{}]uint8 - strict bool - fset *token.FileSet info *PatternInfo insideStmtList bool } -func (c *compiler) Compile(fset *token.FileSet, root ast.Node, info *PatternInfo, strict bool) (p *program, err error) { +func (c *compiler) Compile(root ast.Node, info *PatternInfo) (p *program, err error) { defer func() { if err != nil { return @@ -41,8 +41,6 @@ func (c *compiler) Compile(fset *token.FileSet, root ast.Node, info *PatternInfo }() c.info = info - c.fset = fset - c.strict = strict c.prog = &program{ insts: make([]instruction, 0, 8), } @@ -59,7 +57,7 @@ func (c *compiler) Compile(fset *token.FileSet, root ast.Node, info *PatternInfo } func (c *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError { - loc := c.fset.Position(n.Pos()) + loc := c.config.Fset.Position(n.Pos()) message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...)) return compileError(message) } @@ -158,11 +156,12 @@ func (c *compiler) compileOptFieldList(n *ast.FieldList) { // `func (...) $*result` - result could be anything // `func (...) $result` - result is a field list of 1 element info := decodeWildName(ident.Name) - if info.Seq { + switch { + case info.Seq: c.compileWildIdent(ident, true) - } else if info.Name == "_" { + case info.Name == "_": c.emitInstOp(opFieldNode) - } else { + default: c.emitInst(instruction{ op: opNamedFieldNode, valueIndex: c.internVar(n, info.Name), @@ -231,7 +230,7 @@ func (c *compiler) compileValueSpec(spec *ast.ValueSpec) { } c.emitInstOp(opEnd) if spec.Type != nil { - c.compileExpr(spec.Type) + c.compileOptExpr(spec.Type) } if len(spec.Values) != 0 { for _, v := range spec.Values { @@ -361,7 +360,7 @@ func (c *compiler) compileExpr(n ast.Expr) { } func (c *compiler) compileBasicLit(n *ast.BasicLit) { - if !c.strict { + if !c.config.Strict { v := literalValue(n) if v == nil { panic(c.errorf(n, "can't convert %s (%s) value", n.Value, n.Kind)) @@ -494,22 +493,41 @@ func (c *compiler) compileCallExpr(n *ast.CallExpr) { // can look like `fmt.Sprint`. It will be compiled as a special // selector expression that requires `fmt` to be a package as opposed // to only check that it's an identifier with "fmt" value. 
-func (c *compiler) compileSymbol(fn ast.Expr) { - if e, ok := fn.(*ast.SelectorExpr); ok { - if ident, ok := e.X.(*ast.Ident); ok && stdinfo.Packages[ident.Name] != "" { - c.emitInst(instruction{ - op: opSimpleSelectorExpr, - valueIndex: c.internString(e.Sel, e.Sel.String()), - }) - c.emitInst(instruction{ - op: opStdlibPkg, - valueIndex: c.internString(ident, ident.Name), - }) +func (c *compiler) compileSymbol(sym ast.Expr) { + compilePkgSymbol := func(c *compiler, sym ast.Expr) bool { + e, ok := sym.(*ast.SelectorExpr) + if !ok { + return false + } + ident, ok := e.X.(*ast.Ident) + if !ok || isWildName(e.Sel.Name) { + return false + } + pkgPath := c.config.Imports[ident.Name] + if pkgPath == "" && stdinfo.Packages[ident.Name] != "" { + pkgPath = stdinfo.Packages[ident.Name] + } + if pkgPath == "" { + return false + } + c.emitInst(instruction{ + op: opSimpleSelectorExpr, + valueIndex: c.internString(e.Sel, e.Sel.String()), + }) + c.emitInst(instruction{ + op: opPkg, + valueIndex: c.internString(ident, pkgPath), + }) + return true + } + + if c.config.WithTypes { + if compilePkgSymbol(c, sym) { return } } - c.compileExpr(fn) + c.compileExpr(sym) } func (c *compiler) compileUnaryExpr(n *ast.UnaryExpr) { @@ -749,9 +767,12 @@ func (c *compiler) compileAssignStmt(n *ast.AssignStmt) { func (c *compiler) compileBlockStmt(n *ast.BlockStmt) { c.emitInstOp(opBlockStmt) + insideStmtList := c.insideStmtList + c.insideStmtList = true for _, elt := range n.List { c.compileStmt(elt) } + c.insideStmtList = insideStmtList c.emitInstOp(opEnd) } diff --git a/vendor/github.com/quasilyte/gogrep/compile_import.go b/vendor/github.com/quasilyte/gogrep/compile_import.go new file mode 100644 index 000000000..ab0dd12a7 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/compile_import.go @@ -0,0 +1,57 @@ +package gogrep + +import ( + "errors" + "fmt" + "strings" + "unicode" + "unicode/utf8" +) + +func compileImportPattern(config CompileConfig) (*Pattern, PatternInfo, error) { + // TODO: figure out how to compile it as a part of a normal pattern compilation? + // This is an adhoc solution to a problem. 
+ + readIdent := func(s string) (varname, rest string) { + first := true + var offset int + for _, ch := range s { + ok := unicode.IsLetter(ch) || + ch == '_' || + (!first && unicode.IsDigit(ch)) + if !ok { + break + } + offset += utf8.RuneLen(ch) + first = false + } + return s[:offset], s[offset:] + } + + info := newPatternInfo() + src := config.Src + src = src[len("import $"):] + if src == "" { + return nil, info, errors.New("expected ident after $, found EOF") + } + varname, rest := readIdent(src) + if strings.TrimSpace(rest) != "" { + return nil, info, fmt.Errorf("unexpected %s", rest) + } + var p program + if varname != "_" { + info.Vars[src] = struct{}{} + p.strings = []string{varname} + p.insts = []instruction{ + {op: opImportDecl}, + {op: opNamedNodeSeq, valueIndex: 0}, + {op: opEnd}, + } + } else { + p.insts = []instruction{ + {op: opAnyImportDecl}, + } + } + m := matcher{prog: &p, insts: p.insts} + return &Pattern{m: &m}, info, nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go b/vendor/github.com/quasilyte/gogrep/gen_operations.go similarity index 96% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go rename to vendor/github.com/quasilyte/gogrep/gen_operations.go index d01d55b45..4f70b0cbb 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go +++ b/vendor/github.com/quasilyte/gogrep/gen_operations.go @@ -1,3 +1,4 @@ +//go:build main // +build main package main @@ -37,7 +38,7 @@ var opPrototypes = []operationProto{ {name: "StrictComplexLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, {name: "Ident", tag: "Ident", valueIndex: "strings | ident name"}, - {name: "StdlibPkg", tag: "Ident", valueIndex: "strings | package name"}, + {name: "Pkg", tag: "Ident", valueIndex: "strings | package path"}, {name: "IndexExpr", tag: "IndexExpr", args: "x expr"}, @@ -77,15 +78,15 @@ var opPrototypes = []operationProto{ {name: "ParenExpr", tag: "ParenExpr", args: "x"}, { - name: "ArgList", - args: "exprs...", + name: "ArgList", + args: "exprs...", example: "1, 2, 3", }, { - name: "SimpleArgList", - note: "Like ArgList, but pattern contains no $*", - args: "exprs[]", - value: "int | slice len", + name: "SimpleArgList", + note: "Like ArgList, but pattern contains no $*", + args: "exprs[]", + value: "int | slice len", example: "1, 2, 3", }, @@ -174,6 +175,9 @@ var opPrototypes = []operationProto{ {name: "VarDecl", tag: "GenDecl", args: "valuespecs..."}, {name: "TypeDecl", tag: "GenDecl", args: "typespecs..."}, + {name: "AnyImportDecl", tag: "GenDecl"}, + {name: "ImportDecl", tag: "GenDecl", args: "importspecs..."}, + {name: "EmptyPackage", tag: "File", args: "name"}, } @@ -210,7 +214,7 @@ var fileTemplate = template.Must(template.New("operations.go").Parse(`// Code ge package gogrep import ( - "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/gogrep/nodetag" ) //go:generate stringer -type=operation -trimprefix=op diff --git a/vendor/github.com/quasilyte/gogrep/go.mod b/vendor/github.com/quasilyte/gogrep/go.mod new file mode 100644 index 000000000..3c76dc5e1 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/go.mod @@ -0,0 +1,8 @@ +module github.com/quasilyte/gogrep + +go 1.16 + +require ( + github.com/go-toolsmith/astequal v1.0.1 + github.com/google/go-cmp v0.5.6 +) diff --git a/vendor/github.com/quasilyte/gogrep/go.sum b/vendor/github.com/quasilyte/gogrep/go.sum new file mode 100644 index 000000000..25c3bbb3e --- /dev/null +++ 
b/vendor/github.com/quasilyte/gogrep/go.sum @@ -0,0 +1,8 @@ +github.com/go-toolsmith/astequal v1.0.1 h1:JbSszi42Jiqu36Gnf363HWS9MTEAz67vTQLponh3Moc= +github.com/go-toolsmith/astequal v1.0.1/go.mod h1:4oGA3EZXTVItV/ipGiOx7NWkY5veFfcsOJVS2YxltLw= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go b/vendor/github.com/quasilyte/gogrep/gogrep.go similarity index 58% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go rename to vendor/github.com/quasilyte/gogrep/gogrep.go index ea054f334..1a91e49ff 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go +++ b/vendor/github.com/quasilyte/gogrep/gogrep.go @@ -4,8 +4,9 @@ import ( "go/ast" "go/token" "go/types" + "strings" - "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/gogrep/nodetag" ) func IsEmptyNodeSlice(n ast.Node) bool { @@ -74,14 +75,40 @@ func (p *Pattern) Clone() *Pattern { return &clone } -func Compile(fset *token.FileSet, src string, strict bool) (*Pattern, PatternInfo, error) { +type CompileConfig struct { + Fset *token.FileSet + + // Src is a gogrep pattern expression string. + Src string + + // When strict is false, gogrep may consider 0xA and 10 to be identical. + // If true, a compiled pattern will require a full syntax match. + Strict bool + + // WithTypes controls whether gogrep would have types.Info during the pattern execution. + // If set to true, it will compile a pattern to a potentially more precise form, where + // fmt.Printf maps to the stdlib function call but not Printf method call on some + // random fmt variable. + WithTypes bool + + // Imports specifies packages that should be recognized for the type-aware matching. + // It maps a package name to a package path. + // Only used if WithTypes is true. 
+ Imports map[string]string +} + +func Compile(config CompileConfig) (*Pattern, PatternInfo, error) { + if strings.HasPrefix(config.Src, "import $") { + return compileImportPattern(config) + } info := newPatternInfo() - n, err := parseExpr(fset, src) + n, err := parseExpr(config.Fset, config.Src) if err != nil { return nil, info, err } var c compiler - prog, err := c.Compile(fset, n, &info, strict) + c.config = config + prog, err := c.Compile(n, &info) if err != nil { return nil, info, err } @@ -91,6 +118,6 @@ func Compile(fset *token.FileSet, src string, strict bool) (*Pattern, PatternInf func newPatternInfo() PatternInfo { return PatternInfo{ - Vars: map[string]struct{}{}, + Vars: make(map[string]struct{}), } } diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/instructions.go b/vendor/github.com/quasilyte/gogrep/instructions.go similarity index 100% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/instructions.go rename to vendor/github.com/quasilyte/gogrep/instructions.go diff --git a/vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go b/vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go new file mode 100644 index 000000000..f00d66d46 --- /dev/null +++ b/vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go @@ -0,0 +1,151 @@ +package stdinfo + +var Packages = map[string]string{ + "adler32": "hash/adler32", + "aes": "crypto/aes", + "ascii85": "encoding/ascii85", + "asn1": "encoding/asn1", + "ast": "go/ast", + "atomic": "sync/atomic", + "base32": "encoding/base32", + "base64": "encoding/base64", + "big": "math/big", + "binary": "encoding/binary", + "bits": "math/bits", + "bufio": "bufio", + "build": "go/build", + "bytes": "bytes", + "bzip2": "compress/bzip2", + "cgi": "net/http/cgi", + "cgo": "runtime/cgo", + "cipher": "crypto/cipher", + "cmplx": "math/cmplx", + "color": "image/color", + "constant": "go/constant", + "constraint": "go/build/constraint", + "context": "context", + "cookiejar": "net/http/cookiejar", + "crc32": "hash/crc32", + "crc64": "hash/crc64", + "crypto": "crypto", + "csv": "encoding/csv", + "debug": "runtime/debug", + "des": "crypto/des", + "doc": "go/doc", + "draw": "image/draw", + "driver": "database/sql/driver", + "dsa": "crypto/dsa", + "dwarf": "debug/dwarf", + "ecdsa": "crypto/ecdsa", + "ed25519": "crypto/ed25519", + "elf": "debug/elf", + "elliptic": "crypto/elliptic", + "embed": "embed", + "encoding": "encoding", + "errors": "errors", + "exec": "os/exec", + "expvar": "expvar", + "fcgi": "net/http/fcgi", + "filepath": "path/filepath", + "flag": "flag", + "flate": "compress/flate", + "fmt": "fmt", + "fnv": "hash/fnv", + "format": "go/format", + "fs": "io/fs", + "fstest": "testing/fstest", + "gif": "image/gif", + "gob": "encoding/gob", + "gosym": "debug/gosym", + "gzip": "compress/gzip", + "hash": "hash", + "heap": "container/heap", + "hex": "encoding/hex", + "hmac": "crypto/hmac", + "html": "html", + "http": "net/http", + "httptest": "net/http/httptest", + "httptrace": "net/http/httptrace", + "httputil": "net/http/httputil", + "image": "image", + "importer": "go/importer", + "io": "io", + "iotest": "testing/iotest", + "ioutil": "io/ioutil", + "jpeg": "image/jpeg", + "json": "encoding/json", + "jsonrpc": "net/rpc/jsonrpc", + "list": "container/list", + "log": "log", + "lzw": "compress/lzw", + "macho": "debug/macho", + "mail": "net/mail", + "maphash": "hash/maphash", + "math": "math", + "md5": "crypto/md5", + "metrics": "runtime/metrics", + "mime": "mime", + "multipart": "mime/multipart", + "net": 
"net", + "os": "os", + "palette": "image/color/palette", + "parse": "text/template/parse", + "parser": "go/parser", + "path": "path", + "pe": "debug/pe", + "pem": "encoding/pem", + "pkix": "crypto/x509/pkix", + "plan9obj": "debug/plan9obj", + "plugin": "plugin", + "png": "image/png", + "pprof": "runtime/pprof", + "printer": "go/printer", + "quick": "testing/quick", + "quotedprintable": "mime/quotedprintable", + "race": "runtime/race", + "rand": "math/rand", + "rc4": "crypto/rc4", + "reflect": "reflect", + "regexp": "regexp", + "ring": "container/ring", + "rpc": "net/rpc", + "rsa": "crypto/rsa", + "runtime": "runtime", + "scanner": "text/scanner", + "sha1": "crypto/sha1", + "sha256": "crypto/sha256", + "sha512": "crypto/sha512", + "signal": "os/signal", + "smtp": "net/smtp", + "sort": "sort", + "sql": "database/sql", + "strconv": "strconv", + "strings": "strings", + "subtle": "crypto/subtle", + "suffixarray": "index/suffixarray", + "sync": "sync", + "syntax": "regexp/syntax", + "syscall": "syscall", + "syslog": "log/syslog", + "tabwriter": "text/tabwriter", + "tar": "archive/tar", + "template": "text/template", + "testing": "testing", + "textproto": "net/textproto", + "time": "time", + "tls": "crypto/tls", + "token": "go/token", + "trace": "runtime/trace", + "types": "go/types", + "tzdata": "time/tzdata", + "unicode": "unicode", + "unsafe": "unsafe", + "url": "net/url", + "user": "os/user", + "utf16": "unicode/utf16", + "utf8": "unicode/utf8", + "x509": "crypto/x509", + "xml": "encoding/xml", + "zip": "archive/zip", + "zlib": "compress/zlib", +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/match.go b/vendor/github.com/quasilyte/gogrep/match.go similarity index 98% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/match.go rename to vendor/github.com/quasilyte/gogrep/match.go index 39b71c467..7e1e86dbe 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/match.go +++ b/vendor/github.com/quasilyte/gogrep/match.go @@ -8,7 +8,6 @@ import ( "strconv" "github.com/go-toolsmith/astequal" - "github.com/quasilyte/go-ruleguard/internal/stdinfo" ) type matcher struct { @@ -61,8 +60,7 @@ func (m *matcher) MatchNode(state *MatcherState, n ast.Node, accept func(MatchDa m.walkExprSlice(state, n.Results, accept) } case opMultiDecl: - switch n := n.(type) { - case *ast.File: + if n, ok := n.(*ast.File); ok { m.walkDeclSlice(state, n.Decls, accept) } default: @@ -187,7 +185,7 @@ func (m *matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast n, ok := n.(*ast.Ident) return ok && m.stringValue(inst) == n.Name - case opStdlibPkg: + case opPkg: n, ok := n.(*ast.Ident) if !ok { return false @@ -197,8 +195,7 @@ func (m *matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast return false } pkgName, ok := obj.(*types.PkgName) - return ok && m.stringValue(inst) == pkgName.Imported().Name() && - pkgName.Imported().Path() == stdinfo.Packages[pkgName.Imported().Name()] + return ok && pkgName.Imported().Path() == m.stringValue(inst) case opBinaryExpr: n, ok := n.(*ast.BinaryExpr) @@ -529,7 +526,7 @@ func (m *matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type) case opTypedValueInitSpec: n, ok := n.(*ast.ValueSpec) - return ok && len(n.Values) != 0 && n.Type != nil && + return ok && len(n.Values) != 0 && m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type) && m.matchExprSlice(state, n.Values) case opTypeSpec: @@ -552,6 +549,12 @@ func (m 
*matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast case opTypeDecl: n, ok := n.(*ast.GenDecl) return ok && n.Tok == token.TYPE && m.matchSpecSlice(state, n.Specs) + case opAnyImportDecl: + n, ok := n.(*ast.GenDecl) + return ok && n.Tok == token.IMPORT + case opImportDecl: + n, ok := n.(*ast.GenDecl) + return ok && n.Tok == token.IMPORT && m.matchSpecSlice(state, n.Specs) case opEmptyPackage: n, ok := n.(*ast.File) @@ -609,7 +612,7 @@ func (m *matcher) matchSpecSlice(state *MatcherState, specs []ast.Spec) bool { // matchNodeList matches two lists of nodes. It uses a common algorithm to match // wildcard patterns with any number of nodes without recursion. -func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bool) (ast.Node, int) { +func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bool) (matched ast.Node, offset int) { sliceLen := nodes.Len() inst := m.nextInst(state) if inst.op == opEnd { @@ -656,7 +659,7 @@ func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bo stack = stack[:len(stack)-1] pcNext = 0 jNext = 0 - if len(stack) > 0 { + if len(stack) != 0 { pcNext = stack[len(stack)-1].pc jNext = stack[len(stack)-1].j } diff --git a/vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go b/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go similarity index 99% rename from vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go rename to vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go index 7c0408b59..a4cc2ff85 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go +++ b/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go @@ -1,6 +1,8 @@ package nodetag -import "go/ast" +import ( + "go/ast" +) type Value int diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go b/vendor/github.com/quasilyte/gogrep/operation_string.go similarity index 66% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go rename to vendor/github.com/quasilyte/gogrep/operation_string.go index 898cc8d56..1f8f09c90 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go +++ b/vendor/github.com/quasilyte/gogrep/operation_string.go @@ -28,7 +28,7 @@ func _() { _ = x[opStrictStringLit-17] _ = x[opStrictComplexLit-18] _ = x[opIdent-19] - _ = x[opStdlibPkg-20] + _ = x[opPkg-20] _ = x[opIndexExpr-21] _ = x[opSliceExpr-22] _ = x[opSliceFromExpr-23] @@ -125,12 +125,14 @@ func _() { _ = x[opConstDecl-114] _ = x[opVarDecl-115] _ = x[opTypeDecl-116] - _ = x[opEmptyPackage-117] + _ = x[opAnyImportDecl-117] + _ = x[opImportDecl-118] + _ = x[opEmptyPackage-119] } -const _operation_name = 
"InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeFieldNodeNamedFieldNodeMultiStmtMultiExprMultiDeclEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentStdlibPkgIndexExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprStructTypeInterfaceTypeVoidFuncTypeFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprArgListSimpleArgListVariadicCallExprNonVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueSpecValueInitSpecTypedValueInitSpecTypedValueSpecTypeSpecTypeAliasSpecFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclDeclStmtConstDeclVarDeclTypeDeclEmptyPackage" +const _operation_name = "InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeFieldNodeNamedFieldNodeMultiStmtMultiExprMultiDeclEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentPkgIndexExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprStructTypeInterfaceTypeVoidFuncTypeFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprArgListSimpleArgListVariadicCallExprNonVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueSpecValueInitSpecTypedValueInitSpecTypedValueSpecTypeSpecTypeAliasSpecFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclDeclStmtConstDeclVarDeclTypeDeclAnyImportDeclImportDeclEmptyPackage" -var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 81, 90, 99, 108, 111, 119, 131, 145, 158, 173, 189, 194, 203, 212, 221, 234, 245, 260, 274, 292, 299, 311, 328, 346, 358, 372, 392, 402, 415, 427, 435, 444, 453, 460, 468, 480, 488, 501, 509, 518, 528, 537, 544, 557, 573, 592, 600, 610, 625, 635, 658, 675, 692, 703, 712, 720, 726, 735, 743, 752, 762, 772, 778, 788, 798, 812, 826, 844, 854, 867, 881, 898, 908, 922, 940, 950, 967, 977, 994, 1001, 1012, 1023, 1038, 1049, 1064, 1079, 1098, 1107, 1119, 1136, 1145, 1157, 1168, 1173, 1183, 1192, 1205, 1223, 1237, 1245, 1258, 1266, 1276, 1289, 1304, 1312, 1321, 1328, 1336, 1348} +var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 81, 90, 99, 108, 111, 119, 131, 145, 158, 173, 189, 194, 197, 206, 215, 228, 239, 254, 268, 
286, 293, 305, 322, 340, 352, 366, 386, 396, 409, 421, 429, 438, 447, 454, 462, 474, 482, 495, 503, 512, 522, 531, 538, 551, 567, 586, 594, 604, 619, 629, 652, 669, 686, 697, 706, 714, 720, 729, 737, 746, 756, 766, 772, 782, 792, 806, 820, 838, 848, 861, 875, 892, 902, 916, 934, 944, 961, 971, 988, 995, 1006, 1017, 1032, 1043, 1058, 1073, 1092, 1101, 1113, 1130, 1139, 1151, 1162, 1167, 1177, 1186, 1199, 1217, 1231, 1239, 1252, 1260, 1270, 1283, 1298, 1306, 1315, 1322, 1330, 1343, 1353, 1365} func (i operation) String() string { if i >= operation(len(_operation_index)-1) { diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go b/vendor/github.com/quasilyte/gogrep/operations.gen.go similarity index 98% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go rename to vendor/github.com/quasilyte/gogrep/operations.gen.go index 7f6471bb1..a07285cc3 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go +++ b/vendor/github.com/quasilyte/gogrep/operations.gen.go @@ -3,7 +3,7 @@ package gogrep import ( - "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/gogrep/nodetag" ) //go:generate stringer -type=operation -trimprefix=op @@ -87,8 +87,8 @@ const ( opIdent operation = 19 // Tag: Ident - // ValueIndex: strings | package name - opStdlibPkg operation = 20 + // ValueIndex: strings | package path + opPkg operation = 20 // Tag: IndexExpr // Args: x expr @@ -537,9 +537,16 @@ const ( // Args: typespecs... opTypeDecl operation = 116 + // Tag: GenDecl + opAnyImportDecl operation = 117 + + // Tag: GenDecl + // Args: importspecs... + opImportDecl operation = 118 + // Tag: File // Args: name - opEmptyPackage operation = 117 + opEmptyPackage operation = 119 ) type operationInfo struct { @@ -706,7 +713,7 @@ var operationInfoTable = [256]operationInfo{ VariadicMap: 0, // 0 SliceIndex: -1, }, - opStdlibPkg: { + opPkg: { Tag: nodetag.Ident, NumArgs: 0, ValueKind: emptyValue, @@ -1482,6 +1489,22 @@ var operationInfoTable = [256]operationInfo{ VariadicMap: 1, // 1 SliceIndex: -1, }, + opAnyImportDecl: { + Tag: nodetag.GenDecl, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + SliceIndex: -1, + }, + opImportDecl: { + Tag: nodetag.GenDecl, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + SliceIndex: -1, + }, opEmptyPackage: { Tag: nodetag.File, NumArgs: 1, diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go b/vendor/github.com/quasilyte/gogrep/parse.go similarity index 98% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go rename to vendor/github.com/quasilyte/gogrep/parse.go index 2cd73934e..e3d17c8ce 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go +++ b/vendor/github.com/quasilyte/gogrep/parse.go @@ -296,8 +296,9 @@ func tokenize(src []byte) ([]fullToken, error) { } toks = append(toks, wt) if caseStat == caseHere { - toks = append(toks, fullToken{wt.pos, token.COLON, ""}) - toks = append(toks, fullToken{wt.pos, token.IDENT, "gogrep_body"}) + toks = append(toks, + fullToken{wt.pos, token.COLON, ""}, + fullToken{wt.pos, token.IDENT, "gogrep_body"}) } } return toks, err @@ -341,7 +342,7 @@ func encodeWildName(name string, any bool) string { func decodeWildName(s string) varInfo { s = s[len(wildSeparator):] nameEnd := strings.Index(s, wildSeparator) - name := s[:nameEnd] + name := s[:nameEnd+0] s = s[nameEnd:] s = s[len(wildSeparator):] 
kind := s diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/slices.go b/vendor/github.com/quasilyte/gogrep/slices.go similarity index 100% rename from vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/slices.go rename to vendor/github.com/quasilyte/gogrep/slices.go diff --git a/vendor/github.com/securego/gosec/v2/Dockerfile b/vendor/github.com/securego/gosec/v2/Dockerfile index c937d5255..1d7f91337 100644 --- a/vendor/github.com/securego/gosec/v2/Dockerfile +++ b/vendor/github.com/securego/gosec/v2/Dockerfile @@ -8,7 +8,7 @@ RUN go mod download RUN make build-linux FROM golang:${GO_VERSION}-alpine -RUN apk add --update --no-cache ca-certificates bash git gcc libc-dev +RUN apk add --update --no-cache ca-certificates bash git gcc libc-dev openssh ENV GO111MODULE on COPY --from=builder /build/gosec /bin/gosec COPY entrypoint.sh /bin/entrypoint.sh diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile index e28818c64..4cb90c014 100644 --- a/vendor/github.com/securego/gosec/v2/Makefile +++ b/vendor/github.com/securego/gosec/v2/Makefile @@ -12,23 +12,23 @@ GOBIN ?= $(GOPATH)/bin GOLINT ?= $(GOBIN)/golint GOSEC ?= $(GOBIN)/gosec GINKGO ?= $(GOBIN)/ginkgo -GO_VERSION = 1.15 +GO_VERSION = 1.17 default: $(MAKE) build install-test-deps: - $(GO_NOMOD) get -u github.com/onsi/ginkgo/ginkgo + go install github.com/onsi/ginkgo/v2/ginkgo@latest $(GO_NOMOD) get -u golang.org/x/crypto/ssh $(GO_NOMOD) get -u github.com/lib/pq test: install-test-deps build fmt lint sec - $(GINKGO) -r -v + $(GINKGO) -v --fail-fast fmt: @echo "FORMATTING" @FORMATTED=`$(GO) fmt ./...` - @([[ ! -z "$(FORMATTED)" ]] && printf "Fixed unformatted files:\n$(FORMATTED)") || true + @([ ! -z "$(FORMATTED)" ] && printf "Fixed unformatted files:\n$(FORMATTED)") || true lint: @echo "LINTING" diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md index e6c969b22..0f5d4ea83 100644 --- a/vendor/github.com/securego/gosec/v2/README.md +++ b/vendor/github.com/securego/gosec/v2/README.md @@ -269,14 +269,15 @@ gosec -exclude-generated ./... ### Annotating code As with all automated detection tools, there will be cases of false positives. In cases where gosec reports a failure that has been manually verified as being safe, -it is possible to annotate the code with a `#nosec` comment. +it is possible to annotate the code with a comment that starts with `#nosec`. +The `#nosec` comment should have the format `#nosec [RuleList] [-- Justification]`. The annotation causes gosec to stop processing any further nodes within the AST so can apply to a whole block or more granularly to a single expression. ```go -import "md5" // #nosec +import "md5" //#nosec func main(){ @@ -292,7 +293,11 @@ func main(){ When a specific false positive has been identified and verified as safe, you may wish to suppress only that single rule (or a specific set of rules) within a section of code, while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within -the `#nosec` annotation, e.g: `/* #nosec G401 */` or `// #nosec G201 G202 G203` +the `#nosec` annotation, e.g: `/* #nosec G401 */` or `//#nosec G201 G202 G203` + +You could put the description or justification text for the annotation. 
The +justification should be after the rule(s) to suppress and start with two or +more dashes, e.g: `//#nosec G101 G102 -- This is a false positive` In some cases you may also want to revisit places where `#nosec` annotations have been used. To run the scanner and ignore any `#nosec` annotations you @@ -302,6 +307,27 @@ can do the following: gosec -nosec=true ./... ``` +### Tracking suppressions + +As described above, we could suppress violations externally (using `-include`/ +`-exclude`) or inline (using `#nosec` annotations) in gosec. This suppression +inflammation can be used to generate corresponding signals for auditing +purposes. + +We could track suppressions by the `-track-suppressions` flag as follows: + +```bash +gosec -track-suppressions -exclude=G101 -fmt=sarif -out=results.sarif ./... +``` + +- For external suppressions, gosec records suppression info where `kind` is +`external` and `justification` is a certain sentence "Globally suppressed". +- For inline suppressions, gosec records suppression info where `kind` is +`inSource` and `justification` is the text after two or more dashes in the +comment. + +**Note:** Only SARIF and JSON formats support tracking suppressions. + ### Build tags gosec is able to pass your [Go build tags](https://golang.org/pkg/go/build/) to the analyzer. @@ -358,7 +384,7 @@ Then generate the types with : schema-generate -i sarif-schema-2.1.0.json -o mypath/types.go ``` -Most of the MarshallJSON/UnmarshalJSON are removed except the one for PropertyBag which is handy to inline the additionnal properties. The rest can be removed. +Most of the MarshallJSON/UnmarshalJSON are removed except the one for PropertyBag which is handy to inline the additional properties. The rest can be removed. The URI,ID, UUID, GUID were renamed so it fits the Golang convention defined [here](https://github.com/golang/lint/blob/master/lint.go#L700) ### Tests @@ -419,3 +445,9 @@ This will generate the `rules/tls_config.go` file which will contain the current ## Who is using gosec? This is a [list](USERS.md) with some of the gosec's users. + +## Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website + + diff --git a/vendor/github.com/securego/gosec/v2/analyzer.go b/vendor/github.com/securego/gosec/v2/analyzer.go index a2951683e..e696e3de5 100644 --- a/vendor/github.com/securego/gosec/v2/analyzer.go +++ b/vendor/github.com/securego/gosec/v2/analyzer.go @@ -43,6 +43,10 @@ const LoadMode = packages.NeedName | packages.NeedTypesInfo | packages.NeedSyntax +const externalSuppressionJustification = "Globally suppressed." + +const aliasOfAllRules = "*" + var generatedCodePattern = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) // The Context is populated with data parsed from the source code as it is scanned. @@ -57,7 +61,7 @@ type Context struct { Root *ast.File Config Config Imports *ImportTracker - Ignores []map[string]bool + Ignores []map[string][]SuppressionInfo PassedValues map[string]interface{} } @@ -72,21 +76,29 @@ type Metrics struct { // Analyzer object is the main object of gosec. It has methods traverse an AST // and invoke the correct checking rules as on each node as required. 
type Analyzer struct { - ignoreNosec bool - ruleset RuleSet - context *Context - config Config - logger *log.Logger - issues []*Issue - stats *Metrics - errors map[string][]Error // keys are file paths; values are the golang errors in those files - tests bool - excludeGenerated bool - showIgnored bool + ignoreNosec bool + ruleset RuleSet + context *Context + config Config + logger *log.Logger + issues []*Issue + stats *Metrics + errors map[string][]Error // keys are file paths; values are the golang errors in those files + tests bool + excludeGenerated bool + showIgnored bool + trackSuppressions bool +} + +// SuppressionInfo object is to record the kind and the justification that used +// to suppress violations. +type SuppressionInfo struct { + Kind string `json:"kind"` + Justification string `json:"justification"` } // NewAnalyzer builds a new analyzer. -func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, logger *log.Logger) *Analyzer { +func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, trackSuppressions bool, logger *log.Logger) *Analyzer { ignoreNoSec := false if enabled, err := conf.IsGlobalEnabled(Nosec); err == nil { ignoreNoSec = enabled @@ -99,21 +111,22 @@ func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, logger *log.Log logger = log.New(os.Stderr, "[gosec]", log.LstdFlags) } return &Analyzer{ - ignoreNosec: ignoreNoSec, - showIgnored: showIgnored, - ruleset: make(RuleSet), - context: &Context{}, - config: conf, - logger: logger, - issues: make([]*Issue, 0, 16), - stats: &Metrics{}, - errors: make(map[string][]Error), - tests: tests, - excludeGenerated: excludeGenerated, + ignoreNosec: ignoreNoSec, + showIgnored: showIgnored, + ruleset: NewRuleSet(), + context: &Context{}, + config: conf, + logger: logger, + issues: make([]*Issue, 0, 16), + stats: &Metrics{}, + errors: make(map[string][]Error), + tests: tests, + excludeGenerated: excludeGenerated, + trackSuppressions: trackSuppressions, } } -// SetConfig upates the analyzer configuration +// SetConfig updates the analyzer configuration func (gosec *Analyzer) SetConfig(conf Config) { gosec.config = conf } @@ -125,10 +138,10 @@ func (gosec *Analyzer) Config() Config { // LoadRules instantiates all the rules to be used when analyzing source // packages -func (gosec *Analyzer) LoadRules(ruleDefinitions map[string]RuleBuilder) { +func (gosec *Analyzer) LoadRules(ruleDefinitions map[string]RuleBuilder, ruleSuppressed map[string]bool) { for id, def := range ruleDefinitions { r, nodes := def(id, gosec.config) - gosec.ruleset.Register(r, nodes...) + gosec.ruleset.Register(r, ruleSuppressed[id], nodes...) } } @@ -206,7 +219,12 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages. func (gosec *Analyzer) Check(pkg *packages.Package) { gosec.logger.Println("Checking package:", pkg.Name) for _, file := range pkg.Syntax { - checkedFile := pkg.Fset.File(file.Pos()).Name() + fp := pkg.Fset.File(file.Pos()) + if fp == nil { + // skip files which cannot be located + continue + } + checkedFile := fp.Name() // Skip the no-Go file from analysis (e.g. 
a Cgo files is expanded in 3 different files // stored in the cache which do not need to by analyzed) if filepath.Ext(checkedFile) != ".go" { @@ -295,7 +313,7 @@ func (gosec *Analyzer) AppendError(file string, err error) { } // ignore a node (and sub-tree) if it is tagged with a nosec tag comment -func (gosec *Analyzer) ignore(n ast.Node) ([]string, bool) { +func (gosec *Analyzer) ignore(n ast.Node) map[string]SuppressionInfo { if groups, ok := gosec.context.Comments[n]; ok && !gosec.ignoreNosec { // Checks if an alternative for #nosec is set and, if not, uses the default. @@ -306,32 +324,52 @@ func (gosec *Analyzer) ignore(n ast.Node) ([]string, bool) { } for _, group := range groups { - - foundDefaultTag := strings.Contains(group.Text(), noSecDefaultTag) - foundAlternativeTag := strings.Contains(group.Text(), noSecAlternativeTag) + comment := strings.TrimSpace(group.Text()) + foundDefaultTag := strings.HasPrefix(comment, noSecDefaultTag) || regexp.MustCompile("\n *"+noSecDefaultTag).Match([]byte(comment)) + foundAlternativeTag := strings.HasPrefix(comment, noSecAlternativeTag) || regexp.MustCompile("\n *"+noSecAlternativeTag).Match([]byte(comment)) if foundDefaultTag || foundAlternativeTag { gosec.stats.NumNosec++ + // Discard what's in front of the nosec tag. + if foundDefaultTag { + comment = strings.SplitN(comment, noSecDefaultTag, 2)[1] + } else { + comment = strings.SplitN(comment, noSecAlternativeTag, 2)[1] + } + + // Extract the directive and the justification. + justification := "" + commentParts := regexp.MustCompile(`-{2,}`).Split(comment, 2) + directive := commentParts[0] + if len(commentParts) > 1 { + justification = strings.TrimSpace(strings.TrimRight(commentParts[1], "\n")) + } + // Pull out the specific rules that are listed to be ignored. re := regexp.MustCompile(`(G\d{3})`) - matches := re.FindAllStringSubmatch(group.Text(), -1) + matches := re.FindAllStringSubmatch(directive, -1) - // If no specific rules were given, ignore everything. - if len(matches) == 0 { - return nil, true + suppression := SuppressionInfo{ + Kind: "inSource", + Justification: justification, } // Find the rule IDs to ignore. - var ignores []string + ignores := make(map[string]SuppressionInfo) for _, v := range matches { - ignores = append(ignores, v[1]) + ignores[v[1]] = suppression } - return ignores, false + + // If no specific rules were given, ignore everything. + if len(matches) == 0 { + ignores[aliasOfAllRules] = suppression + } + return ignores } } } - return nil, false + return nil } // Visit runs the gosec visitor logic over an AST created by parsing go code. @@ -346,31 +384,43 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { } // Get any new rule exclusions. - ignoredRules, ignoreAll := gosec.ignore(n) - if ignoreAll { - return nil - } + ignoredRules := gosec.ignore(n) // Now create the union of exclusions. - ignores := map[string]bool{} + ignores := map[string][]SuppressionInfo{} if len(gosec.context.Ignores) > 0 { for k, v := range gosec.context.Ignores[0] { ignores[k] = v } } - for _, v := range ignoredRules { - ignores[v] = true + for ruleID, suppression := range ignoredRules { + ignores[ruleID] = append(ignores[ruleID], suppression) } // Push the new set onto the stack. - gosec.context.Ignores = append([]map[string]bool{ignores}, gosec.context.Ignores...) + gosec.context.Ignores = append([]map[string][]SuppressionInfo{ignores}, gosec.context.Ignores...) 
// Track aliased and initialization imports gosec.context.Imports.TrackImport(n) for _, rule := range gosec.ruleset.RegisteredFor(n) { - _, ignored := ignores[rule.ID()] + // Check if all rules are ignored. + generalSuppressions, generalIgnored := ignores[aliasOfAllRules] + // Check if the specific rule is ignored + ruleSuppressions, ruleIgnored := ignores[rule.ID()] + + ignored := generalIgnored || ruleIgnored + suppressions := append(generalSuppressions, ruleSuppressions...) + + // Track external suppressions. + if gosec.ruleset.IsRuleSuppressed(rule.ID()) { + ignored = true + suppressions = append(suppressions, SuppressionInfo{ + Kind: "external", + Justification: externalSuppressionJustification, + }) + } issue, err := rule.Match(n, gosec.context) if err != nil { @@ -385,7 +435,10 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { if !ignored || !gosec.showIgnored { gosec.stats.NumFound++ } - if !ignored || gosec.showIgnored || gosec.ignoreNosec { + if ignored && gosec.trackSuppressions { + issue.WithSuppressions(suppressions) + gosec.issues = append(gosec.issues, issue) + } else if !ignored || gosec.showIgnored || gosec.ignoreNosec { gosec.issues = append(gosec.issues, issue) } } diff --git a/vendor/github.com/securego/gosec/v2/config.go b/vendor/github.com/securego/gosec/v2/config.go index fe60b2f6d..59f48bc5e 100644 --- a/vendor/github.com/securego/gosec/v2/config.go +++ b/vendor/github.com/securego/gosec/v2/config.go @@ -26,6 +26,10 @@ const ( Audit GlobalOption = "audit" // NoSecAlternative global option alternative for #nosec directive NoSecAlternative GlobalOption = "#nosec" + // ExcludeRules global option for some rules should not be load + ExcludeRules GlobalOption = "exclude" + // IncludeRules global option for should be load + IncludeRules GlobalOption = "include" ) // Config is used to provide configuration and customization to each of the rules. 
diff --git a/vendor/github.com/securego/gosec/v2/errors.go b/vendor/github.com/securego/gosec/v2/errors.go index a27aa5821..2f6672704 100644 --- a/vendor/github.com/securego/gosec/v2/errors.go +++ b/vendor/github.com/securego/gosec/v2/errors.go @@ -20,7 +20,7 @@ func NewError(line, column int, err string) *Error { } } -// sortErros sorts the golang erros by line +// sortErrors sorts the golang errors by line func sortErrors(allErrors map[string][]Error) { for _, errors := range allErrors { sort.Slice(errors, func(i, j int) bool { diff --git a/vendor/github.com/securego/gosec/v2/go.mod b/vendor/github.com/securego/gosec/v2/go.mod index c324841c4..612e4dc36 100644 --- a/vendor/github.com/securego/gosec/v2/go.mod +++ b/vendor/github.com/securego/gosec/v2/go.mod @@ -2,16 +2,16 @@ module github.com/securego/gosec/v2 require ( github.com/google/uuid v1.3.0 - github.com/gookit/color v1.4.2 - github.com/lib/pq v1.10.3 + github.com/gookit/color v1.5.0 + github.com/lib/pq v1.10.4 github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5 github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 - github.com/onsi/ginkgo v1.16.4 - github.com/onsi/gomega v1.16.0 - golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 + github.com/onsi/ginkgo/v2 v2.0.0 + github.com/onsi/gomega v1.17.0 + golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 golang.org/x/text v0.3.7 - golang.org/x/tools v0.1.7 + golang.org/x/tools v0.1.8 gopkg.in/yaml.v2 v2.4.0 ) diff --git a/vendor/github.com/securego/gosec/v2/go.sum b/vendor/github.com/securego/gosec/v2/go.sum index 8d81d7161..43777ef19 100644 --- a/vendor/github.com/securego/gosec/v2/go.sum +++ b/vendor/github.com/securego/gosec/v2/go.sum @@ -157,6 +157,7 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/trillian v1.3.11/go.mod h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw= github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -166,8 +167,8 @@ github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gookit/color v1.4.2 h1:tXy44JFSFkKnELV6WaMo/lLfu/meqITX3iAV52do7lk= -github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= +github.com/gookit/color v1.5.0 h1:1Opow3+BWDwqor78DcJkJCIwnkviFi+rrOANki9BUFw= +github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo= github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod 
h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= @@ -187,6 +188,7 @@ github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpO github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= @@ -219,8 +221,8 @@ github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+ github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.3 h1:v9QZf2Sn6AmjXtQeFpdoq/eaNtYP6IN+7lcrygsIAtg= -github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.4 h1:SO9z7FRPzA03QhHKJrH5BXA6HU1rS4V2nIVrrNC1iYk= +github.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -266,10 +268,12 @@ github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+ github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo/v2 v2.0.0 h1:CcuG/HvWNkkaqCUpJifQY8z7qEMBJya6aLPx6ftGyjQ= +github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= -github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.17.0 h1:9Luw4uT5HTjHTN8+aNcSThgH1vdXnmdJ8xIfZ4wyTRE= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= @@ -322,8 +326,9 @@ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod 
h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= @@ -343,7 +348,7 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= @@ -371,8 +376,8 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce h1:Roh6XWxHFKrPgC/EQhVubSAGQ6Ozk6IdxHSzt1mR0EI= +golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -406,8 +411,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -442,10 +447,10 @@ golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -510,8 +515,8 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA= -golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -575,8 +580,8 @@ golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200630154851-b2d8b0336632/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200706234117-b22de6825cf7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= -golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.8 h1:P1HhGGuLW4aAclzjtmJdf0mJOjVUZUzOTqkAkWL+l6w= +golang.org/x/tools 
v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/securego/gosec/v2/issue.go b/vendor/github.com/securego/gosec/v2/issue.go index 00b193765..f0f028e10 100644 --- a/vendor/github.com/securego/gosec/v2/issue.go +++ b/vendor/github.com/securego/gosec/v2/issue.go @@ -88,16 +88,17 @@ var ruleToCWE = map[string]string{ // Issue is returned by a gosec rule if it discovers an issue with the scanned code. type Issue struct { - Severity Score `json:"severity"` // issue severity (how problematic it is) - Confidence Score `json:"confidence"` // issue confidence (how sure we are we found it) - Cwe *cwe.Weakness `json:"cwe"` // Cwe associated with RuleID - RuleID string `json:"rule_id"` // Human readable explanation - What string `json:"details"` // Human readable explanation - File string `json:"file"` // File name we found it in - Code string `json:"code"` // Impacted code line - Line string `json:"line"` // Line number in file - Col string `json:"column"` // Column number in line - NoSec bool `json:"nosec"` // true if the issue is nosec + Severity Score `json:"severity"` // issue severity (how problematic it is) + Confidence Score `json:"confidence"` // issue confidence (how sure we are we found it) + Cwe *cwe.Weakness `json:"cwe"` // Cwe associated with RuleID + RuleID string `json:"rule_id"` // Human readable explanation + What string `json:"details"` // Human readable explanation + File string `json:"file"` // File name we found it in + Code string `json:"code"` // Impacted code line + Line string `json:"line"` // Line number in file + Col string `json:"column"` // Column number in line + NoSec bool `json:"nosec"` // true if the issue is nosec + Suppressions []SuppressionInfo `json:"suppressions"` // Suppression info of the issue } // FileLocation point out the file path and line number in file @@ -179,7 +180,7 @@ func NewIssue(ctx *Context, node ast.Node, ruleID, desc string, severity Score, var code string if file, err := os.Open(fobj.Name()); err == nil { - defer file.Close() // #nosec + defer file.Close() //#nosec s := codeSnippetStartLine(node, fobj) e := codeSnippetEndLine(node, fobj) code, err = codeSnippet(file, s, e, node) @@ -200,3 +201,9 @@ func NewIssue(ctx *Context, node ast.Node, ruleID, desc string, severity Score, Cwe: GetCweByRule(ruleID), } } + +// WithSuppressions set the suppressions of the issue +func (i *Issue) WithSuppressions(suppressions []SuppressionInfo) *Issue { + i.Suppressions = suppressions + return i +} diff --git a/vendor/github.com/securego/gosec/v2/rule.go b/vendor/github.com/securego/gosec/v2/rule.go index fbba089bb..c0429c4c2 100644 --- a/vendor/github.com/securego/gosec/v2/rule.go +++ b/vendor/github.com/securego/gosec/v2/rule.go @@ -26,34 +26,45 @@ type Rule interface { // RuleBuilder is used to register a rule definition with the analyzer type RuleBuilder func(id string, c Config) (Rule, []ast.Node) -// A RuleSet maps lists of rules to the type of AST node they should be run on. +// A RuleSet contains a mapping of lists of rules to the type of AST node they +// should be run on and a mapping of rule ID's to whether the rule are +// suppressed. 
// The analyzer will only invoke rules contained in the list associated with the // type of AST node it is currently visiting. -type RuleSet map[reflect.Type][]Rule +type RuleSet struct { + Rules map[reflect.Type][]Rule + RuleSuppressedMap map[string]bool +} // NewRuleSet constructs a new RuleSet func NewRuleSet() RuleSet { - return make(RuleSet) + return RuleSet{make(map[reflect.Type][]Rule), make(map[string]bool)} } // Register adds a trigger for the supplied rule for the the // specified ast nodes. -func (r RuleSet) Register(rule Rule, nodes ...ast.Node) { +func (r RuleSet) Register(rule Rule, isSuppressed bool, nodes ...ast.Node) { for _, n := range nodes { t := reflect.TypeOf(n) - if rules, ok := r[t]; ok { - r[t] = append(rules, rule) + if rules, ok := r.Rules[t]; ok { + r.Rules[t] = append(rules, rule) } else { - r[t] = []Rule{rule} + r.Rules[t] = []Rule{rule} } } + r.RuleSuppressedMap[rule.ID()] = isSuppressed } // RegisteredFor will return all rules that are registered for a // specified ast node. func (r RuleSet) RegisteredFor(n ast.Node) []Rule { - if rules, found := r[reflect.TypeOf(n)]; found { + if rules, found := r.Rules[reflect.TypeOf(n)]; found { return rules } return []Rule{} } + +// IsRuleSuppressed will return whether the rule is suppressed. +func (r RuleSet) IsRuleSuppressed(ruleID string) bool { + return r.RuleSuppressedMap[ruleID] +} diff --git a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go index f6ca0be81..13b42070d 100644 --- a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go +++ b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go @@ -38,11 +38,10 @@ func contains(methods []string, method string) bool { func (r *badDefer) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if deferStmt, ok := n.(*ast.DeferStmt); ok { for _, deferTyp := range r.types { - if issue := r.checkChild(n, c, deferStmt.Call, deferTyp); issue != nil { - return issue, nil - } - if issue := r.checkFunction(n, c, deferStmt, deferTyp); issue != nil { - return issue, nil + if typ, method, err := gosec.GetCallInfo(deferStmt.Call, c); err == nil { + if normalize(typ) == deferTyp.typ && contains(deferTyp.methods, method) { + return gosec.NewIssue(c, n, r.ID(), fmt.Sprintf(r.What, method, typ), r.Severity, r.Confidence), nil + } } } } @@ -50,42 +49,6 @@ func (r *badDefer) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { return nil, nil } -func (r *badDefer) checkChild(n ast.Node, c *gosec.Context, callExp *ast.CallExpr, deferTyp deferType) *gosec.Issue { - if typ, method, err := gosec.GetCallInfo(callExp, c); err == nil { - if normalize(typ) == deferTyp.typ && contains(deferTyp.methods, method) { - return gosec.NewIssue(c, n, r.ID(), fmt.Sprintf(r.What, method, typ), r.Severity, r.Confidence) - } - } - return nil -} - -func (r *badDefer) checkFunction(n ast.Node, c *gosec.Context, deferStmt *ast.DeferStmt, deferTyp deferType) *gosec.Issue { - if anonFunc, isAnonFunc := deferStmt.Call.Fun.(*ast.FuncLit); isAnonFunc { - for _, subElem := range anonFunc.Body.List { - if issue := r.checkStmt(n, c, subElem, deferTyp); issue != nil { - return issue - } - } - } - return nil -} - -func (r *badDefer) checkStmt(n ast.Node, c *gosec.Context, subElem ast.Stmt, deferTyp deferType) *gosec.Issue { - switch stmt := subElem.(type) { - case *ast.AssignStmt: - for _, rh := range stmt.Rhs { - if e, isCallExp := rh.(*ast.CallExpr); isCallExp { - return r.checkChild(n, c, e, deferTyp) - } - } - case *ast.IfStmt: - if s, is := 
stmt.Init.(*ast.AssignStmt); is { - return r.checkStmt(n, c, s, deferTyp) - } - } - return nil -} - // NewDeferredClosing detects unsafe defer of error returning methods func NewDeferredClosing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { return &badDefer{ diff --git a/vendor/github.com/securego/gosec/v2/rules/readfile.go b/vendor/github.com/securego/gosec/v2/rules/readfile.go index a4ccb720c..579f2fa44 100644 --- a/vendor/github.com/securego/gosec/v2/rules/readfile.go +++ b/vendor/github.com/securego/gosec/v2/rules/readfile.go @@ -125,5 +125,6 @@ func NewReadFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { rule.Add("os", "ReadFile") rule.Add("os", "Open") rule.Add("os", "OpenFile") + rule.Add("os", "Create") return rule, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/vendor/github.com/securego/gosec/v2/rules/rulelist.go index a3d9ca2f6..dc9f14916 100644 --- a/vendor/github.com/securego/gosec/v2/rules/rulelist.go +++ b/vendor/github.com/securego/gosec/v2/rules/rulelist.go @@ -24,16 +24,21 @@ type RuleDefinition struct { Create gosec.RuleBuilder } -// RuleList is a mapping of rule ID's to rule definitions -type RuleList map[string]RuleDefinition +// RuleList contains a mapping of rule ID's to rule definitions and a mapping +// of rule ID's to whether rules are suppressed. +type RuleList struct { + Rules map[string]RuleDefinition + RuleSuppressed map[string]bool +} -// Builders returns all the create methods for a given rule list -func (rl RuleList) Builders() map[string]gosec.RuleBuilder { +// RulesInfo returns all the create methods and the rule suppressed map for a +// given list +func (rl RuleList) RulesInfo() (map[string]gosec.RuleBuilder, map[string]bool) { builders := make(map[string]gosec.RuleBuilder) - for _, def := range rl { + for _, def := range rl.Rules { builders[def.ID] = def.Create } - return builders + return builders, rl.RuleSuppressed } // RuleFilter can be used to include or exclude a rule depending on the return @@ -56,7 +61,7 @@ func NewRuleFilter(action bool, ruleIDs ...string) RuleFilter { } // Generate the list of rules to use -func Generate(filters ...RuleFilter) RuleList { +func Generate(trackSuppressions bool, filters ...RuleFilter) RuleList { rules := []RuleDefinition{ // misc {"G101", "Look for hardcoded credentials", NewHardcodedCredentials}, @@ -102,15 +107,20 @@ func Generate(filters ...RuleFilter) RuleList { } ruleMap := make(map[string]RuleDefinition) + ruleSuppressedMap := make(map[string]bool) RULES: for _, rule := range rules { + ruleSuppressedMap[rule.ID] = false for _, filter := range filters { if filter(rule.ID) { - continue RULES + ruleSuppressedMap[rule.ID] = true + if !trackSuppressions { + continue RULES + } } } ruleMap[rule.ID] = rule } - return ruleMap + return RuleList{ruleMap, ruleSuppressedMap} } diff --git a/vendor/github.com/securego/gosec/v2/rules/sql.go b/vendor/github.com/securego/gosec/v2/rules/sql.go index 8a5b63861..6f4dbf126 100644 --- a/vendor/github.com/securego/gosec/v2/rules/sql.go +++ b/vendor/github.com/securego/gosec/v2/rules/sql.go @@ -137,8 +137,8 @@ func NewSQLStrConcat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { }, } - rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext") - rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext", "Exec", "ExecContext", "Prepare", 
"PrepareContext") + rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext", "Exec", "ExecContext", "Prepare", "PrepareContext") return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} } @@ -261,6 +261,19 @@ func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, erro switch stmt := n.(type) { case *ast.AssignStmt: for _, expr := range stmt.Rhs { + if call, ok := expr.(*ast.CallExpr); ok { + selector, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + sqlQueryCall, ok := selector.X.(*ast.CallExpr) + if ok && s.ContainsCallExpr(sqlQueryCall, ctx) != nil { + issue, err := s.checkQuery(sqlQueryCall, ctx) + if err == nil && issue != nil { + return issue, err + } + } + } if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { return s.checkQuery(sqlQueryCall, ctx) } @@ -282,7 +295,7 @@ func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { noIssueQuoted: gosec.NewCallList(), sqlStatement: sqlStatement{ patterns: []*regexp.Regexp{ - regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "), + regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE)( |\n|\r|\t)"), regexp.MustCompile("%[^bdoxXfFp]"), }, MetaData: gosec.MetaData{ @@ -293,8 +306,8 @@ func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { }, }, } - rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext") - rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext", "Exec", "ExecContext", "Prepare", "PrepareContext") + rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext", "Exec", "ExecContext", "Prepare", "PrepareContext") rule.fmtCalls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln", "Fprintf") rule.noIssue.AddAll("os", "Stdout", "Stderr") rule.noIssueQuoted.Add("github.com/lib/pq", "QuoteIdentifier") diff --git a/vendor/github.com/securego/gosec/v2/rules/subproc.go b/vendor/github.com/securego/gosec/v2/rules/subproc.go index 53f8eb854..5d7cadda9 100644 --- a/vendor/github.com/securego/gosec/v2/rules/subproc.go +++ b/vendor/github.com/securego/gosec/v2/rules/subproc.go @@ -55,6 +55,10 @@ func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { // .. 
indeed it is a variable then processing is different than a normal // field assignment if variable { + // skip the check when the declaration is not available + if ident.Obj == nil { + continue + } switch ident.Obj.Decl.(type) { case *ast.AssignStmt: _, assignment := ident.Obj.Decl.(*ast.AssignStmt) diff --git a/vendor/github.com/securego/gosec/v2/rules/tempfiles.go b/vendor/github.com/securego/gosec/v2/rules/tempfiles.go index 36f0f979b..63822c093 100644 --- a/vendor/github.com/securego/gosec/v2/rules/tempfiles.go +++ b/vendor/github.com/securego/gosec/v2/rules/tempfiles.go @@ -23,19 +23,41 @@ import ( type badTempFile struct { gosec.MetaData - calls gosec.CallList - args *regexp.Regexp + calls gosec.CallList + args *regexp.Regexp + argCalls gosec.CallList + nestedCalls gosec.CallList } func (t *badTempFile) ID() string { return t.MetaData.ID } +func (t *badTempFile) findTempDirArgs(n ast.Node, c *gosec.Context, suspect ast.Node) *gosec.Issue { + if s, e := gosec.GetString(suspect); e == nil { + if t.args.MatchString(s) { + return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence) + } + return nil + } + if ce := t.argCalls.ContainsPkgCallExpr(suspect, c, false); ce != nil { + return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence) + } + if be, ok := suspect.(*ast.BinaryExpr); ok { + if ops := gosec.GetBinaryExprOperands(be); len(ops) != 0 { + return t.findTempDirArgs(n, c, ops[0]) + } + return nil + } + if ce := t.nestedCalls.ContainsPkgCallExpr(suspect, c, false); ce != nil { + return t.findTempDirArgs(n, c, ce.Args[0]) + } + return nil +} + func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { - if arg, e := gosec.GetString(node.Args[0]); t.args.MatchString(arg) && e == nil { - return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence), nil - } + return t.findTempDirArgs(n, c, node.Args[0]), nil } return nil, nil } @@ -44,10 +66,17 @@ func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err func NewBadTempFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("io/ioutil", "WriteFile") - calls.Add("os", "Create") + calls.AddAll("os", "Create", "WriteFile") + argCalls := gosec.NewCallList() + argCalls.Add("os", "TempDir") + nestedCalls := gosec.NewCallList() + nestedCalls.Add("path", "Join") + nestedCalls.Add("path/filepath", "Join") return &badTempFile{ - calls: calls, - args: regexp.MustCompile(`^/tmp/.*$|^/var/tmp/.*$`), + calls: calls, + args: regexp.MustCompile(`^(/(usr|var))?/tmp(/.*)?$`), + argCalls: argCalls, + nestedCalls: nestedCalls, MetaData: gosec.MetaData{ ID: id, Severity: gosec.Medium, diff --git a/vendor/github.com/securego/gosec/v2/rules/tls.go b/vendor/github.com/securego/gosec/v2/rules/tls.go index 486b56e3c..55a6786ad 100644 --- a/vendor/github.com/securego/gosec/v2/rules/tls.go +++ b/vendor/github.com/securego/gosec/v2/rules/tls.go @@ -88,7 +88,7 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont case "MinVersion": if d, ok := n.Value.(*ast.Ident); ok { - if vs, ok := d.Obj.Decl.(*ast.ValueSpec); ok { + if vs, ok := d.Obj.Decl.(*ast.ValueSpec); ok && len(vs.Values) > 0 { if s, ok := vs.Values[0].(*ast.SelectorExpr); ok { x := s.X.(*ast.Ident).Name sel := s.Sel.Name diff --git a/vendor/github.com/sivchari/containedctx/.golangci.yml b/vendor/github.com/sivchari/containedctx/.golangci.yml new file mode 100644 index 
000000000..f687df836 --- /dev/null +++ b/vendor/github.com/sivchari/containedctx/.golangci.yml @@ -0,0 +1,38 @@ +run: + timeout: 5m + skip-files: [] + +linters-settings: + govet: + enable-all: true + disable: + - fieldalignment + gocyclo: + min-complexity: 12 + misspell: + locale: US + godox: + keywords: + - FIXME + gofumpt: + extra-rules: true + +linters: + disable-all: true + enable: + - govet + - revive + - goimports + - staticcheck + - gosimple + - unused + - godox + - gofumpt + - misspell + - gocyclo + +issues: + exclude-use-default: true + max-per-linter: 0 + max-same-issues: 0 + exclude: [] diff --git a/vendor/github.com/sivchari/containedctx/LICENCE b/vendor/github.com/sivchari/containedctx/LICENCE new file mode 100644 index 000000000..5185ec09a --- /dev/null +++ b/vendor/github.com/sivchari/containedctx/LICENCE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 sivchari + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/sivchari/containedctx/README.md b/vendor/github.com/sivchari/containedctx/README.md new file mode 100644 index 000000000..8f40499fb --- /dev/null +++ b/vendor/github.com/sivchari/containedctx/README.md @@ -0,0 +1,62 @@ +# containedctx + +[![test_and_lint](https://github.com/sivchari/containedctx/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/sivchari/containedctx/actions/workflows/ci.yml) + +containedctx is a linter that detects struct contained context.Context field + +## Instruction + +```sh +go install github.com/sivchari/containedctx/cmd/containedctx +``` + +## Usage + +```go +package main + +import "context" + +type ok struct { + i int + s string +} + +type ng struct { + ctx context.Context +} + +type empty struct{} +``` + +```console +go vet -vettool=(which containedctx) ./... + +# a +./main.go:11:2: found a struct that contains a context.Context field +``` + + +## CI + +### CircleCI + +```yaml +- run: + name: install containedctx + command: go install github.com/sivchari/containedctx/cmd/containedctx + +- run: + name: run containedctx + command: go vet -vettool=`which containedctx` ./... +``` + +### GitHub Actions + +```yaml +- name: install containedctx + run: go install github.com/sivchari/containedctx/cmd/containedctx + +- name: run containedctx + run: go vet -vettool=`which containedctx` ./... 
+``` diff --git a/vendor/github.com/sivchari/containedctx/containedctx.go b/vendor/github.com/sivchari/containedctx/containedctx.go new file mode 100644 index 000000000..5a2c2dafd --- /dev/null +++ b/vendor/github.com/sivchari/containedctx/containedctx.go @@ -0,0 +1,54 @@ +package containedctx + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const doc = "containedctx is a linter that detects struct contained context.Context field" + +// Analyzer is the contanedctx analyzer +var Analyzer = &analysis.Analyzer{ + Name: "containedctx", + Doc: doc, + Run: run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.StructType)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch structTyp := n.(type) { + case *ast.StructType: + if structTyp.Fields.List == nil { + return + } + for _, field := range structTyp.Fields.List { + selectorExpr, ok := field.Type.(*ast.SelectorExpr) + if !ok { + continue + } + xname, ok := selectorExpr.X.(*ast.Ident) + if !ok { + continue + } + selname := selectorExpr.Sel.Name + if xname.Name+"."+selname == "context.Context" { + pass.Reportf(field.Pos(), "found a struct that contains a context.Context field") + } + } + } + }) + + return nil, nil +} diff --git a/vendor/github.com/sivchari/containedctx/go.mod b/vendor/github.com/sivchari/containedctx/go.mod new file mode 100644 index 000000000..7e37e03c4 --- /dev/null +++ b/vendor/github.com/sivchari/containedctx/go.mod @@ -0,0 +1,19 @@ +module github.com/sivchari/containedctx + +go 1.17 + +require ( + github.com/gostaticanalysis/testutil v0.4.0 + golang.org/x/tools v0.1.7 +) + +require ( + github.com/hashicorp/go-version v1.2.1 // indirect + github.com/otiai10/copy v1.2.0 // indirect + github.com/tenntenn/modver v1.0.1 // indirect + github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 // indirect + golang.org/x/mod v0.4.2 // indirect + golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e // indirect + golang.org/x/text v0.3.6 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect +) diff --git a/vendor/github.com/sivchari/containedctx/go.sum b/vendor/github.com/sivchari/containedctx/go.sum new file mode 100644 index 000000000..aa103ba7c --- /dev/null +++ b/vendor/github.com/sivchari/containedctx/go.sum @@ -0,0 +1,61 @@ +github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY= +github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= +github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI= +github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/josharian/txtarfs v0.0.0-20210218200122-0702f000015a h1:8NZHLa6Gp0hW6xJ0c3F1Kse7dJw30fOcDzHuF9sLbnE= +github.com/josharian/txtarfs v0.0.0-20210218200122-0702f000015a/go.mod h1:izVPOvVRsHiKkeGCT6tYBNWyDVuzj9wAaBb5R9qamfw= +github.com/otiai10/copy v1.2.0 h1:HvG945u96iNadPoG2/Ja2+AUJeW5YuFQMixq9yirC+k= +github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= +github.com/otiai10/curr 
v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint v1.3.1 h1:BCmzIS3n71sGfHB5NMNDB3lHYPz8fWSkCAErHed//qc= +github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= +github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= +github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= +github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= +golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/spf13/cobra/Makefile b/vendor/github.com/spf13/cobra/Makefile index 472c73bf1..5880f04eb 100644 --- a/vendor/github.com/spf13/cobra/Makefile +++ b/vendor/github.com/spf13/cobra/Makefile @@ -23,7 +23,7 @@ lint: $(info ******************** running lint tools ********************) golangci-lint run -v -test: install_deps lint +test: install_deps $(info ******************** running tests ********************) richgo test -v ./... diff --git a/vendor/github.com/spf13/cobra/README.md b/vendor/github.com/spf13/cobra/README.md index 074e3979f..1ade1081c 100644 --- a/vendor/github.com/spf13/cobra/README.md +++ b/vendor/github.com/spf13/cobra/README.md @@ -11,28 +11,6 @@ name a few. 
[This list](./projects_using_cobra.md) contains a more extensive lis [![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cobra)](https://goreportcard.com/report/github.com/spf13/cobra) [![Slack](https://img.shields.io/badge/Slack-cobra-brightgreen)](https://gophers.slack.com/archives/CD3LP1199) -# Table of Contents - -- [Overview](#overview) -- [Concepts](#concepts) - * [Commands](#commands) - * [Flags](#flags) -- [Installing](#installing) -- [Usage](#usage) - * [Using the Cobra Generator](user_guide.md#using-the-cobra-generator) - * [Using the Cobra Library](user_guide.md#using-the-cobra-library) - * [Working with Flags](user_guide.md#working-with-flags) - * [Positional and Custom Arguments](user_guide.md#positional-and-custom-arguments) - * [Example](user_guide.md#example) - * [Help Command](user_guide.md#help-command) - * [Usage Message](user_guide.md#usage-message) - * [PreRun and PostRun Hooks](user_guide.md#prerun-and-postrun-hooks) - * [Suggestions when "unknown command" happens](user_guide.md#suggestions-when-unknown-command-happens) - * [Generating documentation for your command](user_guide.md#generating-documentation-for-your-command) - * [Generating shell completions](user_guide.md#generating-shell-completions) -- [Contributing](CONTRIBUTING.md) -- [License](#license) - # Overview Cobra is a library providing a simple interface to create powerful modern CLI @@ -46,7 +24,7 @@ Cobra provides: * Fully POSIX-compliant flags (including short & long versions) * Nested subcommands * Global, local and cascading flags -* Easy generation of applications & commands with `cobra init appname` & `cobra add cmdname` +* Easy generation of applications & commands with `cobra init` & `cobra add cmdname` * Intelligent suggestions (`app srver`... did you mean `app server`?) * Automatic help generation for commands and flags * Automatic help flag recognition of `-h`, `--help`, etc. @@ -54,7 +32,7 @@ Cobra provides: * Automatically generated man pages for your application * Command aliases so you can change things without breaking them * The flexibility to define your own help, usage, etc. -* Optional tight integration with [viper](http://github.com/spf13/viper) for 12-factor apps +* Optional seamless integration with [viper](http://github.com/spf13/viper) for 12-factor apps # Concepts @@ -88,7 +66,7 @@ have children commands and optionally run an action. In the example above, 'server' is the command. -[More about cobra.Command](https://godoc.org/github.com/spf13/cobra#Command) +[More about cobra.Command](https://pkg.go.dev/github.com/spf13/cobra#Command) ## Flags @@ -117,8 +95,12 @@ import "github.com/spf13/cobra" ``` # Usage +Cobra provides its own program that will create your application and add any +commands you want. It's the easiest way to incorporate Cobra into your application. -See [User Guide](user_guide.md). +For complete details on using the Cobra generator, please read [The Cobra Generator README](https://github.com/spf13/cobra/blob/master/cobra/README.md) + +For complete details on using the Cobra library, please read the [The Cobra User Guide](user_guide.md). # License diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go index 70e9b2629..20a022b30 100644 --- a/vendor/github.com/spf13/cobra/args.go +++ b/vendor/github.com/spf13/cobra/args.go @@ -107,3 +107,15 @@ func RangeArgs(min int, max int) PositionalArgs { return nil } } + +// MatchAll allows combining several PositionalArgs to work in concert. 
+func MatchAll(pargs ...PositionalArgs) PositionalArgs { + return func(cmd *Command, args []string) error { + for _, parg := range pargs { + if err := parg(cmd, args); err != nil { + return err + } + } + return nil + } +} diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go index 733f4d121..6c360c595 100644 --- a/vendor/github.com/spf13/cobra/bash_completions.go +++ b/vendor/github.com/spf13/cobra/bash_completions.go @@ -24,7 +24,7 @@ func writePreamble(buf io.StringWriter, name string) { WriteStringAndCheck(buf, fmt.Sprintf(` __%[1]s_debug() { - if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then + if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then echo "$*" >> "${BASH_COMP_DEBUG_FILE}" fi } @@ -134,7 +134,7 @@ __%[1]s_handle_go_custom_completion() $filteringCmd elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then # File completion for directories only - local subDir + local subdir # Use printf to strip any trailing newline subdir=$(printf "%%s" "${out[0]}") if [ -n "$subdir" ]; then @@ -187,13 +187,19 @@ __%[1]s_handle_reply() PREFIX="" cur="${cur#*=}" ${flags_completion[${index}]} - if [ -n "${ZSH_VERSION}" ]; then + if [ -n "${ZSH_VERSION:-}" ]; then # zsh completion needs --flag= prefix eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )" fi fi fi - return 0; + + if [[ -z "${flag_parsing_disabled}" ]]; then + # If flag parsing is enabled, we have completed the flags and can return. + # If flag parsing is disabled, we may not know all (or any) of the flags, so we fallthrough + # to possibly call handle_go_custom_completion. + return 0; + fi ;; esac @@ -232,13 +238,13 @@ __%[1]s_handle_reply() fi if [[ ${#COMPREPLY[@]} -eq 0 ]]; then - if declare -F __%[1]s_custom_func >/dev/null; then - # try command name qualified custom func - __%[1]s_custom_func - else - # otherwise fall back to unqualified for compatibility - declare -F __custom_func >/dev/null && __custom_func - fi + if declare -F __%[1]s_custom_func >/dev/null; then + # try command name qualified custom func + __%[1]s_custom_func + else + # otherwise fall back to unqualified for compatibility + declare -F __custom_func >/dev/null && __custom_func + fi fi # available in bash-completion >= 2, not always present on macOS @@ -272,7 +278,7 @@ __%[1]s_handle_flag() # if a command required a flag, and we found it, unset must_have_one_flag() local flagname=${words[c]} - local flagvalue + local flagvalue="" # if the word contained an = if [[ ${words[c]} == *"="* ]]; then flagvalue=${flagname#*=} # take in as flagvalue after the = @@ -291,7 +297,7 @@ __%[1]s_handle_flag() # keep flag value with flagname as flaghash # flaghash variable is an associative array which is only supported in bash > 3. 
- if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + if [[ -z "${BASH_VERSION:-}" || "${BASH_VERSINFO[0]:-}" -gt 3 ]]; then if [ -n "${flagvalue}" ] ; then flaghash[${flagname}]=${flagvalue} elif [ -n "${words[ $((c+1)) ]}" ] ; then @@ -303,7 +309,7 @@ __%[1]s_handle_flag() # skip the argument to a two word flag if [[ ${words[c]} != *"="* ]] && __%[1]s_contains_word "${words[c]}" "${two_word_flags[@]}"; then - __%[1]s_debug "${FUNCNAME[0]}: found a flag ${words[c]}, skip the next argument" + __%[1]s_debug "${FUNCNAME[0]}: found a flag ${words[c]}, skip the next argument" c=$((c+1)) # if we are looking for a flags value, don't show commands if [[ $c -eq $cword ]]; then @@ -363,7 +369,7 @@ __%[1]s_handle_word() __%[1]s_handle_command elif __%[1]s_contains_word "${words[c]}" "${command_aliases[@]}"; then # aliashash variable is an associative array which is only supported in bash > 3. - if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + if [[ -z "${BASH_VERSION:-}" || "${BASH_VERSINFO[0]:-}" -gt 3 ]]; then words[c]=${aliashash[${words[c]}]} __%[1]s_handle_command else @@ -394,6 +400,7 @@ func writePostscript(buf io.StringWriter, name string) { fi local c=0 + local flag_parsing_disabled= local flags=() local two_word_flags=() local local_nonpersistent_flags=() @@ -403,8 +410,8 @@ func writePostscript(buf io.StringWriter, name string) { local command_aliases=() local must_have_one_flag=() local must_have_one_noun=() - local has_completion_function - local last_command + local has_completion_function="" + local last_command="" local nouns=() local noun_aliases=() @@ -535,6 +542,11 @@ func writeFlags(buf io.StringWriter, cmd *Command) { flags_completion=() `) + + if cmd.DisableFlagParsing { + WriteStringAndCheck(buf, " flag_parsing_disabled=1\n") + } + localNonPersistentFlags := cmd.LocalNonPersistentFlags() cmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { if nonCompletableFlag(flag) { @@ -609,7 +621,7 @@ func writeCmdAliases(buf io.StringWriter, cmd *Command) { sort.Strings(cmd.Aliases) - WriteStringAndCheck(buf, fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n")) + WriteStringAndCheck(buf, fmt.Sprint(` if [[ -z "${BASH_VERSION:-}" || "${BASH_VERSINFO[0]:-}" -gt 3 ]]; then`, "\n")) for _, value := range cmd.Aliases { WriteStringAndCheck(buf, fmt.Sprintf(" command_aliases+=(%q)\n", value)) WriteStringAndCheck(buf, fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name())) diff --git a/vendor/github.com/spf13/cobra/bash_completionsV2.go b/vendor/github.com/spf13/cobra/bash_completionsV2.go index 8859b57c4..82d26c175 100644 --- a/vendor/github.com/spf13/cobra/bash_completionsV2.go +++ b/vendor/github.com/spf13/cobra/bash_completionsV2.go @@ -138,13 +138,42 @@ __%[1]s_process_completion_results() { _filedir -d fi else - __%[1]s_handle_standard_completion_case + __%[1]s_handle_completion_types fi __%[1]s_handle_special_char "$cur" : __%[1]s_handle_special_char "$cur" = } +__%[1]s_handle_completion_types() { + __%[1]s_debug "__%[1]s_handle_completion_types: COMP_TYPE is $COMP_TYPE" + + case $COMP_TYPE in + 37|42) + # Type: menu-complete/menu-complete-backward and insert-completions + # If the user requested inserting one completion at a time, or all + # completions at once on the command-line we must remove the descriptions. 
+ # https://github.com/spf13/cobra/issues/1508 + local tab comp + tab=$(printf '\t') + while IFS='' read -r comp; do + # Strip any description + comp=${comp%%%%$tab*} + # Only consider the completions that match + comp=$(compgen -W "$comp" -- "$cur") + if [ -n "$comp" ]; then + COMPREPLY+=("$comp") + fi + done < <(printf "%%s\n" "${out[@]}") + ;; + + *) + # Type: complete (normal completion) + __%[1]s_handle_standard_completion_case + ;; + esac +} + __%[1]s_handle_standard_completion_case() { local tab comp tab=$(printf '\t') diff --git a/vendor/github.com/spf13/cobra/command_notwin.go b/vendor/github.com/spf13/cobra/command_notwin.go index 6159c1cc1..bb5dad90b 100644 --- a/vendor/github.com/spf13/cobra/command_notwin.go +++ b/vendor/github.com/spf13/cobra/command_notwin.go @@ -1,3 +1,4 @@ +//go:build !windows // +build !windows package cobra diff --git a/vendor/github.com/spf13/cobra/command_win.go b/vendor/github.com/spf13/cobra/command_win.go index 8768b1736..a84f5a82a 100644 --- a/vendor/github.com/spf13/cobra/command_win.go +++ b/vendor/github.com/spf13/cobra/command_win.go @@ -1,3 +1,4 @@ +//go:build windows // +build windows package cobra diff --git a/vendor/github.com/spf13/cobra/completions.go b/vendor/github.com/spf13/cobra/completions.go index b849b9c84..9ecd56a47 100644 --- a/vendor/github.com/spf13/cobra/completions.go +++ b/vendor/github.com/spf13/cobra/completions.go @@ -93,6 +93,8 @@ type CompletionOptions struct { // DisableDescriptions turns off all completion descriptions for shells // that support them DisableDescriptions bool + // HiddenDefaultCmd makes the default 'completion' command hidden + HiddenDefaultCmd bool } // NoFileCompletions can be used to disable file completion for commands that should @@ -226,7 +228,17 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi if c.Root().TraverseChildren { finalCmd, finalArgs, err = c.Root().Traverse(trimmedArgs) } else { - finalCmd, finalArgs, err = c.Root().Find(trimmedArgs) + // For Root commands that don't specify any value for their Args fields, when we call + // Find(), if those Root commands don't have any sub-commands, they will accept arguments. + // However, because we have added the __complete sub-command in the current code path, the + // call to Find() -> legacyArgs() will return an error if there are any arguments. + // To avoid this, we first remove the __complete command to get back to having no sub-commands. + rootCmd := c.Root() + if len(rootCmd.Commands()) == 1 { + rootCmd.RemoveCommand(c) + } + + finalCmd, finalArgs, err = rootCmd.Find(trimmedArgs) } if err != nil { // Unable to find the real command. E.g., someInvalidCmd @@ -266,6 +278,12 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi } } + // We only remove the flags from the arguments if DisableFlagParsing is not set. + // This is important for commands which have requested to do their own flag completion. 
+ if !finalCmd.DisableFlagParsing { + finalArgs = finalCmd.Flags().Args() + } + if flag != nil && flagCompletion { // Check if we are completing a flag value subject to annotations if validExts, present := flag.Annotations[BashCompFilenameExt]; present { @@ -290,12 +308,16 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi } } + var completions []string + var directive ShellCompDirective + + // Note that we want to perform flagname completion even if finalCmd.DisableFlagParsing==true; + // doing this allows for completion of persistant flag names even for commands that disable flag parsing. + // // When doing completion of a flag name, as soon as an argument starts with // a '-' we know it is a flag. We cannot use isFlagArg() here as it requires // the flag name to be complete if flag == nil && len(toComplete) > 0 && toComplete[0] == '-' && !strings.Contains(toComplete, "=") && flagCompletion { - var completions []string - // First check for required flags completions = completeRequireFlags(finalCmd, toComplete) @@ -322,86 +344,86 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi }) } - directive := ShellCompDirectiveNoFileComp + directive = ShellCompDirectiveNoFileComp if len(completions) == 1 && strings.HasSuffix(completions[0], "=") { // If there is a single completion, the shell usually adds a space // after the completion. We don't want that if the flag ends with an = directive = ShellCompDirectiveNoSpace } - return finalCmd, completions, directive, nil - } - // We only remove the flags from the arguments if DisableFlagParsing is not set. - // This is important for commands which have requested to do their own flag completion. - if !finalCmd.DisableFlagParsing { - finalArgs = finalCmd.Flags().Args() - } - - var completions []string - directive := ShellCompDirectiveDefault - if flag == nil { - foundLocalNonPersistentFlag := false - // If TraverseChildren is true on the root command we don't check for - // local flags because we can use a local flag on a parent command - if !finalCmd.Root().TraverseChildren { - // Check if there are any local, non-persistent flags on the command-line - localNonPersistentFlags := finalCmd.LocalNonPersistentFlags() - finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { - if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed { - foundLocalNonPersistentFlag = true - } - }) + if !finalCmd.DisableFlagParsing { + // If DisableFlagParsing==false, we have completed the flags as known by Cobra; + // we can return what we found. + // If DisableFlagParsing==true, Cobra may not be aware of all flags, so we + // let the logic continue to see if ValidArgsFunction needs to be called. 
+ return finalCmd, completions, directive, nil } - - // Complete subcommand names, including the help command - if len(finalArgs) == 0 && !foundLocalNonPersistentFlag { - // We only complete sub-commands if: - // - there are no arguments on the command-line and - // - there are no local, non-persistent flags on the command-line or TraverseChildren is true - for _, subCmd := range finalCmd.Commands() { - if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand { - if strings.HasPrefix(subCmd.Name(), toComplete) { - completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short)) + } else { + directive = ShellCompDirectiveDefault + if flag == nil { + foundLocalNonPersistentFlag := false + // If TraverseChildren is true on the root command we don't check for + // local flags because we can use a local flag on a parent command + if !finalCmd.Root().TraverseChildren { + // Check if there are any local, non-persistent flags on the command-line + localNonPersistentFlags := finalCmd.LocalNonPersistentFlags() + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed { + foundLocalNonPersistentFlag = true + } + }) + } + + // Complete subcommand names, including the help command + if len(finalArgs) == 0 && !foundLocalNonPersistentFlag { + // We only complete sub-commands if: + // - there are no arguments on the command-line and + // - there are no local, non-persistent flags on the command-line or TraverseChildren is true + for _, subCmd := range finalCmd.Commands() { + if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand { + if strings.HasPrefix(subCmd.Name(), toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short)) + } + directive = ShellCompDirectiveNoFileComp } - directive = ShellCompDirectiveNoFileComp } } - } - // Complete required flags even without the '-' prefix - completions = append(completions, completeRequireFlags(finalCmd, toComplete)...) + // Complete required flags even without the '-' prefix + completions = append(completions, completeRequireFlags(finalCmd, toComplete)...) - // Always complete ValidArgs, even if we are completing a subcommand name. - // This is for commands that have both subcommands and ValidArgs. - if len(finalCmd.ValidArgs) > 0 { - if len(finalArgs) == 0 { - // ValidArgs are only for the first argument - for _, validArg := range finalCmd.ValidArgs { - if strings.HasPrefix(validArg, toComplete) { - completions = append(completions, validArg) + // Always complete ValidArgs, even if we are completing a subcommand name. + // This is for commands that have both subcommands and ValidArgs. + if len(finalCmd.ValidArgs) > 0 { + if len(finalArgs) == 0 { + // ValidArgs are only for the first argument + for _, validArg := range finalCmd.ValidArgs { + if strings.HasPrefix(validArg, toComplete) { + completions = append(completions, validArg) + } } - } - directive = ShellCompDirectiveNoFileComp + directive = ShellCompDirectiveNoFileComp - // If no completions were found within commands or ValidArgs, - // see if there are any ArgAliases that should be completed. - if len(completions) == 0 { - for _, argAlias := range finalCmd.ArgAliases { - if strings.HasPrefix(argAlias, toComplete) { - completions = append(completions, argAlias) + // If no completions were found within commands or ValidArgs, + // see if there are any ArgAliases that should be completed. 
+ if len(completions) == 0 { + for _, argAlias := range finalCmd.ArgAliases { + if strings.HasPrefix(argAlias, toComplete) { + completions = append(completions, argAlias) + } } } } + + // If there are ValidArgs specified (even if they don't match), we stop completion. + // Only one of ValidArgs or ValidArgsFunction can be used for a single command. + return finalCmd, completions, directive, nil } - // If there are ValidArgs specified (even if they don't match), we stop completion. - // Only one of ValidArgs or ValidArgsFunction can be used for a single command. - return finalCmd, completions, directive, nil + // Let the logic continue so as to add any ValidArgsFunction completions, + // even if we already found sub-commands. + // This is for commands that have subcommands but also specify a ValidArgsFunction. } - - // Let the logic continue so as to add any ValidArgsFunction completions, - // even if we already found sub-commands. - // This is for commands that have subcommands but also specify a ValidArgsFunction. } // Find the completion function for the flag or command @@ -589,39 +611,43 @@ func (c *Command) initDefaultCompletionCmd() { completionCmd := &Command{ Use: compCmdName, - Short: "generate the autocompletion script for the specified shell", - Long: fmt.Sprintf(` -Generate the autocompletion script for %[1]s for the specified shell. + Short: "Generate the autocompletion script for the specified shell", + Long: fmt.Sprintf(`Generate the autocompletion script for %[1]s for the specified shell. See each sub-command's help for details on how to use the generated script. `, c.Root().Name()), Args: NoArgs, ValidArgsFunction: NoFileCompletions, + Hidden: c.CompletionOptions.HiddenDefaultCmd, } c.AddCommand(completionCmd) out := c.OutOrStdout() noDesc := c.CompletionOptions.DisableDescriptions - shortDesc := "generate the autocompletion script for %s" + shortDesc := "Generate the autocompletion script for %s" bash := &Command{ Use: "bash", Short: fmt.Sprintf(shortDesc, "bash"), - Long: fmt.Sprintf(` -Generate the autocompletion script for the bash shell. + Long: fmt.Sprintf(`Generate the autocompletion script for the bash shell. This script depends on the 'bash-completion' package. If it is not installed already, you can install it via your OS's package manager. To load completions in your current shell session: -$ source <(%[1]s completion bash) + + source <(%[1]s completion bash) To load completions for every new session, execute once: -Linux: - $ %[1]s completion bash > /etc/bash_completion.d/%[1]s -MacOS: - $ %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s + +#### Linux: + + %[1]s completion bash > /etc/bash_completion.d/%[1]s + +#### macOS: + + %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s You will need to start a new shell for this setup to take effect. - `, c.Root().Name()), +`, c.Root().Name()), Args: NoArgs, DisableFlagsInUseLine: true, ValidArgsFunction: NoFileCompletions, @@ -636,19 +662,22 @@ You will need to start a new shell for this setup to take effect. zsh := &Command{ Use: "zsh", Short: fmt.Sprintf(shortDesc, "zsh"), - Long: fmt.Sprintf(` -Generate the autocompletion script for the zsh shell. + Long: fmt.Sprintf(`Generate the autocompletion script for the zsh shell. If shell completion is not already enabled in your environment you will need to enable it. 
You can execute the following once: -$ echo "autoload -U compinit; compinit" >> ~/.zshrc + echo "autoload -U compinit; compinit" >> ~/.zshrc To load completions for every new session, execute once: -# Linux: -$ %[1]s completion zsh > "${fpath[1]}/_%[1]s" -# macOS: -$ %[1]s completion zsh > /usr/local/share/zsh/site-functions/_%[1]s + +#### Linux: + + %[1]s completion zsh > "${fpath[1]}/_%[1]s" + +#### macOS: + + %[1]s completion zsh > /usr/local/share/zsh/site-functions/_%[1]s You will need to start a new shell for this setup to take effect. `, c.Root().Name()), @@ -668,14 +697,15 @@ You will need to start a new shell for this setup to take effect. fish := &Command{ Use: "fish", Short: fmt.Sprintf(shortDesc, "fish"), - Long: fmt.Sprintf(` -Generate the autocompletion script for the fish shell. + Long: fmt.Sprintf(`Generate the autocompletion script for the fish shell. To load completions in your current shell session: -$ %[1]s completion fish | source + + %[1]s completion fish | source To load completions for every new session, execute once: -$ %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish + + %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish You will need to start a new shell for this setup to take effect. `, c.Root().Name()), @@ -692,11 +722,11 @@ You will need to start a new shell for this setup to take effect. powershell := &Command{ Use: "powershell", Short: fmt.Sprintf(shortDesc, "powershell"), - Long: fmt.Sprintf(` -Generate the autocompletion script for powershell. + Long: fmt.Sprintf(`Generate the autocompletion script for powershell. To load completions in your current shell session: -PS C:\> %[1]s completion powershell | Out-String | Invoke-Expression + + %[1]s completion powershell | Out-String | Invoke-Expression To load completions for every new session, add the output of the above command to your powershell profile. 
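Usage sketch for the completion changes above (not part of this patch): the updated cobra exposes a CompletionOptions struct on the root command, and the hunk in initDefaultCompletionCmd wires its HiddenDefaultCmd and DisableDescriptions fields into the generated "completion" command. The snippet below is a minimal, hypothetical consumer; the program name "example" and the whole main function are illustrative assumptions, and only the two CompletionOptions fields come from the code shown above.

    package main

    import (
        "fmt"

        "github.com/spf13/cobra"
    )

    func main() {
        // Hypothetical root command; everything except the two
        // CompletionOptions fields below is illustrative.
        rootCmd := &cobra.Command{
            Use:   "example",
            Short: "Demo for the updated completion command",
            Run: func(cmd *cobra.Command, args []string) {
                fmt.Println("hello")
            },
        }

        // Hide the auto-generated "completion" command from help output,
        // matching the new Hidden field seen in initDefaultCompletionCmd.
        rootCmd.CompletionOptions.HiddenDefaultCmd = true

        // Strip descriptions from completion candidates, matching the
        // DisableDescriptions option referenced in the hunk above.
        rootCmd.CompletionOptions.DisableDescriptions = true

        cobra.CheckErr(rootCmd.Execute())
    }

With HiddenDefaultCmd set, "example completion bash" still runs, but the completion subcommand is omitted from "example --help".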
diff --git a/vendor/github.com/spf13/cobra/go.mod b/vendor/github.com/spf13/cobra/go.mod index 1fb9439dd..85e169eba 100644 --- a/vendor/github.com/spf13/cobra/go.mod +++ b/vendor/github.com/spf13/cobra/go.mod @@ -1,11 +1,11 @@ module github.com/spf13/cobra -go 1.14 +go 1.15 require ( - github.com/cpuguy83/go-md2man/v2 v2.0.0 + github.com/cpuguy83/go-md2man/v2 v2.0.1 github.com/inconshreveable/mousetrap v1.0.0 github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.8.1 + github.com/spf13/viper v1.10.0 gopkg.in/yaml.v2 v2.4.0 ) diff --git a/vendor/github.com/spf13/cobra/go.sum b/vendor/github.com/spf13/cobra/go.sum index 3e22df29a..0ba92a6ae 100644 --- a/vendor/github.com/spf13/cobra/go.sum +++ b/vendor/github.com/spf13/cobra/go.sum @@ -18,6 +18,15 @@ cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmW cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -26,7 +35,7 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -39,24 +48,47 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/OneOfOne/xxhash v1.2.2/go.mod 
h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= 
+github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -66,20 +98,33 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt 
v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -88,6 +133,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -105,6 +151,7 @@ github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -118,10 +165,12 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= 
+github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -133,100 +182,147 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= 
+github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= 
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod 
h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= 
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/spf13/viper v1.10.0 h1:mXH0UwHS4D2HwWZa75im4xIQynLfblmWV7qcWpfv0yk= +github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -236,14 +332,15 @@ 
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -251,16 +348,20 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod 
h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -296,10 +397,11 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -307,9 +409,11 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -332,6 +436,9 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -343,7 +450,12 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -357,21 +469,30 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -390,13 +511,28 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211205182925-97ca703d548d h1:FjkYO/PPp4Wi0EAUOVLxePm7qVW4r4ctbWpURyuOD0E= +golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -404,8 +540,10 @@ golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -415,7 +553,6 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -423,9 +560,9 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod 
h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -459,7 +596,11 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -485,7 +626,17 @@ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34q google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.62.0/go.mod 
h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -533,7 +684,29 @@ google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.19.0/go.mod 
h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -553,7 +726,15 @@ google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA5 google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -566,14 +747,20 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 
v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/vendor/github.com/spf13/cobra/powershell_completions.go b/vendor/github.com/spf13/cobra/powershell_completions.go index 59234c09f..62d719f0b 100644 --- a/vendor/github.com/spf13/cobra/powershell_completions.go +++ b/vendor/github.com/spf13/cobra/powershell_completions.go @@ -50,7 +50,7 @@ Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock { if ($Command.Length -gt $CursorPosition) { $Command=$Command.Substring(0,$CursorPosition) } - __%[1]s_debug "Truncated command: $Command" + __%[1]s_debug "Truncated command: $Command" $ShellCompDirectiveError=%[3]d $ShellCompDirectiveNoSpace=%[4]d @@ -58,7 +58,7 @@ Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock { $ShellCompDirectiveFilterFileExt=%[6]d $ShellCompDirectiveFilterDirs=%[7]d - # Prepare the command to request completions for the program. + # Prepare the command to request completions for the program. # Split the command at the first space to separate the program and arguments. $Program,$Arguments = $Command.Split(" ",2) $RequestComp="$Program %[2]s $Arguments" @@ -233,7 +233,7 @@ Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock { Default { # Like MenuComplete but we don't want to add a space here because # the user need to press space anyway to get the completion. - # Description will not be shown because thats not possible with TabCompleteNext + # Description will not be shown because that's not possible with TabCompleteNext [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars), "$($comp.Name)", 'ParameterValue', "$($comp.Description)") } } diff --git a/vendor/github.com/spf13/cobra/projects_using_cobra.md b/vendor/github.com/spf13/cobra/projects_using_cobra.md index d98a71e36..8410c9938 100644 --- a/vendor/github.com/spf13/cobra/projects_using_cobra.md +++ b/vendor/github.com/spf13/cobra/projects_using_cobra.md @@ -4,6 +4,7 @@ - [Bleve](http://www.blevesearch.com/) - [CockroachDB](http://www.cockroachlabs.com/) - [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) +- [Datree](https://github.com/datreeio/datree) - [Delve](https://github.com/derekparker/delve) - [Docker (distribution)](https://github.com/docker/distribution) - [Etcd](https://etcd.io/) @@ -14,25 +15,36 @@ - [GitHub Labeler](https://github.com/erdaltsksn/gh-label) - [Golangci-lint](https://golangci-lint.run) - [GopherJS](http://www.gopherjs.org/) +- [GoReleaser](https://goreleaser.com) - [Helm](https://helm.sh) - [Hugo](https://gohugo.io) +- [Infracost](https://github.com/infracost/infracost) - [Istio](https://istio.io) - [Kool](https://github.com/kool-dev/kool) - [Kubernetes](http://kubernetes.io/) - [Linkerd](https://linkerd.io/) - [Mattermost-server](https://github.com/mattermost/mattermost-server) +- [Mercure](https://mercure.rocks/) +- [Meroxa CLI](https://github.com/meroxa/cli) - [Metal Stack CLI](https://github.com/metal-stack/metalctl) - [Moby (former Docker)](https://github.com/moby/moby) +- [Moldy](https://github.com/Moldy-Community/moldy) +- [Multi-gitter](https://github.com/lindell/multi-gitter) - [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) +- [nFPM](https://nfpm.goreleaser.com) - [OpenShift](https://www.openshift.com/) - [Ory 
Hydra](https://github.com/ory/hydra) - [Ory Kratos](https://github.com/ory/kratos) - [Pouch](https://github.com/alibaba/pouch) - [ProjectAtomic (enterprise)](http://www.projectatomic.io/) - [Prototool](https://github.com/uber/prototool) +- [QRcp](https://github.com/claudiodangelis/qrcp) - [Random](https://github.com/erdaltsksn/random) - [Rclone](https://rclone.org/) +- [Scaleway CLI](https://github.com/scaleway/scaleway-cli) - [Skaffold](https://skaffold.dev/) - [Tendermint](https://github.com/tendermint/tendermint) - [Twitch CLI](https://github.com/twitchdev/twitch-cli) +- [UpCloud CLI (`upctl`)](https://github.com/UpCloudLtd/upcloud-cli) +- VMware's [Tanzu Community Edition](https://github.com/vmware-tanzu/community-edition) & [Tanzu Framework](https://github.com/vmware-tanzu/tanzu-framework) - [Werf](https://werf.io/) diff --git a/vendor/github.com/spf13/cobra/shell_completions.md b/vendor/github.com/spf13/cobra/shell_completions.md index 4ba06a11c..03add869b 100644 --- a/vendor/github.com/spf13/cobra/shell_completions.md +++ b/vendor/github.com/spf13/cobra/shell_completions.md @@ -28,17 +28,17 @@ and then modifying the generated `cmd/completion.go` file to look something like var completionCmd = &cobra.Command{ Use: "completion [bash|zsh|fish|powershell]", Short: "Generate completion script", - Long: `To load completions: + Long: fmt.Sprintf(`To load completions: Bash: - $ source <(yourprogram completion bash) + $ source <(%[1]s completion bash) # To load completions for each session, execute once: # Linux: - $ yourprogram completion bash > /etc/bash_completion.d/yourprogram + $ %[1]s completion bash > /etc/bash_completion.d/%[1]s # macOS: - $ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram + $ %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s Zsh: @@ -48,25 +48,25 @@ Zsh: $ echo "autoload -U compinit; compinit" >> ~/.zshrc # To load completions for each session, execute once: - $ yourprogram completion zsh > "${fpath[1]}/_yourprogram" + $ %[1]s completion zsh > "${fpath[1]}/_%[1]s" # You will need to start a new shell for this setup to take effect. fish: - $ yourprogram completion fish | source + $ %[1]s completion fish | source # To load completions for each session, execute once: - $ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish + $ %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish PowerShell: - PS> yourprogram completion powershell | Out-String | Invoke-Expression + PS> %[1]s completion powershell | Out-String | Invoke-Expression # To load completions for every new session, run: - PS> yourprogram completion powershell > yourprogram.ps1 + PS> %[1]s completion powershell > %[1]s.ps1 # and source this file from your PowerShell profile. -`, +`,cmd.Root().Name()), DisableFlagsInUseLine: true, ValidArgs: []string{"bash", "zsh", "fish", "powershell"}, Args: cobra.ExactValidArgs(1), diff --git a/vendor/github.com/spf13/cobra/user_guide.md b/vendor/github.com/spf13/cobra/user_guide.md index 311abce28..e87cdf218 100644 --- a/vendor/github.com/spf13/cobra/user_guide.md +++ b/vendor/github.com/spf13/cobra/user_guide.md @@ -32,7 +32,7 @@ func main() { Cobra provides its own program that will create your application and add any commands you want. It's the easiest way to incorporate Cobra into your application. -[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it. 
+For complete details on using the Cobra generator, please read [The Cobra Generator README](https://github.com/spf13/cobra/blob/master/cobra/README.md) ## Using the Cobra Library @@ -281,7 +281,7 @@ func init() { In this example, the persistent flag `author` is bound with `viper`. **Note**: the variable `author` will not be set to the value from config, -when the `--author` flag is not provided by user. +when the `--author` flag is provided by user. More in [viper documentation](https://github.com/spf13/viper#working-with-flags). @@ -315,6 +315,7 @@ The following validators are built in: - `ExactArgs(int)` - the command will report an error if there are not exactly N positional args. - `ExactValidArgs(int)` - the command will report an error if there are not exactly N positional args OR if there are any positional args that are not in the `ValidArgs` field of `Command` - `RangeArgs(min, max)` - the command will report an error if the number of args is not between the minimum and maximum number of expected args. +- `MatchAll(pargs ...PositionalArgs)` - enables combining existing checks with arbitrary other checks (e.g. you want to check the ExactArgs length along with other qualities). An example of setting the custom validator: diff --git a/vendor/github.com/spf13/cobra/zsh_completions.go b/vendor/github.com/spf13/cobra/zsh_completions.go index 1afec30ea..624adab53 100644 --- a/vendor/github.com/spf13/cobra/zsh_completions.go +++ b/vendor/github.com/spf13/cobra/zsh_completions.go @@ -202,7 +202,7 @@ _%[1]s() _arguments '*:filename:'"$filteringCmd" elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then # File completion for directories only - local subDir + local subdir subdir="${completions[1]}" if [ -n "$subdir" ]; then __%[1]s_debug "Listing directories in $subdir" @@ -250,7 +250,7 @@ _%[1]s() # don't run the completion function when being source-ed or eval-ed if [ "$funcstack[1]" = "_%[1]s" ]; then - _%[1]s + _%[1]s fi `, name, compCmd, ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, diff --git a/vendor/github.com/spf13/viper/.golangci.yml b/vendor/github.com/spf13/viper/.golangci.yml index 4f970acb1..52e77eef0 100644 --- a/vendor/github.com/spf13/viper/.golangci.yml +++ b/vendor/github.com/spf13/viper/.golangci.yml @@ -20,7 +20,6 @@ linters: - exhaustive - exportloopref - gci - - goconst - gofmt - gofumpt - goimports @@ -62,6 +61,7 @@ linters: # - gochecknoglobals # - gochecknoinits # - gocognit + # - goconst # - gocritic # - gocyclo # - godot diff --git a/vendor/github.com/spf13/viper/Makefile b/vendor/github.com/spf13/viper/Makefile index b0f9acf24..1279096f4 100644 --- a/vendor/github.com/spf13/viper/Makefile +++ b/vendor/github.com/spf13/viper/Makefile @@ -15,8 +15,8 @@ TEST_FORMAT = short-verbose endif # Dependency versions -GOTESTSUM_VERSION = 1.6.4 -GOLANGCI_VERSION = 1.40.1 +GOTESTSUM_VERSION = 1.7.0 +GOLANGCI_VERSION = 1.43.0 # Add the ability to override some variables # Use with care diff --git a/vendor/github.com/spf13/viper/fs.go b/vendor/github.com/spf13/viper/fs.go new file mode 100644 index 000000000..ecb1769e5 --- /dev/null +++ b/vendor/github.com/spf13/viper/fs.go @@ -0,0 +1,65 @@ +//go:build go1.16 && finder +// +build go1.16,finder + +package viper + +import ( + "errors" + "io/fs" + "path" +) + +type finder struct { + paths []string + fileNames []string + extensions []string + + withoutExtension bool +} + +func (f finder) Find(fsys fs.FS) (string, error) { + for _, searchPath := range f.paths { + for _, fileName := 
range f.fileNames { + for _, extension := range f.extensions { + filePath := path.Join(searchPath, fileName+"."+extension) + + ok, err := fileExists(fsys, filePath) + if err != nil { + return "", err + } + + if ok { + return filePath, nil + } + } + + if f.withoutExtension { + filePath := path.Join(searchPath, fileName) + + ok, err := fileExists(fsys, filePath) + if err != nil { + return "", err + } + + if ok { + return filePath, nil + } + } + } + } + + return "", nil +} + +func fileExists(fsys fs.FS, filePath string) (bool, error) { + fileInfo, err := fs.Stat(fsys, filePath) + if err == nil { + return !fileInfo.IsDir(), nil + } + + if errors.Is(err, fs.ErrNotExist) { + return false, nil + } + + return false, err +} diff --git a/vendor/github.com/spf13/viper/go.mod b/vendor/github.com/spf13/viper/go.mod index fcc1a5d92..91f39a3a2 100644 --- a/vendor/github.com/spf13/viper/go.mod +++ b/vendor/github.com/spf13/viper/go.mod @@ -1,20 +1,69 @@ module github.com/spf13/viper -go 1.12 +go 1.17 require ( github.com/fsnotify/fsnotify v1.5.1 github.com/hashicorp/hcl v1.0.0 github.com/magiconair/properties v1.8.5 - github.com/mitchellh/mapstructure v1.4.2 + github.com/mitchellh/mapstructure v1.4.3 github.com/pelletier/go-toml v1.9.4 - github.com/sagikazarmark/crypt v0.1.0 + github.com/sagikazarmark/crypt v0.4.0 github.com/spf13/afero v1.6.0 github.com/spf13/cast v1.4.1 github.com/spf13/jwalterweatherman v1.1.0 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.7.0 github.com/subosito/gotenv v1.2.0 - gopkg.in/ini.v1 v1.63.2 + gopkg.in/ini.v1 v1.66.2 gopkg.in/yaml.v2 v2.4.0 ) + +require ( + cloud.google.com/go v0.99.0 // indirect + cloud.google.com/go/firestore v1.6.1 // indirect + github.com/armon/go-metrics v0.3.10 // indirect + github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 // indirect + github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490 // indirect + github.com/coreos/go-semver v0.3.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/envoyproxy/go-control-plane v0.10.1 // indirect + github.com/envoyproxy/protoc-gen-validate v0.6.2 // indirect + github.com/fatih/color v1.13.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/google/go-cmp v0.5.6 // indirect + github.com/googleapis/gax-go/v2 v2.1.1 // indirect + github.com/hashicorp/consul/api v1.12.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-hclog v1.0.0 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-rootcerts v1.0.2 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/hashicorp/serf v0.9.6 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + go.etcd.io/etcd/api/v3 v3.5.1 // indirect + go.etcd.io/etcd/client/pkg/v3 v3.5.1 // indirect + go.etcd.io/etcd/client/v2 v2.305.1 // indirect + go.opencensus.io v0.23.0 // indirect + golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect + golang.org/x/net 
v0.0.0-20210813160813-60bc85c4be6d // indirect + golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect + golang.org/x/sys v0.0.0-20211210111614-af8b64212486 // indirect + golang.org/x/text v0.3.7 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + google.golang.org/api v0.63.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // indirect + google.golang.org/grpc v1.43.0 // indirect + google.golang.org/protobuf v1.27.1 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect +) diff --git a/vendor/github.com/spf13/viper/go.sum b/vendor/github.com/spf13/viper/go.sum index 3e0a13ea1..ceb4d9804 100644 --- a/vendor/github.com/spf13/viper/go.sum +++ b/vendor/github.com/spf13/viper/go.sum @@ -22,8 +22,11 @@ cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAV cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3 h1:wPBktZFzYBcCZVARvwVKqH1uEj+aLXofJEtrb4oOsio= cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0 h1:y/cM2iqGgGi5D5DQZl6D9STN/3dR/Vx5Mp8s752oJTY= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -32,8 +35,8 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.6.0 h1:dMIWvm+3O0E3DM7kcZPH0FBQ94Xg/OMkdTNDaY9itbI= -cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= +cloud.google.com/go/firestore v1.6.1 h1:8rBq3zRjnHx8UtBvaOWqBB1xq9jH6/wltfQLlTMh2Fw= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -46,24 +49,49 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/OneOfOne/xxhash v1.2.2/go.mod 
h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10 h1:FR+drcQStOe+32sYyJYyZ7FIdgoGGBnwLl+flodp8Uo= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0 h1:t/LhUZLVitR1Ow2YOnduCsavhwFUklBMoGVYUCqmCqk= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 
h1:hzAQntlaYRkVSFEfj9OTWlVV1H155FMD8BTKktLv0QI= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490 h1:KwaoQzs/WeUxxJqiJsZ4euOly1Az/IgZXXSxlD/UBNk= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= @@ -77,23 +105,36 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.1 h1:cgDRLG7bs59Zd+apAWuzLQL95obVYAymNJek76W3mgw= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.6.2 h1:JiO+kJTpmYGjEodY7O1Zk8oZcNz1+f30UtwtXoFUPzE= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod 
h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -158,29 +199,36 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.1.0 h1:6DWmvNpomjL1+3liNSZbVns3zsYzzCjm6pRBO1tLeso= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.10.1 h1:MwZJp86nlnL+6+W1Zly4JUuVn9YHhMggBirMpHGD7kw= -github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/api v1.12.0 h1:k3y1FYv6nuKyNTqj6w9gXOx5r5CfLj/k/euUeBXj1OY= +github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= github.com/hashicorp/consul/sdk v0.8.0 h1:OJtKBtEjboEZvG6AOUdh4Z1Zbyu0WcxQ0qatRrZHTVU= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= 
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-hclog v0.12.0 h1:d4QkX8FRTYaKaCZBoXYY8zJX2BXjWxurN/GA2tkrmZM= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0= +github.com/hashicorp/go-hclog v1.0.0 h1:bkKf0BeBXcSYa7f5Fyi9gMuQ8gNsxeiNpZjR6VxNZeo= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0 h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs= @@ -190,46 +238,59 @@ github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/memberlist v0.2.2 h1:5+RffWKwqJ71YPu9mWsF7ZOscZmwfasdA8kbdC7AO2g= -github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/serf v0.9.5 h1:EBWvyu9tcRszt3Bxp3KNssBMP1KuHWyO51lz9+786iM= -github.com/hashicorp/serf v0.9.5/go.mod 
h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.3.0 h1:8+567mCcFDnS5ADl7lrpxPMWiFCElyUEeW0gtj34fMA= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.6 h1:uuEX1kLR6aoda1TBttmJQKDLZE1Ob7KN0NPdE7EtCDc= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod 
h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26 h1:gPxPSwALAeHJSjarOs00QjVdV9QoBvc1D2ujQUr5BzU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -237,17 +298,22 @@ github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdI github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= -github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= +github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c h1:Lgl0gzECD8GnQ5QCWA8o6BtfL6mDH5rQgM4/fX3avOs= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= 
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= @@ -255,15 +321,29 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sagikazarmark/crypt v0.1.0 h1:AyO7PGna28P9TMH93Bsxd7m9QC4xE6zyGQTXCo7ZrA8= -github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= +github.com/sagikazarmark/crypt v0.4.0 h1:Rqcx6Sf/bWQUmmfGQhcFx3wQQEfb2UZWhAKvGRairm0= +github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/spaolacci/murmur3 
v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= @@ -272,8 +352,9 @@ github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmq github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -283,17 +364,18 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.etcd.io/etcd/api/v3 v3.5.0 h1:GsV3S+OfZEOCNXdtNkBSR7kgLobAa/SO6tCxRa0GAYw= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0 h1:2aQv6F436YnN7I4VbI8PPYrBhu+SmrTaADcf8Mi/6PU= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0 h1:ftQ0nOOHMcbMS3KIaDQ0g5Qcd6bhaBrQT6b89DfwLTs= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.etcd.io/etcd/api/v3 v3.5.1 h1:v28cktvBq+7vGyJXF8G+rWJmj+1XUmMtqcLnH8hDocM= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.1 h1:XIQcHCFSG53bJETYeRJtIxdLv2EWRGxcfzR8lSnTH4E= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.1 h1:vtxYCKWA9x31w0WJj7DdqsHFNjhkigdAnziDtkZb/l4= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod 
h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -306,7 +388,7 @@ go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqe go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -351,9 +433,10 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -361,6 +444,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -387,8 +471,10 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420 h1:a8jGStKg0XqKDlKqjLrXn0ioF5MH36pT7Z0BRTqLhbk= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod 
h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d h1:LO7XpTYMwTqxjLcGWPijK3vRXg1aWdlNOVOHRq45d7c= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -403,8 +489,10 @@ golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f h1:Qmd2pbz05z7z6lm0DrgQVVPuBm92jqujBKMHMOlOQEw= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -415,15 +503,16 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -458,6 +547,7 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -471,8 +561,14 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf h1:2ucpDCmfkl8Bd/FsLtiD653Wf96cW37s+iGx93zsu4k= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486 h1:5hpz5aRr+W1erYCL5JRhSUBJRph7l9XkNveoExlrKYk= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -481,8 +577,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 
h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -571,8 +668,13 @@ google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtuk google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.56.0 h1:08F9XVYTLOGeSQb3xI9C0gXMuQanhdGed0cWFhDozbI= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0 h1:n2bqqK895ygnBpdPDYetfy23K7fJ22wsrZKCyfuRkkA= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -632,8 +734,17 @@ google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKr google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71 h1:z+ErRPu0+KS02Td3fOAgdX+lnPDh/VyaABEJPD4JRQs= google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa h1:I0YcKz0I7OAhddo7ya8kMnvprhcWM045PmkBdMO9zN0= +google.golang.org/genproto 
v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -658,8 +769,10 @@ google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0 h1:AGJ0Ih4mHjSeibYkFGh1dD9KJ/eOtZ93I6hoHhukQ5Q= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.43.0 h1:Eeu7bZtDZ2DpRCsLhUlcrLnvYaMK1Gz86a+hMVvELmM= +google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -675,15 +788,19 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.63.2 h1:tGK/CyBg7SMzb60vP1M03vNZ3VDu3wGQJwn7Sxi9r3c= -gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/vendor/github.com/spf13/viper/logger.go b/vendor/github.com/spf13/viper/logger.go new file mode 100644 index 000000000..0115067ae --- /dev/null +++ 
b/vendor/github.com/spf13/viper/logger.go
@@ -0,0 +1,77 @@
+package viper
+
+import (
+	"fmt"
+
+	jww "github.com/spf13/jwalterweatherman"
+)
+
+// Logger is a unified interface for various logging use cases and practices, including:
+// - leveled logging
+// - structured logging
+type Logger interface {
+	// Trace logs a Trace event.
+	//
+	// Even more fine-grained information than Debug events.
+	// Loggers not supporting this level should fall back to Debug.
+	Trace(msg string, keyvals ...interface{})
+
+	// Debug logs a Debug event.
+	//
+	// A verbose series of information events.
+	// They are useful when debugging the system.
+	Debug(msg string, keyvals ...interface{})
+
+	// Info logs an Info event.
+	//
+	// General information about what's happening inside the system.
+	Info(msg string, keyvals ...interface{})
+
+	// Warn logs a Warn(ing) event.
+	//
+	// Non-critical events that should be looked at.
+	Warn(msg string, keyvals ...interface{})
+
+	// Error logs an Error event.
+	//
+	// Critical events that require immediate attention.
+	// Loggers commonly provide Fatal and Panic levels above Error level,
+	// but exiting and panicing is out of scope for a logging library.
+	Error(msg string, keyvals ...interface{})
+}
+
+type jwwLogger struct{}
+
+func (jwwLogger) Trace(msg string, keyvals ...interface{}) {
+	jww.TRACE.Printf(jwwLogMessage(msg, keyvals...))
+}
+
+func (jwwLogger) Debug(msg string, keyvals ...interface{}) {
+	jww.DEBUG.Printf(jwwLogMessage(msg, keyvals...))
+}
+
+func (jwwLogger) Info(msg string, keyvals ...interface{}) {
+	jww.INFO.Printf(jwwLogMessage(msg, keyvals...))
+}
+
+func (jwwLogger) Warn(msg string, keyvals ...interface{}) {
+	jww.WARN.Printf(jwwLogMessage(msg, keyvals...))
+}
+
+func (jwwLogger) Error(msg string, keyvals ...interface{}) {
+	jww.ERROR.Printf(jwwLogMessage(msg, keyvals...))
+}
+
+func jwwLogMessage(msg string, keyvals ...interface{}) string {
+	out := msg
+
+	if len(keyvals) > 0 && len(keyvals)%2 == 1 {
+		keyvals = append(keyvals, nil)
+	}
+
+	for i := 0; i <= len(keyvals)-2; i += 2 {
+		out = fmt.Sprintf("%s %v=%v", out, keyvals[i], keyvals[i+1])
+	}
+
+	return out
+}
diff --git a/vendor/github.com/spf13/viper/util.go b/vendor/github.com/spf13/viper/util.go
index 09d051a22..ee7a86d9d 100644
--- a/vendor/github.com/spf13/viper/util.go
+++ b/vendor/github.com/spf13/viper/util.go
@@ -18,9 +18,7 @@ import (
 	"strings"
 	"unicode"
 
-	"github.com/spf13/afero"
 	"github.com/spf13/cast"
-	jww "github.com/spf13/jwalterweatherman"
 )
 
 // ConfigParseError denotes failing to parse configuration file.
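The logger.go file added above gives viper an internal Logger interface: five levels, each taking a message plus variadic key/value pairs, and a jwwLogger adapter that flattens those pairs into "key=value" text for jwalterweatherman via jwwLogMessage. The patch itself wires jwwLogger{} in (see the New() hunk further down); the standalone sketch below is not part of the change and only illustrates the same keyvals convention on top of the standard library log package.

package main

import (
	"fmt"
	"log"
)

// stdLogger mirrors the shape of viper's new Logger interface using only the
// standard library. It is a hypothetical adapter for illustration, not code
// from this patch.
type stdLogger struct{}

// flatten joins variadic key/value pairs into "key=value" text, padding an
// odd trailing key with nil, the same strategy jwwLogMessage uses above.
func flatten(msg string, keyvals ...interface{}) string {
	if len(keyvals)%2 == 1 {
		keyvals = append(keyvals, nil)
	}
	for i := 0; i+1 < len(keyvals); i += 2 {
		msg = fmt.Sprintf("%s %v=%v", msg, keyvals[i], keyvals[i+1])
	}
	return msg
}

func (stdLogger) Trace(msg string, keyvals ...interface{}) { log.Print("TRACE ", flatten(msg, keyvals...)) }
func (stdLogger) Debug(msg string, keyvals ...interface{}) { log.Print("DEBUG ", flatten(msg, keyvals...)) }
func (stdLogger) Info(msg string, keyvals ...interface{})  { log.Print("INFO ", flatten(msg, keyvals...)) }
func (stdLogger) Warn(msg string, keyvals ...interface{})  { log.Print("WARN ", flatten(msg, keyvals...)) }
func (stdLogger) Error(msg string, keyvals ...interface{}) { log.Print("ERROR ", flatten(msg, keyvals...)) }

func main() {
	var l stdLogger
	l.Info("adding path to search paths", "path", "/etc/app")
	// prints something like: 2022/02/24 17:33:24 INFO adding path to search paths path=/etc/app
}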
@@ -88,8 +86,8 @@ func insensitiviseMap(m map[string]interface{}) { } } -func absPathify(inPath string) string { - jww.INFO.Println("Trying to resolve absolute path to", inPath) +func absPathify(logger Logger, inPath string) string { + logger.Info("trying to resolve absolute path", "path", inPath) if inPath == "$HOME" || strings.HasPrefix(inPath, "$HOME"+string(os.PathSeparator)) { inPath = userHomeDir() + inPath[5:] @@ -106,21 +104,9 @@ func absPathify(inPath string) string { return filepath.Clean(p) } - jww.ERROR.Println("Couldn't discover absolute path") - jww.ERROR.Println(err) - return "" -} + logger.Error(fmt.Errorf("could not discover absolute path: %w", err).Error()) -// Check if file Exists -func exists(fs afero.Fs, path string) (bool, error) { - stat, err := fs.Stat(path) - if err == nil { - return !stat.IsDir(), nil - } - if os.IsNotExist(err) { - return false, nil - } - return false, err + return "" } func stringInSlice(a string, list []string) bool { diff --git a/vendor/github.com/spf13/viper/viper.go b/vendor/github.com/spf13/viper/viper.go index 9e2e3537f..4a9935899 100644 --- a/vendor/github.com/spf13/viper/viper.go +++ b/vendor/github.com/spf13/viper/viper.go @@ -39,7 +39,6 @@ import ( "github.com/mitchellh/mapstructure" "github.com/spf13/afero" "github.com/spf13/cast" - jww "github.com/spf13/jwalterweatherman" "github.com/spf13/pflag" "github.com/subosito/gotenv" "gopkg.in/ini.v1" @@ -260,6 +259,8 @@ type Viper struct { properties *properties.Properties onConfigChange func(fsnotify.Event) + + logger Logger } // New returns an initialized Viper instance. @@ -267,7 +268,7 @@ func New() *Viper { v := new(Viper) v.keyDelim = "." v.configName = "config" - v.configPermissions = os.FileMode(0644) + v.configPermissions = os.FileMode(0o644) v.fs = afero.NewOsFs() v.config = make(map[string]interface{}) v.override = make(map[string]interface{}) @@ -277,6 +278,7 @@ func New() *Viper { v.env = make(map[string][]string) v.aliases = make(map[string]string) v.typeByDefValue = false + v.logger = jwwLogger{} return v } @@ -517,8 +519,9 @@ func AddConfigPath(in string) { v.AddConfigPath(in) } func (v *Viper) AddConfigPath(in string) { if in != "" { - absin := absPathify(in) - jww.INFO.Println("adding", absin, "to paths to search") + absin := absPathify(v.logger, in) + + v.logger.Info("adding path to search paths", "path", absin) if !stringInSlice(absin, v.configPaths) { v.configPaths = append(v.configPaths, absin) } @@ -542,7 +545,8 @@ func (v *Viper) AddRemoteProvider(provider, endpoint, path string) error { return UnsupportedRemoteProviderError(provider) } if provider != "" && endpoint != "" { - jww.INFO.Printf("adding %s:%s to remote provider list", provider, endpoint) + v.logger.Info("adding remote provider", "provider", provider, "endpoint", endpoint) + rp := &defaultRemoteProvider{ endpoint: endpoint, provider: provider, @@ -574,7 +578,8 @@ func (v *Viper) AddSecureRemoteProvider(provider, endpoint, path, secretkeyring return UnsupportedRemoteProviderError(provider) } if provider != "" && endpoint != "" { - jww.INFO.Printf("adding %s:%s to remote provider list", provider, endpoint) + v.logger.Info("adding remote provider", "provider", provider, "endpoint", endpoint) + rp := &defaultRemoteProvider{ endpoint: endpoint, provider: provider, @@ -1390,14 +1395,15 @@ func (v *Viper) registerAlias(alias string, key string) { v.aliases[alias] = key } } else { - jww.WARN.Println("Creating circular reference alias", alias, key, v.realKey(key)) + v.logger.Warn("creating circular reference 
alias", "alias", alias, "key", key, "real_key", v.realKey(key)) } } func (v *Viper) realKey(key string) string { newkey, exists := v.aliases[key] if exists { - jww.DEBUG.Println("Alias", key, "to", newkey) + v.logger.Debug("key is an alias", "alias", key, "to", newkey) + return v.realKey(newkey) } return key @@ -1458,7 +1464,7 @@ func (v *Viper) Set(key string, value interface{}) { func ReadInConfig() error { return v.ReadInConfig() } func (v *Viper) ReadInConfig() error { - jww.INFO.Println("Attempting to read in config file") + v.logger.Info("attempting to read in config file") filename, err := v.getConfigFile() if err != nil { return err @@ -1468,7 +1474,7 @@ func (v *Viper) ReadInConfig() error { return UnsupportedConfigError(v.getConfigType()) } - jww.DEBUG.Println("Reading file: ", filename) + v.logger.Debug("reading file", "file", filename) file, err := afero.ReadFile(v.fs, filename) if err != nil { return err @@ -1489,7 +1495,7 @@ func (v *Viper) ReadInConfig() error { func MergeInConfig() error { return v.MergeInConfig() } func (v *Viper) MergeInConfig() error { - jww.INFO.Println("Attempting to merge in config file") + v.logger.Info("attempting to merge in config file") filename, err := v.getConfigFile() if err != nil { return err @@ -1580,7 +1586,8 @@ func (v *Viper) SafeWriteConfigAs(filename string) error { } func (v *Viper) writeConfig(filename string, force bool) error { - jww.INFO.Println("Attempting to write configuration to file.") + v.logger.Info("attempting to write configuration to file") + var configType string ext := filepath.Ext(filename) @@ -1796,7 +1803,7 @@ func mergeMaps( for sk, sv := range src { tk := keyExists(sk, tgt) if tk == "" { - jww.TRACE.Printf("tk=\"\", tgt[%s]=%v", sk, sv) + v.logger.Trace("", "tk", "\"\"", fmt.Sprintf("tgt[%s]", sk), sv) tgt[sk] = sv if itgt != nil { itgt[sk] = sv @@ -1806,7 +1813,7 @@ func mergeMaps( tv, ok := tgt[tk] if !ok { - jww.TRACE.Printf("tgt[%s] != ok, tgt[%s]=%v", tk, sk, sv) + v.logger.Trace("", fmt.Sprintf("ok[%s]", tk), false, fmt.Sprintf("tgt[%s]", sk), sv) tgt[sk] = sv if itgt != nil { itgt[sk] = sv @@ -1817,27 +1824,38 @@ func mergeMaps( svType := reflect.TypeOf(sv) tvType := reflect.TypeOf(tv) if tvType != nil && svType != tvType { // Allow for the target to be nil - jww.ERROR.Printf( - "svType != tvType; key=%s, st=%v, tt=%v, sv=%v, tv=%v", - sk, svType, tvType, sv, tv) + v.logger.Error( + "svType != tvType", + "key", sk, + "st", svType, + "tt", tvType, + "sv", sv, + "tv", tv, + ) continue } - jww.TRACE.Printf("processing key=%s, st=%v, tt=%v, sv=%v, tv=%v", - sk, svType, tvType, sv, tv) + v.logger.Trace( + "processing", + "key", sk, + "st", svType, + "tt", tvType, + "sv", sv, + "tv", tv, + ) switch ttv := tv.(type) { case map[interface{}]interface{}: - jww.TRACE.Printf("merging maps (must convert)") + v.logger.Trace("merging maps (must convert)") tsv := sv.(map[interface{}]interface{}) ssv := castToMapStringInterface(tsv) stv := castToMapStringInterface(ttv) mergeMaps(ssv, stv, ttv) case map[string]interface{}: - jww.TRACE.Printf("merging maps") + v.logger.Trace("merging maps") mergeMaps(sv.(map[string]interface{}), ttv, nil) default: - jww.TRACE.Printf("setting value") + v.logger.Trace("setting value") tgt[tk] = sv if itgt != nil { itgt[tk] = sv @@ -1872,7 +1890,7 @@ func (v *Viper) getKeyValueConfig() error { for _, rp := range v.remoteProviders { val, err := v.getRemoteConfig(rp) if err != nil { - jww.ERROR.Printf("get remote config: %s", err) + v.logger.Error(fmt.Errorf("get remote config: %w", err).Error()) 
continue } @@ -2108,39 +2126,6 @@ func (v *Viper) getConfigFile() (string, error) { return v.configFile, nil } -func (v *Viper) searchInPath(in string) (filename string) { - jww.DEBUG.Println("Searching for config in ", in) - for _, ext := range SupportedExts { - jww.DEBUG.Println("Checking for", filepath.Join(in, v.configName+"."+ext)) - if b, _ := exists(v.fs, filepath.Join(in, v.configName+"."+ext)); b { - jww.DEBUG.Println("Found: ", filepath.Join(in, v.configName+"."+ext)) - return filepath.Join(in, v.configName+"."+ext) - } - } - - if v.configType != "" { - if b, _ := exists(v.fs, filepath.Join(in, v.configName)); b { - return filepath.Join(in, v.configName) - } - } - - return "" -} - -// Search all configPaths for any config file. -// Returns the first path that exists (and is a config file). -func (v *Viper) findConfigFile() (string, error) { - jww.INFO.Println("Searching for config in ", v.configPaths) - - for _, cp := range v.configPaths { - file := v.searchInPath(cp) - if file != "" { - return file, nil - } - } - return "", ConfigFileNotFoundError{v.configName, fmt.Sprintf("%s", v.configPaths)} -} - // Debug prints all configuration registries for debugging // purposes. func Debug() { v.Debug() } diff --git a/vendor/github.com/spf13/viper/viper_go1_15.go b/vendor/github.com/spf13/viper/viper_go1_15.go new file mode 100644 index 000000000..19a771cbd --- /dev/null +++ b/vendor/github.com/spf13/viper/viper_go1_15.go @@ -0,0 +1,57 @@ +//go:build !go1.16 || !finder +// +build !go1.16 !finder + +package viper + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/spf13/afero" +) + +// Search all configPaths for any config file. +// Returns the first path that exists (and is a config file). +func (v *Viper) findConfigFile() (string, error) { + v.logger.Info("searching for config in paths", "paths", v.configPaths) + + for _, cp := range v.configPaths { + file := v.searchInPath(cp) + if file != "" { + return file, nil + } + } + return "", ConfigFileNotFoundError{v.configName, fmt.Sprintf("%s", v.configPaths)} +} + +func (v *Viper) searchInPath(in string) (filename string) { + v.logger.Debug("searching for config in path", "path", in) + for _, ext := range SupportedExts { + v.logger.Debug("checking if file exists", "file", filepath.Join(in, v.configName+"."+ext)) + if b, _ := exists(v.fs, filepath.Join(in, v.configName+"."+ext)); b { + v.logger.Debug("found file", "file", filepath.Join(in, v.configName+"."+ext)) + return filepath.Join(in, v.configName+"."+ext) + } + } + + if v.configType != "" { + if b, _ := exists(v.fs, filepath.Join(in, v.configName)); b { + return filepath.Join(in, v.configName) + } + } + + return "" +} + +// Check if file Exists +func exists(fs afero.Fs, path string) (bool, error) { + stat, err := fs.Stat(path) + if err == nil { + return !stat.IsDir(), nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} diff --git a/vendor/github.com/spf13/viper/viper_go1_16.go b/vendor/github.com/spf13/viper/viper_go1_16.go new file mode 100644 index 000000000..e10172fa3 --- /dev/null +++ b/vendor/github.com/spf13/viper/viper_go1_16.go @@ -0,0 +1,32 @@ +//go:build go1.16 && finder +// +build go1.16,finder + +package viper + +import ( + "fmt" + + "github.com/spf13/afero" +) + +// Search all configPaths for any config file. +// Returns the first path that exists (and is a config file). 
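The config-file search now lives in two files selected by build constraints: viper_go1_15.go above keeps the afero-based searchInPath/exists walk and applies when the toolchain is older than Go 1.16 or the finder tag is absent, while viper_go1_16.go, which continues below, is compiled only on Go 1.16+ with the finder tag and delegates to an io/fs-based finder. Each file carries both the new //go:build expression and the legacy // +build comment, and the two must express the same condition (on a // +build line a comma means AND and a space means OR). A tiny standalone sketch of the same pattern, with hypothetical file and package names:

//go:build go1.16 && finder
// +build go1.16,finder

// Package pickimpl is a hypothetical illustration of the constraint pair used
// by the two viper files: this file compiles only on Go 1.16 or newer AND when
// the custom "finder" tag is passed, e.g. `go build -tags finder ./...`.
// A sibling file guarded by `//go:build !go1.16 || !finder` would provide the
// fallback implementation, so exactly one of the two variants is ever built.
package pickimpl

// Implementation reports which variant was compiled in.
func Implementation() string { return "io/fs finder" }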
+func (v *Viper) findConfigFile() (string, error) { + finder := finder{ + paths: v.configPaths, + fileNames: []string{v.configName}, + extensions: SupportedExts, + withoutExtension: v.configType != "", + } + + file, err := finder.Find(afero.NewIOFS(v.fs)) + if err != nil { + return "", err + } + + if file == "" { + return "", ConfigFileNotFoundError{v.configName, fmt.Sprintf("%s", v.configPaths)} + } + + return file, nil +} diff --git a/vendor/github.com/spf13/viper/watch.go b/vendor/github.com/spf13/viper/watch.go index c433a8fa4..b5523b8f9 100644 --- a/vendor/github.com/spf13/viper/watch.go +++ b/vendor/github.com/spf13/viper/watch.go @@ -1,3 +1,4 @@ +//go:build !js // +build !js package viper diff --git a/vendor/github.com/tdakkota/asciicheck/.gitignore b/vendor/github.com/tdakkota/asciicheck/.gitignore index cf875a711..dfa562d3e 100644 --- a/vendor/github.com/tdakkota/asciicheck/.gitignore +++ b/vendor/github.com/tdakkota/asciicheck/.gitignore @@ -30,4 +30,3 @@ gen .idea/misc.xml .idea/modules.xml asciicheck.iml -go.sum diff --git a/vendor/github.com/tdakkota/asciicheck/ascii.go b/vendor/github.com/tdakkota/asciicheck/ascii.go index 9e70c391d..43fe25b59 100644 --- a/vendor/github.com/tdakkota/asciicheck/ascii.go +++ b/vendor/github.com/tdakkota/asciicheck/ascii.go @@ -1,16 +1,19 @@ package asciicheck -import "unicode" +import ( + "unicode" + "unicode/utf8" +) func isASCII(s string) (rune, bool) { if len(s) == 1 { - return []rune(s)[0], s[0] <= unicode.MaxASCII + r, size := utf8.DecodeRuneInString(s) + return r, size < 2 } - r := []rune(s) - for i := 0; i < len(s); i++ { - if r[i] > unicode.MaxASCII { - return r[i], false + for _, r := range s { + if r > unicode.MaxASCII { + return r, false } } diff --git a/vendor/github.com/tdakkota/asciicheck/go.sum b/vendor/github.com/tdakkota/asciicheck/go.sum new file mode 100644 index 000000000..e694efa6c --- /dev/null +++ b/vendor/github.com/tdakkota/asciicheck/go.sum @@ -0,0 +1,20 @@ +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200414032229-332987a829c3 h1:Z68UA+HA9shnGhQbAFXKqL1Rk/tfiTHJ57bNm/MUL/A= +golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod 
h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go index 145d5409e..a7ff30b49 100644 --- a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go +++ b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go @@ -80,11 +80,6 @@ func (r runner) run(pass *analysis.Pass) (interface{}, error) { r.skipFile = map[*ast.File]bool{} for _, f := range funcs { - if r.noImportedNetHTTP(f) { - // skip this - continue - } - // skip if the function is just referenced var isreffunc bool for i := 0; i < f.Signature.Results().Len(); i++ { diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml index 47cbca5e5..7516de0b9 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml @@ -1,3 +1,10 @@ +env: + - GO_VERSION=1.16 + +before: + hooks: + - go mod download + builds: - main: ./cmd/mnd/main.go binary: mnd @@ -27,3 +34,21 @@ brews: system "#{bin}/mnd --version" install: | bin.install "mnd" + +dockers: + - + goos: linux + goarch: amd64 + image_templates: + - "tommymuehle/go-mnd:latest" + - "tommymuehle/go-mnd:{{ .Tag }}" + build_flag_templates: + - "--build-arg=GO_VERSION={{.Env.GO_VERSION}}" + extra_files: + - checks + - cmd + - config + - analyzer.go + - entrypoint.sh + - go.mod + - go.sum diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile index bb8e2b7f4..25c8d5475 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile @@ -1,4 +1,4 @@ -ARG GO_VERSION=1.15 +ARG GO_VERSION=1.16 FROM golang:${GO_VERSION}-alpine AS builder RUN apk add --update --no-cache make git curl gcc libc-dev diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile index b8a32316b..a21c1dcac 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile @@ -1,6 +1,6 @@ GIT_TAG?= $(shell git describe --abbrev=0) -GO_VERSION = 1.15 +GO_VERSION = 1.16 BUILDFLAGS := '-w -s' IMAGE_REPO = "tommymuehle" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md index 6e3a55573..a29f266be 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md @@ -118,7 +118,7 @@ The ```-ignored-numbers``` option let's you define a comma separated list of num For example: `-ignored-numbers=1000,10_000,3.14159264` The ```-ignored-functions``` option let's you define a comma separated list of function name regexp patterns to exclude. 
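The bodyclose change above removes the shortcut that skipped any file not importing net/http directly, so functions that obtain an *http.Response through a wrapper package are now analyzed as well (that is the apparent intent; the diff itself only drops the skip). The pattern the linter enforces is unchanged, as in this standalone sketch:

package main

import (
	"fmt"
	"io"
	"net/http"
)

// fetch shows the pattern bodyclose exists to enforce: every *http.Response
// body must be closed, even on the happy path. Removing the defer line below
// is exactly what the linter would report at the http.Get call site.
func fetch(url string) (int64, error) {
	resp, err := http.Get(url)
	if err != nil {
		return 0, err
	}
	defer resp.Body.Close()

	return io.Copy(io.Discard, resp.Body)
}

func main() {
	n, err := fetch("https://example.com")
	fmt.Println(n, err)
}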
-For example: `-ignored-functions=math.*,http.StatusText` +For example: `-ignored-functions=math.*,http.StatusText,make` The ```-ignored-files``` option let's you define a comma separated list of filename regexp patterns to exclude. For example: `-ignored-files=magic_.*.go,.*_numbers.go` diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go index df6ad676d..5d880f0f9 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go @@ -74,6 +74,10 @@ func (a *ArgumentAnalyzer) checkCallExpr(expr *ast.CallExpr) { return } } + case *ast.Ident: + if a.config.IsIgnoredFunction(f.Name) { + return + } } for i, arg := range expr.Args { diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go index a4681e37d..e186028e0 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go @@ -44,11 +44,10 @@ func WithOptions(options ...Option) *Config { func WithIgnoredFunctions(excludes string) Option { return func(config *Config) { - if excludes == "" { - return - } - for _, exclude := range strings.Split(excludes, ",") { + if exclude == "" { + continue + } config.IgnoredFunctions = append(config.IgnoredFunctions, regexp.MustCompile(exclude)) } } @@ -56,11 +55,10 @@ func WithIgnoredFunctions(excludes string) Option { func WithIgnoredFiles(excludes string) Option { return func(config *Config) { - if excludes == "" { - return - } - for _, exclude := range strings.Split(excludes, ",") { + if exclude == "" { + continue + } config.IgnoredFiles = append(config.IgnoredFiles, regexp.MustCompile(exclude)) } } @@ -68,11 +66,10 @@ func WithIgnoredFiles(excludes string) Option { func WithIgnoredNumbers(numbers string) Option { return func(config *Config) { - if numbers == "" { - return - } - for _, number := range strings.Split(numbers, ",") { + if number == "" { + continue + } config.IgnoredNumbers[config.removeDigitSeparator(number)] = struct{}{} } } @@ -89,6 +86,9 @@ func WithCustomChecks(checks string) Option { } for _, name := range strings.Split(checks, ",") { + if name == "" { + continue + } config.Checks[name] = true } } diff --git a/vendor/github.com/ultraware/whitespace/README.md b/vendor/github.com/ultraware/whitespace/README.md index aed9a485f..2a88f1338 100644 --- a/vendor/github.com/ultraware/whitespace/README.md +++ b/vendor/github.com/ultraware/whitespace/README.md @@ -4,4 +4,4 @@ Whitespace is a linter that checks for unnecessary newlines at the start and end ## Installation guide -Whitespace is included in [https://github.com/golangci/golangci-lint/](golangci-lint). Install it and enable whitespace. +Whitespace is included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable whitespace. 
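In go-mnd's config above, WithIgnoredFunctions, WithIgnoredFiles, WithIgnoredNumbers and WithCustomChecks now skip empty elements inside the comma-separated value instead of only bailing out when the whole string is empty, so a stray trailing comma no longer compiles an empty regexp (which would match every name). The README example also gains make as an ignorable function. A sketch of driving the option constructors visible in the hunks; the printed results assume the matcher behaves as the released analyzer does:

package main

import (
	"fmt"

	"github.com/tommy-muehle/go-mnd/v2/config"
)

func main() {
	// Build a go-mnd configuration the way the analyzer does from its flags.
	// The trailing comma after "make" is now harmless: the option skips the
	// empty element instead of compiling regexp.MustCompile("").
	cfg := config.WithOptions(
		config.WithIgnoredFunctions("math.*,http.StatusText,make,"),
	)

	fmt.Println(cfg.IsIgnoredFunction("http.StatusText")) // true
	fmt.Println(cfg.IsIgnoredFunction("strconv.Itoa"))    // false
}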
diff --git a/vendor/github.com/ultraware/whitespace/main.go b/vendor/github.com/ultraware/whitespace/main.go index c36086c0e..d178ea293 100644 --- a/vendor/github.com/ultraware/whitespace/main.go +++ b/vendor/github.com/ultraware/whitespace/main.go @@ -64,6 +64,10 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { checkMultiLine(v, stmt.Body, stmt.Cond) } + if stmt, ok := node.(*ast.FuncLit); ok && v.settings.MultiFunc { + checkMultiLine(v, stmt.Body, stmt.Type) + } + if stmt, ok := node.(*ast.FuncDecl); ok && v.settings.MultiFunc { checkMultiLine(v, stmt.Body, stmt.Type) } diff --git a/vendor/github.com/xanzy/go-gitlab/README.md b/vendor/github.com/xanzy/go-gitlab/README.md index 568fffcab..19f6ad2a6 100644 --- a/vendor/github.com/xanzy/go-gitlab/README.md +++ b/vendor/github.com/xanzy/go-gitlab/README.md @@ -9,14 +9,14 @@ A GitLab API client enabling Go programs to interact with GitLab in a simple and ## NOTE -Release v0.6.0 (released on 25-08-2017) no longer supports the older V3 Gitlab API. If +Release v0.6.0 (released on 25-08-2017) no longer supports the older V3 GitLab API. If you need V3 support, please use the `f-api-v3` branch. This release contains some backwards -incompatible changes that were needed to fully support the V4 Gitlab API. +incompatible changes that were needed to fully support the V4 GitLab API. ## Coverage -This API client package covers most of the existing Gitlab API calls and is updated regularly -to add new and/or missing endpoints. Currently the following services are supported: +This API client package covers most of the existing GitLab API calls and is updated regularly +to add new and/or missing endpoints. Currently, the following services are supported: - [x] Applications - [x] Award Emojis @@ -52,6 +52,7 @@ to add new and/or missing endpoints. Currently the following services are suppor - [x] Keys - [x] Labels - [x] License +- [x] Markdown - [x] Merge Request Approvals - [x] Merge Requests - [x] Namespaces @@ -80,6 +81,7 @@ to add new and/or missing endpoints. Currently the following services are suppor - [x] Protected Tags - [x] Repositories - [x] Repository Files +- [x] Repository Submodules - [x] Runners - [x] Search - [x] Services diff --git a/vendor/github.com/xanzy/go-gitlab/access_requests.go b/vendor/github.com/xanzy/go-gitlab/access_requests.go index ebd6d8485..f2cefe6e3 100644 --- a/vendor/github.com/xanzy/go-gitlab/access_requests.go +++ b/vendor/github.com/xanzy/go-gitlab/access_requests.go @@ -61,7 +61,7 @@ func (s *AccessRequestsService) ListProjectAccessRequests(pid interface{}, opt * if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/access_requests", pathEscape(project)) + u := fmt.Sprintf("projects/%s/access_requests", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -87,7 +87,7 @@ func (s *AccessRequestsService) ListGroupAccessRequests(gid interface{}, opt *Li if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/access_requests", pathEscape(group)) + u := fmt.Sprintf("groups/%s/access_requests", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -113,7 +113,7 @@ func (s *AccessRequestsService) RequestProjectAccess(pid interface{}, options .. 
if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/access_requests", pathEscape(project)) + u := fmt.Sprintf("projects/%s/access_requests", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -139,7 +139,7 @@ func (s *AccessRequestsService) RequestGroupAccess(gid interface{}, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/access_requests", pathEscape(group)) + u := fmt.Sprintf("groups/%s/access_requests", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -173,7 +173,7 @@ func (s *AccessRequestsService) ApproveProjectAccessRequest(pid interface{}, use if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/access_requests/%d/approve", pathEscape(project), user) + u := fmt.Sprintf("projects/%s/access_requests/%d/approve", PathEscape(project), user) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -198,7 +198,7 @@ func (s *AccessRequestsService) ApproveGroupAccessRequest(gid interface{}, user if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/access_requests/%d/approve", pathEscape(group), user) + u := fmt.Sprintf("groups/%s/access_requests/%d/approve", PathEscape(group), user) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -223,7 +223,7 @@ func (s *AccessRequestsService) DenyProjectAccessRequest(pid interface{}, user i if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/access_requests/%d", pathEscape(project), user) + u := fmt.Sprintf("projects/%s/access_requests/%d", PathEscape(project), user) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -242,7 +242,7 @@ func (s *AccessRequestsService) DenyGroupAccessRequest(gid interface{}, user int if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/access_requests/%d", pathEscape(group), user) + u := fmt.Sprintf("groups/%s/access_requests/%d", PathEscape(group), user) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/audit_events.go b/vendor/github.com/xanzy/go-gitlab/audit_events.go index 5b54a672a..bdae200db 100644 --- a/vendor/github.com/xanzy/go-gitlab/audit_events.go +++ b/vendor/github.com/xanzy/go-gitlab/audit_events.go @@ -108,7 +108,7 @@ func (s *AuditEventsService) ListGroupAuditEvents(gid interface{}, opt *ListAudi if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/audit_events", pathEscape(group)) + u := fmt.Sprintf("groups/%s/audit_events", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -132,7 +132,7 @@ func (s *AuditEventsService) GetGroupAuditEvent(gid interface{}, event int, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/audit_events/%d", pathEscape(group), event) + u := fmt.Sprintf("groups/%s/audit_events/%d", PathEscape(group), event) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -157,7 +157,7 @@ func (s *AuditEventsService) ListProjectAuditEvents(pid interface{}, opt *ListAu if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/audit_events", pathEscape(project)) + u := fmt.Sprintf("projects/%s/audit_events", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -182,7 +182,7 @@ func (s *AuditEventsService) 
GetProjectAuditEvent(pid interface{}, event int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/audit_events/%d", pathEscape(project), event) + u := fmt.Sprintf("projects/%s/audit_events/%d", PathEscape(project), event) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/award_emojis.go b/vendor/github.com/xanzy/go-gitlab/award_emojis.go index 8ee08bfce..cb138db73 100644 --- a/vendor/github.com/xanzy/go-gitlab/award_emojis.go +++ b/vendor/github.com/xanzy/go-gitlab/award_emojis.go @@ -93,7 +93,7 @@ func (s *AwardEmojiService) listAwardEmoji(pid interface{}, resource string, res return nil, nil, err } u := fmt.Sprintf("projects/%s/%s/%d/award_emoji", - pathEscape(project), + PathEscape(project), resource, resourceID, ) @@ -142,7 +142,7 @@ func (s *AwardEmojiService) getAwardEmoji(pid interface{}, resource string, reso return nil, nil, err } u := fmt.Sprintf("projects/%s/%s/%d/award_emoji/%d", - pathEscape(project), + PathEscape(project), resource, resourceID, awardID, @@ -201,7 +201,7 @@ func (s *AwardEmojiService) createAwardEmoji(pid interface{}, resource string, r return nil, nil, err } u := fmt.Sprintf("projects/%s/%s/%d/award_emoji", - pathEscape(project), + PathEscape(project), resource, resourceID, ) @@ -253,7 +253,7 @@ func (s *AwardEmojiService) deleteAwardEmoji(pid interface{}, resource string, r if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/%s/%d/award_emoji/%d", pathEscape(project), resource, + u := fmt.Sprintf("projects/%s/%s/%d/award_emoji/%d", PathEscape(project), resource, resourceID, awardID) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) @@ -295,7 +295,7 @@ func (s *AwardEmojiService) listAwardEmojiOnNote(pid interface{}, resources stri if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/%s/%d/notes/%d/award_emoji", pathEscape(project), resources, + u := fmt.Sprintf("projects/%s/%s/%d/notes/%d/award_emoji", PathEscape(project), resources, ressourceID, noteID) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) @@ -344,7 +344,7 @@ func (s *AwardEmojiService) getSingleNoteAwardEmoji(pid interface{}, ressource s return nil, nil, err } u := fmt.Sprintf("projects/%s/%s/%d/notes/%d/award_emoji/%d", - pathEscape(project), + PathEscape(project), ressource, resourceID, noteID, @@ -400,7 +400,7 @@ func (s *AwardEmojiService) createAwardEmojiOnNote(pid interface{}, resource str return nil, nil, err } u := fmt.Sprintf("projects/%s/%s/%d/notes/%d/award_emoji", - pathEscape(project), + PathEscape(project), resource, resourceID, noteID, @@ -452,7 +452,7 @@ func (s *AwardEmojiService) deleteAwardEmojiOnNote(pid interface{}, resource str return nil, err } u := fmt.Sprintf("projects/%s/%s/%d/notes/%d/award_emoji/%d", - pathEscape(project), + PathEscape(project), resource, resourceID, noteID, diff --git a/vendor/github.com/xanzy/go-gitlab/boards.go b/vendor/github.com/xanzy/go-gitlab/boards.go index ef850234f..fa90b9264 100644 --- a/vendor/github.com/xanzy/go-gitlab/boards.go +++ b/vendor/github.com/xanzy/go-gitlab/boards.go @@ -72,7 +72,7 @@ func (s *IssueBoardsService) CreateIssueBoard(pid interface{}, opt *CreateIssueB if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/boards", pathEscape(project)) + u := fmt.Sprintf("projects/%s/boards", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -95,7 +95,7 @@ type UpdateIssueBoardOptions struct { 
Name *string `url:"name,omitempty" json:"name,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` - Labels Labels `url:"labels,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,omitempty" json:"labels,omitempty"` Weight *int `url:"weight,omitempty" json:"weight,omitempty"` } @@ -107,7 +107,7 @@ func (s *IssueBoardsService) UpdateIssueBoard(pid interface{}, board int, opt *U if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/boards/%d", pathEscape(project), board) + u := fmt.Sprintf("projects/%s/boards/%d", PathEscape(project), board) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -131,7 +131,7 @@ func (s *IssueBoardsService) DeleteIssueBoard(pid interface{}, board int, option if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/boards/%d", pathEscape(project), board) + u := fmt.Sprintf("projects/%s/boards/%d", PathEscape(project), board) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -154,7 +154,7 @@ func (s *IssueBoardsService) ListIssueBoards(pid interface{}, opt *ListIssueBoar if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/boards", pathEscape(project)) + u := fmt.Sprintf("projects/%s/boards", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -178,7 +178,7 @@ func (s *IssueBoardsService) GetIssueBoard(pid interface{}, board int, options . if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/boards/%d", pathEscape(project), board) + u := fmt.Sprintf("projects/%s/boards/%d", PathEscape(project), board) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -208,7 +208,7 @@ func (s *IssueBoardsService) GetIssueBoardLists(pid interface{}, board int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/boards/%d/lists", pathEscape(project), board) + u := fmt.Sprintf("projects/%s/boards/%d/lists", PathEscape(project), board) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -233,7 +233,7 @@ func (s *IssueBoardsService) GetIssueBoardList(pid interface{}, board, list int, return nil, nil, err } u := fmt.Sprintf("projects/%s/boards/%d/lists/%d", - pathEscape(project), + PathEscape(project), board, list, ) @@ -268,7 +268,7 @@ func (s *IssueBoardsService) CreateIssueBoardList(pid interface{}, board int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/boards/%d/lists", pathEscape(project), board) + u := fmt.Sprintf("projects/%s/boards/%d/lists", PathEscape(project), board) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -301,7 +301,7 @@ func (s *IssueBoardsService) UpdateIssueBoardList(pid interface{}, board, list i return nil, nil, err } u := fmt.Sprintf("projects/%s/boards/%d/lists/%d", - pathEscape(project), + PathEscape(project), board, list, ) @@ -331,7 +331,7 @@ func (s *IssueBoardsService) DeleteIssueBoardList(pid interface{}, board, list i return nil, err } u := fmt.Sprintf("projects/%s/boards/%d/lists/%d", - pathEscape(project), + PathEscape(project), board, list, ) diff --git a/vendor/github.com/xanzy/go-gitlab/branches.go b/vendor/github.com/xanzy/go-gitlab/branches.go index 71285620e..246cb577e 100644 --- a/vendor/github.com/xanzy/go-gitlab/branches.go +++ b/vendor/github.com/xanzy/go-gitlab/branches.go @@ -68,7 
+68,7 @@ func (s *BranchesService) ListBranches(pid interface{}, opts *ListBranchesOption if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/branches", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/branches", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opts, options) if err != nil { @@ -93,7 +93,7 @@ func (s *BranchesService) GetBranch(pid interface{}, branch string, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/branches/%s", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/repository/branches/%s", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -129,7 +129,7 @@ func (s *BranchesService) ProtectBranch(pid interface{}, branch string, opts *Pr if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/branches/%s/protect", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/repository/branches/%s/protect", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodPut, u, opts, options) if err != nil { @@ -156,7 +156,7 @@ func (s *BranchesService) UnprotectBranch(pid interface{}, branch string, option if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/branches/%s/unprotect", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/repository/branches/%s/unprotect", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodPut, u, nil, options) if err != nil { @@ -190,7 +190,7 @@ func (s *BranchesService) CreateBranch(pid interface{}, opt *CreateBranchOptions if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/branches", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/branches", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -215,7 +215,7 @@ func (s *BranchesService) DeleteBranch(pid interface{}, branch string, options . 
if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/repository/branches/%s", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/repository/branches/%s", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -234,7 +234,7 @@ func (s *BranchesService) DeleteMergedBranches(pid interface{}, options ...Reque if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/repository/merged_branches", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/merged_branches", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/ci_yml_templates.go b/vendor/github.com/xanzy/go-gitlab/ci_yml_templates.go index da447c1ba..fb4e22bf1 100644 --- a/vendor/github.com/xanzy/go-gitlab/ci_yml_templates.go +++ b/vendor/github.com/xanzy/go-gitlab/ci_yml_templates.go @@ -69,7 +69,7 @@ func (s *CIYMLTemplatesService) ListAllTemplates(opt *ListCIYMLTemplatesOptions, // GitLab API docs: // https://docs.gitlab.com/ce/api/templates/gitlab_ci_ymls.html#single-gitlab-ci-yml-template func (s *CIYMLTemplatesService) GetTemplate(key string, options ...RequestOptionFunc) (*CIYMLTemplate, *Response, error) { - u := fmt.Sprintf("templates/gitlab_ci_ymls/%s", pathEscape(key)) + u := fmt.Sprintf("templates/gitlab_ci_ymls/%s", PathEscape(key)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/client_options.go b/vendor/github.com/xanzy/go-gitlab/client_options.go index f1d39adab..2ba54243a 100644 --- a/vendor/github.com/xanzy/go-gitlab/client_options.go +++ b/vendor/github.com/xanzy/go-gitlab/client_options.go @@ -83,6 +83,22 @@ func WithHTTPClient(httpClient *http.Client) ClientOptionFunc { } } +// WithRequestLogHook can be used to configure a custom request log hook. +func WithRequestLogHook(hook retryablehttp.RequestLogHook) ClientOptionFunc { + return func(c *Client) error { + c.client.RequestLogHook = hook + return nil + } +} + +// WithResponseLogHook can be used to configure a custom response log hook. +func WithResponseLogHook(hook retryablehttp.ResponseLogHook) ClientOptionFunc { + return func(c *Client) error { + c.client.ResponseLogHook = hook + return nil + } +} + // WithoutRetries disables the default retry logic. 
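go-gitlab's client_options.go gains WithRequestLogHook and WithResponseLogHook, which pass hooks straight through to the embedded hashicorp/go-retryablehttp client so each attempted request and each response can be observed, retries included. A sketch of wiring them up when constructing a client; the token is a placeholder and the hook signatures are go-retryablehttp's RequestLogHook and ResponseLogHook types:

package main

import (
	"log"
	"net/http"

	retryablehttp "github.com/hashicorp/go-retryablehttp"
	gitlab "github.com/xanzy/go-gitlab"
)

func main() {
	client, err := gitlab.NewClient(
		"YOUR_TOKEN", // placeholder personal access token
		gitlab.WithRequestLogHook(func(_ retryablehttp.Logger, req *http.Request, attempt int) {
			// Called before every attempt, including retries.
			log.Printf("request %s %s (attempt %d)", req.Method, req.URL, attempt)
		}),
		gitlab.WithResponseLogHook(func(_ retryablehttp.Logger, resp *http.Response) {
			// Called for every response received by the retrying client.
			log.Printf("response %d from %s", resp.StatusCode, resp.Request.URL)
		}),
	)
	if err != nil {
		log.Fatal(err)
	}
	_ = client
}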
func WithoutRetries() ClientOptionFunc { return func(c *Client) error { diff --git a/vendor/github.com/xanzy/go-gitlab/commits.go b/vendor/github.com/xanzy/go-gitlab/commits.go index 491fbd627..fd26624ae 100644 --- a/vendor/github.com/xanzy/go-gitlab/commits.go +++ b/vendor/github.com/xanzy/go-gitlab/commits.go @@ -89,7 +89,7 @@ func (s *CommitsService) ListCommits(pid interface{}, opt *ListCommitsOptions, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/commits", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -132,7 +132,7 @@ func (s *CommitsService) GetCommitRefs(pid interface{}, sha string, opt *GetComm if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/refs", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/refs", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -160,7 +160,7 @@ func (s *CommitsService) GetCommit(pid interface{}, sha string, options ...Reque if sha == "" { return nil, nil, fmt.Errorf("SHA must be a non-empty string") } - u := fmt.Sprintf("projects/%s/repository/commits/%s", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -214,7 +214,7 @@ func (s *CommitsService) CreateCommit(pid interface{}, opt *CreateCommitOptions, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/commits", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -263,7 +263,7 @@ func (s *CommitsService) GetCommitDiff(pid interface{}, sha string, opt *GetComm if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/diff", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/diff", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -320,7 +320,7 @@ func (s *CommitsService) GetCommitComments(pid interface{}, sha string, opt *Get if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/comments", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/comments", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -359,7 +359,7 @@ func (s *CommitsService) PostCommitComment(pid interface{}, sha string, opt *Pos if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/comments", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/comments", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -412,7 +412,7 @@ func (s *CommitsService) GetCommitStatuses(pid interface{}, sha string, opt *Get if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/statuses", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/statuses", 
PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -450,7 +450,7 @@ func (s *CommitsService) SetCommitStatus(pid interface{}, sha string, opt *SetCo if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/statuses/%s", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/statuses/%s", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -475,7 +475,7 @@ func (s *CommitsService) ListMergeRequestsByCommit(pid interface{}, sha string, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/merge_requests", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/merge_requests", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -508,7 +508,7 @@ func (s *CommitsService) CherryPickCommit(pid interface{}, sha string, opt *Cher if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/cherry_pick", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/cherry_pick", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -539,7 +539,7 @@ func (s *CommitsService) RevertCommit(pid interface{}, sha string, opt *RevertCo if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/revert", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/revert", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -576,7 +576,7 @@ func (s *CommitsService) GetGPGSiganature(pid interface{}, sha string, options . if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/commits/%s/signature", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/commits/%s/signature", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/container_registry.go b/vendor/github.com/xanzy/go-gitlab/container_registry.go index 8adb9d1e5..43bc3d849 100644 --- a/vendor/github.com/xanzy/go-gitlab/container_registry.go +++ b/vendor/github.com/xanzy/go-gitlab/container_registry.go @@ -77,16 +77,16 @@ type ListRegistryRepositoriesOptions struct { TagsCount *bool `url:"tags_count,omitempty" json:"tags_count,omitempty"` } -// ListRegistryRepositories gets a list of registry repositories in a project. +// ListProjectRegistryRepositories gets a list of registry repositories in a project. 
// // GitLab API docs: // https://docs.gitlab.com/ee/api/container_registry.html#list-registry-repositories -func (s *ContainerRegistryService) ListRegistryRepositories(pid interface{}, opt *ListRegistryRepositoriesOptions, options ...RequestOptionFunc) ([]*RegistryRepository, *Response, error) { +func (s *ContainerRegistryService) ListProjectRegistryRepositories(pid interface{}, opt *ListRegistryRepositoriesOptions, options ...RequestOptionFunc) ([]*RegistryRepository, *Response, error) { project, err := parseID(pid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/registry/repositories", pathEscape(project)) + u := fmt.Sprintf("projects/%s/registry/repositories", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -102,6 +102,66 @@ func (s *ContainerRegistryService) ListRegistryRepositories(pid interface{}, opt return repos, resp, err } +// ListGroupRegistryRepositories gets a list of registry repositories in a group. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/container_registry.html#within-a-group +func (s *ContainerRegistryService) ListGroupRegistryRepositories(gid interface{}, opt *ListRegistryRepositoriesOptions, options ...RequestOptionFunc) ([]*RegistryRepository, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/registry/repositories", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var repos []*RegistryRepository + resp, err := s.client.Do(req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, err +} + +// GetSingleRegistryRepositoryOptions represents the available +// GetSingleRegistryRepository() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/container_registry.html#get-details-of-a-single-repository +type GetSingleRegistryRepositoryOptions struct { + Tags *bool `url:"tags,omitempty" json:"tags,omitempty"` + TagsCount *bool `url:"tags_count,omitempty" json:"tags_count,omitempty"` +} + +// GetSingleRegistryRepository gets the details of single registry repository. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/container_registry.html#get-details-of-a-single-repository +func (s *ContainerRegistryService) GetSingleRegistryRepository(pid interface{}, opt *GetSingleRegistryRepositoryOptions, options ...RequestOptionFunc) (*RegistryRepository, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("registry/repositories/%s", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + repo := new(RegistryRepository) + resp, err := s.client.Do(req, repo) + if err != nil { + return nil, resp, err + } + + return repo, resp, err +} + // DeleteRegistryRepository deletes a repository in a registry. 
// // GitLab API docs: @@ -111,7 +171,7 @@ func (s *ContainerRegistryService) DeleteRegistryRepository(pid interface{}, rep if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/registry/repositories/%d", pathEscape(project), repository) + u := fmt.Sprintf("projects/%s/registry/repositories/%d", PathEscape(project), repository) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -138,7 +198,7 @@ func (s *ContainerRegistryService) ListRegistryRepositoryTags(pid interface{}, r return nil, nil, err } u := fmt.Sprintf("projects/%s/registry/repositories/%d/tags", - pathEscape(project), + PathEscape(project), repository, ) @@ -166,7 +226,7 @@ func (s *ContainerRegistryService) GetRegistryRepositoryTagDetail(pid interface{ return nil, nil, err } u := fmt.Sprintf("projects/%s/registry/repositories/%d/tags/%s", - pathEscape(project), + PathEscape(project), repository, tagName, ) @@ -195,7 +255,7 @@ func (s *ContainerRegistryService) DeleteRegistryRepositoryTag(pid interface{}, return nil, err } u := fmt.Sprintf("projects/%s/registry/repositories/%d/tags/%s", - pathEscape(project), + PathEscape(project), repository, tagName, ) @@ -234,7 +294,7 @@ func (s *ContainerRegistryService) DeleteRegistryRepositoryTags(pid interface{}, return nil, err } u := fmt.Sprintf("projects/%s/registry/repositories/%d/tags", - pathEscape(project), + PathEscape(project), repository, ) diff --git a/vendor/github.com/xanzy/go-gitlab/deploy_keys.go b/vendor/github.com/xanzy/go-gitlab/deploy_keys.go index 914f46e39..14ba0c8a0 100644 --- a/vendor/github.com/xanzy/go-gitlab/deploy_keys.go +++ b/vendor/github.com/xanzy/go-gitlab/deploy_keys.go @@ -78,7 +78,7 @@ func (s *DeployKeysService) ListProjectDeployKeys(pid interface{}, opt *ListProj if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_keys", pathEscape(project)) + u := fmt.Sprintf("projects/%s/deploy_keys", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -103,7 +103,7 @@ func (s *DeployKeysService) GetDeployKey(pid interface{}, deployKey int, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_keys/%d", pathEscape(project), deployKey) + u := fmt.Sprintf("projects/%s/deploy_keys/%d", PathEscape(project), deployKey) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -140,7 +140,7 @@ func (s *DeployKeysService) AddDeployKey(pid interface{}, opt *AddDeployKeyOptio if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_keys", pathEscape(project)) + u := fmt.Sprintf("projects/%s/deploy_keys", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -165,7 +165,7 @@ func (s *DeployKeysService) DeleteDeployKey(pid interface{}, deployKey int, opti if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/deploy_keys/%d", pathEscape(project), deployKey) + u := fmt.Sprintf("projects/%s/deploy_keys/%d", PathEscape(project), deployKey) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -184,7 +184,7 @@ func (s *DeployKeysService) EnableDeployKey(pid interface{}, deployKey int, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_keys/%d/enable", pathEscape(project), deployKey) + u := fmt.Sprintf("projects/%s/deploy_keys/%d/enable", PathEscape(project), deployKey) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ 
-218,7 +218,7 @@ func (s *DeployKeysService) UpdateDeployKey(pid interface{}, deployKey int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_keys/%d", pathEscape(project), deployKey) + u := fmt.Sprintf("projects/%s/deploy_keys/%d", PathEscape(project), deployKey) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/deploy_tokens.go b/vendor/github.com/xanzy/go-gitlab/deploy_tokens.go index 2082f96c6..1b9943d56 100644 --- a/vendor/github.com/xanzy/go-gitlab/deploy_tokens.go +++ b/vendor/github.com/xanzy/go-gitlab/deploy_tokens.go @@ -79,7 +79,7 @@ func (s *DeployTokensService) ListProjectDeployTokens(pid interface{}, opt *List if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_tokens", pathEscape(project)) + u := fmt.Sprintf("projects/%s/deploy_tokens", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -103,7 +103,7 @@ type CreateProjectDeployTokenOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` ExpiresAt *time.Time `url:"expires_at,omitempty" json:"expires_at,omitempty"` Username *string `url:"username,omitempty" json:"username,omitempty"` - Scopes []string `url:"scopes,omitempty" json:"scopes,omitempty"` + Scopes *[]string `url:"scopes,omitempty" json:"scopes,omitempty"` } // CreateProjectDeployToken creates a new deploy token for a project. @@ -115,7 +115,7 @@ func (s *DeployTokensService) CreateProjectDeployToken(pid interface{}, opt *Cre if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deploy_tokens", pathEscape(project)) + u := fmt.Sprintf("projects/%s/deploy_tokens", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -140,7 +140,7 @@ func (s *DeployTokensService) DeleteProjectDeployToken(pid interface{}, deployTo if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/deploy_tokens/%d", pathEscape(project), deployToken) + u := fmt.Sprintf("projects/%s/deploy_tokens/%d", PathEscape(project), deployToken) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -166,7 +166,7 @@ func (s *DeployTokensService) ListGroupDeployTokens(gid interface{}, opt *ListGr if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/deploy_tokens", pathEscape(group)) + u := fmt.Sprintf("groups/%s/deploy_tokens", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -190,7 +190,7 @@ type CreateGroupDeployTokenOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` ExpiresAt *time.Time `url:"expires_at,omitempty" json:"expires_at,omitempty"` Username *string `url:"username,omitempty" json:"username,omitempty"` - Scopes []string `url:"scopes,omitempty" json:"scopes,omitempty"` + Scopes *[]string `url:"scopes,omitempty" json:"scopes,omitempty"` } // CreateGroupDeployToken creates a new deploy token for a group. 
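The deploy-token option structs above change Scopes from []string to *[]string, so callers now pass the address of a slice literal. A minimal usage sketch against the new field, assuming a configured client and placeholder values for the access token, project ID, token name, and scopes:

package main

import (
	"fmt"
	"log"

	gitlab "github.com/xanzy/go-gitlab"
)

func main() {
	// Placeholder personal access token, for illustration only.
	git, err := gitlab.NewClient("glpat-example")
	if err != nil {
		log.Fatal(err)
	}

	opt := &gitlab.CreateProjectDeployTokenOptions{
		Name: gitlab.String("ci-read-token"),
		// Scopes is now *[]string, so take the address of a slice literal.
		Scopes: &[]string{"read_repository", "read_registry"},
	}

	// 12345 stands in for a real project ID or "group/project" path.
	token, _, err := git.DeployTokens.CreateProjectDeployToken(12345, opt)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("created deploy token: %+v\n", token)
}

The pointer-to-slice form presumably lets the query encoder distinguish "field omitted" from "send an empty list", which plain []string with omitempty cannot express.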
@@ -202,7 +202,7 @@ func (s *DeployTokensService) CreateGroupDeployToken(gid interface{}, opt *Creat if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/deploy_tokens", pathEscape(group)) + u := fmt.Sprintf("groups/%s/deploy_tokens", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -227,7 +227,7 @@ func (s *DeployTokensService) DeleteGroupDeployToken(gid interface{}, deployToke if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/deploy_tokens/%d", pathEscape(group), deployToken) + u := fmt.Sprintf("groups/%s/deploy_tokens/%d", PathEscape(group), deployToken) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/deployments.go b/vendor/github.com/xanzy/go-gitlab/deployments.go index d6b8bbd8e..cfa963b01 100644 --- a/vendor/github.com/xanzy/go-gitlab/deployments.go +++ b/vendor/github.com/xanzy/go-gitlab/deployments.go @@ -94,7 +94,7 @@ func (s *DeploymentsService) ListProjectDeployments(pid interface{}, opts *ListP if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deployments", pathEscape(project)) + u := fmt.Sprintf("projects/%s/deployments", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opts, options) if err != nil { @@ -118,7 +118,7 @@ func (s *DeploymentsService) GetProjectDeployment(pid interface{}, deployment in if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deployments/%d", pathEscape(project), deployment) + u := fmt.Sprintf("projects/%s/deployments/%d", PathEscape(project), deployment) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -154,7 +154,7 @@ func (s *DeploymentsService) CreateProjectDeployment(pid interface{}, opt *Creat if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deployments", pathEscape(project)) + u := fmt.Sprintf("projects/%s/deployments", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -186,7 +186,7 @@ func (s *DeploymentsService) UpdateProjectDeployment(pid interface{}, deployment if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/deployments/%d", pathEscape(project), deployment) + u := fmt.Sprintf("projects/%s/deployments/%d", PathEscape(project), deployment) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/discussions.go b/vendor/github.com/xanzy/go-gitlab/discussions.go index 2a8562d3d..c8f866876 100644 --- a/vendor/github.com/xanzy/go-gitlab/discussions.go +++ b/vendor/github.com/xanzy/go-gitlab/discussions.go @@ -60,7 +60,7 @@ func (s *DiscussionsService) ListIssueDiscussions(pid interface{}, issue int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/discussions", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/discussions", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -86,7 +86,7 @@ func (s *DiscussionsService) GetIssueDiscussion(pid interface{}, issue int, disc return nil, nil, err } u := fmt.Sprintf("projects/%s/issues/%d/discussions/%s", - pathEscape(project), + PathEscape(project), issue, discussion, ) @@ -124,7 +124,7 @@ func (s *DiscussionsService) CreateIssueDiscussion(pid interface{}, issue int, o if err != nil { return nil, nil, err } - u := 
fmt.Sprintf("projects/%s/issues/%d/discussions", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/discussions", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -160,7 +160,7 @@ func (s *DiscussionsService) AddIssueDiscussionNote(pid interface{}, issue int, return nil, nil, err } u := fmt.Sprintf("projects/%s/issues/%d/discussions/%s/notes", - pathEscape(project), + PathEscape(project), issue, discussion, ) @@ -199,7 +199,7 @@ func (s *DiscussionsService) UpdateIssueDiscussionNote(pid interface{}, issue in return nil, nil, err } u := fmt.Sprintf("projects/%s/issues/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), issue, discussion, note, @@ -229,7 +229,7 @@ func (s *DiscussionsService) DeleteIssueDiscussionNote(pid interface{}, issue in return nil, err } u := fmt.Sprintf("projects/%s/issues/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), issue, discussion, note, @@ -260,7 +260,7 @@ func (s *DiscussionsService) ListSnippetDiscussions(pid interface{}, snippet int if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/discussions", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d/discussions", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -286,7 +286,7 @@ func (s *DiscussionsService) GetSnippetDiscussion(pid interface{}, snippet int, return nil, nil, err } u := fmt.Sprintf("projects/%s/snippets/%d/discussions/%s", - pathEscape(project), + PathEscape(project), snippet, discussion, ) @@ -325,7 +325,7 @@ func (s *DiscussionsService) CreateSnippetDiscussion(pid interface{}, snippet in if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/discussions", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d/discussions", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -362,7 +362,7 @@ func (s *DiscussionsService) AddSnippetDiscussionNote(pid interface{}, snippet i return nil, nil, err } u := fmt.Sprintf("projects/%s/snippets/%d/discussions/%s/notes", - pathEscape(project), + PathEscape(project), snippet, discussion, ) @@ -401,7 +401,7 @@ func (s *DiscussionsService) UpdateSnippetDiscussionNote(pid interface{}, snippe return nil, nil, err } u := fmt.Sprintf("projects/%s/snippets/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), snippet, discussion, note, @@ -431,7 +431,7 @@ func (s *DiscussionsService) DeleteSnippetDiscussionNote(pid interface{}, snippe return nil, err } u := fmt.Sprintf("projects/%s/snippets/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), snippet, discussion, note, @@ -463,7 +463,7 @@ func (s *DiscussionsService) ListGroupEpicDiscussions(gid interface{}, epic int, return nil, nil, err } u := fmt.Sprintf("groups/%s/epics/%d/discussions", - pathEscape(group), + PathEscape(group), epic, ) @@ -491,7 +491,7 @@ func (s *DiscussionsService) GetEpicDiscussion(gid interface{}, epic int, discus return nil, nil, err } u := fmt.Sprintf("groups/%s/epics/%d/discussions/%s", - pathEscape(group), + PathEscape(group), epic, discussion, ) @@ -531,7 +531,7 @@ func (s *DiscussionsService) CreateEpicDiscussion(gid interface{}, epic int, opt return nil, nil, err } u := fmt.Sprintf("groups/%s/epics/%d/discussions", - pathEscape(group), + PathEscape(group), epic, ) @@ -569,7 +569,7 @@ 
func (s *DiscussionsService) AddEpicDiscussionNote(gid interface{}, epic int, di return nil, nil, err } u := fmt.Sprintf("groups/%s/epics/%d/discussions/%s/notes", - pathEscape(group), + PathEscape(group), epic, discussion, ) @@ -608,7 +608,7 @@ func (s *DiscussionsService) UpdateEpicDiscussionNote(gid interface{}, epic int, return nil, nil, err } u := fmt.Sprintf("groups/%s/epics/%d/discussions/%s/notes/%d", - pathEscape(group), + PathEscape(group), epic, discussion, note, @@ -638,7 +638,7 @@ func (s *DiscussionsService) DeleteEpicDiscussionNote(gid interface{}, epic int, return nil, err } u := fmt.Sprintf("groups/%s/epics/%d/discussions/%s/notes/%d", - pathEscape(group), + PathEscape(group), epic, discussion, note, @@ -670,7 +670,7 @@ func (s *DiscussionsService) ListMergeRequestDiscussions(pid interface{}, mergeR return nil, nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions", - pathEscape(project), + PathEscape(project), mergeRequest, ) @@ -699,7 +699,7 @@ func (s *DiscussionsService) GetMergeRequestDiscussion(pid interface{}, mergeReq return nil, nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions/%s", - pathEscape(project), + PathEscape(project), mergeRequest, discussion, ) @@ -741,7 +741,7 @@ func (s *DiscussionsService) CreateMergeRequestDiscussion(pid interface{}, merge return nil, nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions", - pathEscape(project), + PathEscape(project), mergeRequest, ) @@ -779,7 +779,7 @@ func (s *DiscussionsService) ResolveMergeRequestDiscussion(pid interface{}, merg return nil, nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions/%s", - pathEscape(project), + PathEscape(project), mergeRequest, discussion, ) @@ -819,7 +819,7 @@ func (s *DiscussionsService) AddMergeRequestDiscussionNote(pid interface{}, merg return nil, nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions/%s/notes", - pathEscape(project), + PathEscape(project), mergeRequest, discussion, ) @@ -860,7 +860,7 @@ func (s *DiscussionsService) UpdateMergeRequestDiscussionNote(pid interface{}, m return nil, nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), mergeRequest, discussion, note, @@ -891,7 +891,7 @@ func (s *DiscussionsService) DeleteMergeRequestDiscussionNote(pid interface{}, m return nil, err } u := fmt.Sprintf("projects/%s/merge_requests/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), mergeRequest, discussion, note, @@ -923,7 +923,7 @@ func (s *DiscussionsService) ListCommitDiscussions(pid interface{}, commit int, return nil, nil, err } u := fmt.Sprintf("projects/%s/repository/commits/%d/discussions", - pathEscape(project), + PathEscape(project), commit, ) @@ -952,7 +952,7 @@ func (s *DiscussionsService) GetCommitDiscussion(pid interface{}, commit int, di return nil, nil, err } u := fmt.Sprintf("projects/%s/repository/commits/%d/discussions/%s", - pathEscape(project), + PathEscape(project), commit, discussion, ) @@ -992,7 +992,7 @@ func (s *DiscussionsService) CreateCommitDiscussion(pid interface{}, commit int, return nil, nil, err } u := fmt.Sprintf("projects/%s/repository/commits/%d/discussions", - pathEscape(project), + PathEscape(project), commit, ) @@ -1030,7 +1030,7 @@ func (s *DiscussionsService) AddCommitDiscussionNote(pid interface{}, commit int return nil, nil, err } u := fmt.Sprintf("projects/%s/repository/commits/%d/discussions/%s/notes", - pathEscape(project), + 
PathEscape(project), commit, discussion, ) @@ -1069,7 +1069,7 @@ func (s *DiscussionsService) UpdateCommitDiscussionNote(pid interface{}, commit return nil, nil, err } u := fmt.Sprintf("projects/%s/repository/commits/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), commit, discussion, note, @@ -1099,7 +1099,7 @@ func (s *DiscussionsService) DeleteCommitDiscussionNote(pid interface{}, commit return nil, err } u := fmt.Sprintf("projects/%s/repository/commits/%d/discussions/%s/notes/%d", - pathEscape(project), + PathEscape(project), commit, discussion, note, diff --git a/vendor/github.com/xanzy/go-gitlab/environments.go b/vendor/github.com/xanzy/go-gitlab/environments.go index c5d66db65..d88129395 100644 --- a/vendor/github.com/xanzy/go-gitlab/environments.go +++ b/vendor/github.com/xanzy/go-gitlab/environments.go @@ -67,7 +67,7 @@ func (s *EnvironmentsService) ListEnvironments(pid interface{}, opts *ListEnviro if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/environments", pathEscape(project)) + u := fmt.Sprintf("projects/%s/environments", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opts, options) if err != nil { @@ -92,7 +92,7 @@ func (s *EnvironmentsService) GetEnvironment(pid interface{}, environment int, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/environments/%d", pathEscape(project), environment) + u := fmt.Sprintf("projects/%s/environments/%d", PathEscape(project), environment) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -129,7 +129,7 @@ func (s *EnvironmentsService) CreateEnvironment(pid interface{}, opt *CreateEnvi if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/environments", pathEscape(project)) + u := fmt.Sprintf("projects/%s/environments", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -163,7 +163,7 @@ func (s *EnvironmentsService) EditEnvironment(pid interface{}, environment int, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/environments/%d", pathEscape(project), environment) + u := fmt.Sprintf("projects/%s/environments/%d", PathEscape(project), environment) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -188,7 +188,7 @@ func (s *EnvironmentsService) DeleteEnvironment(pid interface{}, environment int if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/environments/%d", pathEscape(project), environment) + u := fmt.Sprintf("projects/%s/environments/%d", PathEscape(project), environment) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -207,7 +207,7 @@ func (s *EnvironmentsService) StopEnvironment(pid interface{}, environmentID int if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/environments/%d/stop", pathEscape(project), environmentID) + u := fmt.Sprintf("projects/%s/environments/%d/stop", PathEscape(project), environmentID) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/epic_issues.go b/vendor/github.com/xanzy/go-gitlab/epic_issues.go index a3489add8..9064c89b2 100644 --- a/vendor/github.com/xanzy/go-gitlab/epic_issues.go +++ b/vendor/github.com/xanzy/go-gitlab/epic_issues.go @@ -48,7 +48,7 @@ func (s *EpicIssuesService) ListEpicIssues(gid interface{}, epic int, opt *ListO if err != nil { return nil, nil, err } - u := 
fmt.Sprintf("groups/%s/epics/%d/issues", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d/issues", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -73,7 +73,7 @@ func (s *EpicIssuesService) AssignEpicIssue(gid interface{}, epic, issue int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/issues/%d", pathEscape(group), epic, issue) + u := fmt.Sprintf("groups/%s/epics/%d/issues/%d", PathEscape(group), epic, issue) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -98,7 +98,7 @@ func (s *EpicIssuesService) RemoveEpicIssue(gid interface{}, epic, epicIssue int if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/issues/%d", pathEscape(group), epic, epicIssue) + u := fmt.Sprintf("groups/%s/epics/%d/issues/%d", PathEscape(group), epic, epicIssue) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -135,7 +135,7 @@ func (s *EpicIssuesService) UpdateEpicIssueAssignment(gid interface{}, epic, epi if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/issues/%d", pathEscape(group), epic, epicIssue) + u := fmt.Sprintf("groups/%s/epics/%d/issues/%d", PathEscape(group), epic, epicIssue) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/epics.go b/vendor/github.com/xanzy/go-gitlab/epics.go index 15002a11d..cbd6b113f 100644 --- a/vendor/github.com/xanzy/go-gitlab/epics.go +++ b/vendor/github.com/xanzy/go-gitlab/epics.go @@ -63,6 +63,7 @@ type Epic struct { DueDateFromMilestones *ISOTime `json:"due_date_from_milestones"` CreatedAt *time.Time `json:"created_at"` UpdatedAt *time.Time `json:"updated_at"` + ClosedAt *time.Time `json:"closed_at"` Labels []string `json:"labels"` Upvotes int `json:"upvotes"` Downvotes int `json:"downvotes"` @@ -80,7 +81,7 @@ func (e Epic) String() string { type ListGroupEpicsOptions struct { ListOptions AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` WithLabelDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` Sort *string `url:"sort,omitempty" json:"sort,omitempty"` @@ -104,7 +105,7 @@ func (s *EpicsService) ListGroupEpics(gid interface{}, opt *ListGroupEpicsOption if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics", pathEscape(group)) + u := fmt.Sprintf("groups/%s/epics", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -128,7 +129,7 @@ func (s *EpicsService) GetEpic(gid interface{}, epic int, options ...RequestOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -152,7 +153,7 @@ func (s *EpicsService) GetEpicLinks(gid interface{}, epic int, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/epics", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d/epics", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -174,7 +175,7 
@@ func (s *EpicsService) GetEpicLinks(gid interface{}, epic int, options ...Reques type CreateEpicOptions struct { Title *string `url:"title,omitempty" json:"title,omitempty"` Description *string `url:"description,omitempty" json:"description,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` StartDateIsFixed *bool `url:"start_date_is_fixed,omitempty" json:"start_date_is_fixed,omitempty"` StartDateFixed *ISOTime `url:"start_date_fixed,omitempty" json:"start_date_fixed,omitempty"` DueDateIsFixed *bool `url:"due_date_is_fixed,omitempty" json:"due_date_is_fixed,omitempty"` @@ -189,7 +190,7 @@ func (s *EpicsService) CreateEpic(gid interface{}, opt *CreateEpicOptions, optio if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics", pathEscape(group)) + u := fmt.Sprintf("groups/%s/epics", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -211,7 +212,7 @@ func (s *EpicsService) CreateEpic(gid interface{}, opt *CreateEpicOptions, optio type UpdateEpicOptions struct { Title *string `url:"title,omitempty" json:"title,omitempty"` Description *string `url:"description,omitempty" json:"description,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` StartDateIsFixed *bool `url:"start_date_is_fixed,omitempty" json:"start_date_is_fixed,omitempty"` StartDateFixed *ISOTime `url:"start_date_fixed,omitempty" json:"start_date_fixed,omitempty"` DueDateIsFixed *bool `url:"due_date_is_fixed,omitempty" json:"due_date_is_fixed,omitempty"` @@ -228,7 +229,7 @@ func (s *EpicsService) UpdateEpic(gid interface{}, epic int, opt *UpdateEpicOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -252,7 +253,7 @@ func (s *EpicsService) DeleteEpic(gid interface{}, epic int, options ...RequestO if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/epics/%d", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/events.go b/vendor/github.com/xanzy/go-gitlab/events.go index 8fec14c4e..c7753f2b8 100644 --- a/vendor/github.com/xanzy/go-gitlab/events.go +++ b/vendor/github.com/xanzy/go-gitlab/events.go @@ -131,7 +131,7 @@ func (s *EventsService) ListProjectVisibleEvents(pid interface{}, opt *ListContr if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/events", pathEscape(project)) + u := fmt.Sprintf("projects/%s/events", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/external_status_checks.go b/vendor/github.com/xanzy/go-gitlab/external_status_checks.go index b4b145143..b6d3092db 100644 --- a/vendor/github.com/xanzy/go-gitlab/external_status_checks.go +++ b/vendor/github.com/xanzy/go-gitlab/external_status_checks.go @@ -48,7 +48,7 @@ func (s *ExternalStatusChecksService) ListMergeStatusChecks(pid interface{}, mr if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/status_checks", pathEscape(project), mr) + u := 
fmt.Sprintf("projects/%s/merge_requests/%d/status_checks", PathEscape(project), mr) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -73,7 +73,7 @@ func (s *ExternalStatusChecksService) ListProjectStatusChecks(pid interface{}, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/external_status_checks", pathEscape(project)) + u := fmt.Sprintf("projects/%s/external_status_checks", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/freeze_periods.go b/vendor/github.com/xanzy/go-gitlab/freeze_periods.go index dfcca0789..255666f4a 100644 --- a/vendor/github.com/xanzy/go-gitlab/freeze_periods.go +++ b/vendor/github.com/xanzy/go-gitlab/freeze_periods.go @@ -59,7 +59,7 @@ func (s *FreezePeriodsService) ListFreezePeriods(pid interface{}, opt *ListFreez if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/freeze_periods", pathEscape(project)) + u := fmt.Sprintf("projects/%s/freeze_periods", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -84,7 +84,7 @@ func (s *FreezePeriodsService) GetFreezePeriod(pid interface{}, freezePeriod int if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/freeze_periods/%d", pathEscape(project), freezePeriod) + u := fmt.Sprintf("projects/%s/freeze_periods/%d", PathEscape(project), freezePeriod) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -120,7 +120,7 @@ func (s *FreezePeriodsService) CreateFreezePeriodOptions(pid interface{}, opt *C if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/freeze_periods", pathEscape(project)) + u := fmt.Sprintf("projects/%s/freeze_periods", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -156,7 +156,7 @@ func (s *FreezePeriodsService) UpdateFreezePeriodOptions(pid interface{}, freeze if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/freeze_periods/%d", pathEscape(project), freezePeriod) + u := fmt.Sprintf("projects/%s/freeze_periods/%d", PathEscape(project), freezePeriod) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -183,7 +183,7 @@ func (s *FreezePeriodsService) DeleteFreezePeriod(pid interface{}, freezePeriod if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/freeze_periods/%d", pathEscape(project), freezePeriod) + u := fmt.Sprintf("projects/%s/freeze_periods/%d", PathEscape(project), freezePeriod) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/generic_packages.go b/vendor/github.com/xanzy/go-gitlab/generic_packages.go index ba875fa35..95f382abc 100644 --- a/vendor/github.com/xanzy/go-gitlab/generic_packages.go +++ b/vendor/github.com/xanzy/go-gitlab/generic_packages.go @@ -70,10 +70,10 @@ func (s *GenericPackagesService) FormatPackageURL(pid interface{}, packageName, } u := fmt.Sprintf( "projects/%s/packages/generic/%s/%s/%s", - pathEscape(project), - pathEscape(packageName), - pathEscape(packageVersion), - pathEscape(fileName), + PathEscape(project), + PathEscape(packageName), + PathEscape(packageVersion), + PathEscape(fileName), ) return u, nil } @@ -99,10 +99,10 @@ func (s *GenericPackagesService) PublishPackageFile(pid interface{}, packageName } u := fmt.Sprintf( "projects/%s/packages/generic/%s/%s/%s", - 
pathEscape(project), - pathEscape(packageName), - pathEscape(packageVersion), - pathEscape(fileName), + PathEscape(project), + PathEscape(packageName), + PathEscape(packageVersion), + PathEscape(fileName), ) // We need to create the request as a GET request to make sure the options @@ -137,10 +137,10 @@ func (s *GenericPackagesService) DownloadPackageFile(pid interface{}, packageNam } u := fmt.Sprintf( "projects/%s/packages/generic/%s/%s/%s", - pathEscape(project), - pathEscape(packageName), - pathEscape(packageVersion), - pathEscape(fileName), + PathEscape(project), + PathEscape(packageName), + PathEscape(packageVersion), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) diff --git a/vendor/github.com/xanzy/go-gitlab/geo_nodes.go b/vendor/github.com/xanzy/go-gitlab/geo_nodes.go index 446aafcc2..fa7c2546a 100644 --- a/vendor/github.com/xanzy/go-gitlab/geo_nodes.go +++ b/vendor/github.com/xanzy/go-gitlab/geo_nodes.go @@ -69,20 +69,20 @@ type GeoNodesService struct { // GitLab API docs: // https://docs.gitlab.com/ee/api/geo_nodes.html#create-a-new-geo-node type CreateGeoNodesOptions struct { - Primary *bool `url:"primary,omitempty" json:"primary,omitempty"` - Enabled *bool `url:"enabled,omitempty" json:"enabled,omitempty"` - Name *string `url:"name,omitempty" json:"name,omitempty"` - URL *string `url:"url,omitempty" json:"url,omitempty"` - InternalURL *string `url:"internal_url,omitempty" json:"internal_url,omitempty"` - FilesMaxCapacity *int `url:"files_max_capacity,omitempty" json:"files_max_capacity,omitempty"` - ReposMaxCapacity *int `url:"repos_max_capacity,omitempty" json:"repos_max_capacity,omitempty"` - VerificationMaxCapacity *int `url:"verification_max_capacity,omitempty" json:"verification_max_capacity,omitempty"` - ContainerRepositoriesMaxCapacity *int `url:"container_repositories_max_capacity,omitempty" json:"container_repositories_max_capacity,omitempty"` - SyncObjectStorage *bool `url:"sync_object_storage,omitempty" json:"sync_object_storage,omitempty"` - SelectiveSyncType *string `url:"selective_sync_type,omitempty" json:"selective_sync_type,omitempty"` - SelectiveSyncShards []string `url:"selective_sync_shards,omitempty" json:"selective_sync_shards,omitempty"` - SelectiveSyncNamespaceIds []int `url:"selective_sync_namespace_ids,omitempty" json:"selective_sync_namespace_ids,omitempty"` - MinimumReverificationInterval *int `url:"minimum_reverification_interval,omitempty" json:"minimum_reverification_interval,omitempty"` + Primary *bool `url:"primary,omitempty" json:"primary,omitempty"` + Enabled *bool `url:"enabled,omitempty" json:"enabled,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` + URL *string `url:"url,omitempty" json:"url,omitempty"` + InternalURL *string `url:"internal_url,omitempty" json:"internal_url,omitempty"` + FilesMaxCapacity *int `url:"files_max_capacity,omitempty" json:"files_max_capacity,omitempty"` + ReposMaxCapacity *int `url:"repos_max_capacity,omitempty" json:"repos_max_capacity,omitempty"` + VerificationMaxCapacity *int `url:"verification_max_capacity,omitempty" json:"verification_max_capacity,omitempty"` + ContainerRepositoriesMaxCapacity *int `url:"container_repositories_max_capacity,omitempty" json:"container_repositories_max_capacity,omitempty"` + SyncObjectStorage *bool `url:"sync_object_storage,omitempty" json:"sync_object_storage,omitempty"` + SelectiveSyncType *string `url:"selective_sync_type,omitempty" json:"selective_sync_type,omitempty"` + SelectiveSyncShards *[]string 
`url:"selective_sync_shards,omitempty" json:"selective_sync_shards,omitempty"` + SelectiveSyncNamespaceIds *[]int `url:"selective_sync_namespace_ids,omitempty" json:"selective_sync_namespace_ids,omitempty"` + MinimumReverificationInterval *int `url:"minimum_reverification_interval,omitempty" json:"minimum_reverification_interval,omitempty"` } // CreateGeoNode creates a new Geo Node. @@ -155,20 +155,20 @@ func (s *GeoNodesService) GetGeoNode(id int, options ...RequestOptionFunc) (*Geo // GitLab API docs: // https://docs.gitlab.com/ee/api/geo_nodes.html#edit-a-geo-node type UpdateGeoNodesOptions struct { - ID *int `url:"primary,omitempty" json:"primary,omitempty"` - Enabled *bool `url:"enabled,omitempty" json:"enabled,omitempty"` - Name *string `url:"name,omitempty" json:"name,omitempty"` - URL *string `url:"url,omitempty" json:"url,omitempty"` - InternalURL *string `url:"internal_url,omitempty" json:"internal_url,omitempty"` - FilesMaxCapacity *int `url:"files_max_capacity,omitempty" json:"files_max_capacity,omitempty"` - ReposMaxCapacity *int `url:"repos_max_capacity,omitempty" json:"repos_max_capacity,omitempty"` - VerificationMaxCapacity *int `url:"verification_max_capacity,omitempty" json:"verification_max_capacity,omitempty"` - ContainerRepositoriesMaxCapacity *int `url:"container_repositories_max_capacity,omitempty" json:"container_repositories_max_capacity,omitempty"` - SyncObjectStorage *bool `url:"sync_object_storage,omitempty" json:"sync_object_storage,omitempty"` - SelectiveSyncType *string `url:"selective_sync_type,omitempty" json:"selective_sync_type,omitempty"` - SelectiveSyncShards []string `url:"selective_sync_shards,omitempty" json:"selective_sync_shards,omitempty"` - SelectiveSyncNamespaceIds []int `url:"selective_sync_namespace_ids,omitempty" json:"selective_sync_namespace_ids,omitempty"` - MinimumReverificationInterval *int `url:"minimum_reverification_interval,omitempty" json:"minimum_reverification_interval,omitempty"` + ID *int `url:"primary,omitempty" json:"primary,omitempty"` + Enabled *bool `url:"enabled,omitempty" json:"enabled,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` + URL *string `url:"url,omitempty" json:"url,omitempty"` + InternalURL *string `url:"internal_url,omitempty" json:"internal_url,omitempty"` + FilesMaxCapacity *int `url:"files_max_capacity,omitempty" json:"files_max_capacity,omitempty"` + ReposMaxCapacity *int `url:"repos_max_capacity,omitempty" json:"repos_max_capacity,omitempty"` + VerificationMaxCapacity *int `url:"verification_max_capacity,omitempty" json:"verification_max_capacity,omitempty"` + ContainerRepositoriesMaxCapacity *int `url:"container_repositories_max_capacity,omitempty" json:"container_repositories_max_capacity,omitempty"` + SyncObjectStorage *bool `url:"sync_object_storage,omitempty" json:"sync_object_storage,omitempty"` + SelectiveSyncType *string `url:"selective_sync_type,omitempty" json:"selective_sync_type,omitempty"` + SelectiveSyncShards *[]string `url:"selective_sync_shards,omitempty" json:"selective_sync_shards,omitempty"` + SelectiveSyncNamespaceIds *[]int `url:"selective_sync_namespace_ids,omitempty" json:"selective_sync_namespace_ids,omitempty"` + MinimumReverificationInterval *int `url:"minimum_reverification_interval,omitempty" json:"minimum_reverification_interval,omitempty"` } // EditGeoNode updates settings of an existing Geo node. 
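The same pointer-to-slice convention shows up in the epic and Geo node option structs above (Labels *Labels, SelectiveSyncShards *[]string, SelectiveSyncNamespaceIds *[]int). A short sketch of filtering group epics under the new Labels type, assuming a configured *gitlab.Client and a placeholder group path:

import (
	"fmt"

	gitlab "github.com/xanzy/go-gitlab"
)

// listBugEpics is illustrative; git is assumed to be an already configured client.
func listBugEpics(git *gitlab.Client) error {
	opt := &gitlab.ListGroupEpicsOptions{
		// Labels is declared as `type Labels []string`, so a *Labels value is
		// the address of a composite literal.
		Labels:  &gitlab.Labels{"bug"},
		OrderBy: gitlab.String("created_at"),
		Sort:    gitlab.String("desc"),
	}

	epics, _, err := git.Epics.ListGroupEpics("my-group/sub-group", opt)
	if err != nil {
		return err
	}
	for _, e := range epics {
		fmt.Println(e.Title)
	}
	return nil
}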
diff --git a/vendor/github.com/xanzy/go-gitlab/gitlab.go b/vendor/github.com/xanzy/go-gitlab/gitlab.go index ddfee949f..697e21df9 100644 --- a/vendor/github.com/xanzy/go-gitlab/gitlab.go +++ b/vendor/github.com/xanzy/go-gitlab/gitlab.go @@ -18,12 +18,14 @@ package gitlab import ( + "bytes" "context" "encoding/json" "fmt" "io" "io/ioutil" "math/rand" + "mime/multipart" "net/http" "net/url" "sort" @@ -125,10 +127,12 @@ type Client struct { GenericPackages *GenericPackagesService GeoNodes *GeoNodesService GitIgnoreTemplates *GitIgnoreTemplatesService + GroupAccessTokens *GroupAccessTokensService GroupBadges *GroupBadgesService GroupCluster *GroupClustersService GroupImportExport *GroupImportExportService GroupIssueBoards *GroupIssueBoardsService + GroupIterations *GroupIterationsService GroupLabels *GroupLabelsService GroupMembers *GroupMembersService GroupMilestones *GroupMilestonesService @@ -147,6 +151,7 @@ type Client struct { License *LicenseService LicenseTemplates *LicenseTemplatesService ManagedLicenses *ManagedLicensesService + Markdown *MarkdownService MergeRequestApprovals *MergeRequestApprovalsService MergeRequests *MergeRequestsService Milestones *MilestonesService @@ -164,6 +169,7 @@ type Client struct { ProjectAccessTokens *ProjectAccessTokensService ProjectCluster *ProjectClustersService ProjectImportExport *ProjectImportExportService + ProjectIterations *ProjectIterationsService ProjectMembers *ProjectMembersService ProjectMirrors *ProjectMirrorService ProjectSnippets *ProjectSnippetsService @@ -176,6 +182,7 @@ type Client struct { Releases *ReleasesService Repositories *RepositoriesService RepositoryFiles *RepositoryFilesService + RepositorySubmodules *RepositorySubmodulesService ResourceLabelEvents *ResourceLabelEventsService ResourceStateEvents *ResourceStateEventsService Runners *RunnersService @@ -187,6 +194,7 @@ type Client struct { SystemHooks *SystemHooksService Tags *TagsService Todos *TodosService + Topics *TopicsService Users *UsersService Validate *ValidateService Version *VersionService @@ -316,10 +324,12 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.GenericPackages = &GenericPackagesService{client: c} c.GeoNodes = &GeoNodesService{client: c} c.GitIgnoreTemplates = &GitIgnoreTemplatesService{client: c} + c.GroupAccessTokens = &GroupAccessTokensService{client: c} c.GroupBadges = &GroupBadgesService{client: c} c.GroupCluster = &GroupClustersService{client: c} c.GroupImportExport = &GroupImportExportService{client: c} c.GroupIssueBoards = &GroupIssueBoardsService{client: c} + c.GroupIterations = &GroupIterationsService{client: c} c.GroupLabels = &GroupLabelsService{client: c} c.GroupMembers = &GroupMembersService{client: c} c.GroupMilestones = &GroupMilestonesService{client: c} @@ -338,6 +348,7 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.License = &LicenseService{client: c} c.LicenseTemplates = &LicenseTemplatesService{client: c} c.ManagedLicenses = &ManagedLicensesService{client: c} + c.Markdown = &MarkdownService{client: c} c.MergeRequestApprovals = &MergeRequestApprovalsService{client: c} c.MergeRequests = &MergeRequestsService{client: c, timeStats: timeStats} c.Milestones = &MilestonesService{client: c} @@ -355,6 +366,7 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.ProjectAccessTokens = &ProjectAccessTokensService{client: c} c.ProjectCluster = &ProjectClustersService{client: c} c.ProjectImportExport = &ProjectImportExportService{client: c} + c.ProjectIterations = 
&ProjectIterationsService{client: c} c.ProjectMembers = &ProjectMembersService{client: c} c.ProjectMirrors = &ProjectMirrorService{client: c} c.ProjectSnippets = &ProjectSnippetsService{client: c} @@ -367,6 +379,7 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.Releases = &ReleasesService{client: c} c.Repositories = &RepositoriesService{client: c} c.RepositoryFiles = &RepositoryFilesService{client: c} + c.RepositorySubmodules = &RepositorySubmodulesService{client: c} c.ResourceLabelEvents = &ResourceLabelEventsService{client: c} c.ResourceStateEvents = &ResourceStateEventsService{client: c} c.Runners = &RunnersService{client: c} @@ -378,6 +391,7 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.SystemHooks = &SystemHooksService{client: c} c.Tags = &TagsService{client: c} c.Todos = &TodosService{client: c} + c.Topics = &TopicsService{client: c} c.Users = &UsersService{client: c} c.Validate = &ValidateService{client: c} c.Version = &VersionService{client: c} @@ -514,11 +528,11 @@ func (c *Client) setBaseURL(urlStr string) error { return nil } -// NewRequest creates an API request. A relative URL path can be provided in -// path, in which case it is resolved relative to the base URL of the Client. -// Relative URL paths should always be specified without a preceding slash. If -// specified, the value pointed to by body is JSON encoded and included as the -// request body. +// NewRequest creates a new API request. The method expects a relative URL +// path that will be resolved relative to the base URL of the Client. +// Relative URL paths should always be specified without a preceding slash. +// If specified, the value pointed to by body is JSON encoded and included +// as the request body. func (c *Client) NewRequest(method, path string, opt interface{}, options []RequestOptionFunc) (*retryablehttp.Request, error) { u := *c.baseURL unescaped, err := url.PathUnescape(path) @@ -579,6 +593,82 @@ func (c *Client) NewRequest(method, path string, opt interface{}, options []Requ return req, nil } +// UploadRequest creates an API request for uploading a file. The method +// expects a relative URL path that will be resolved relative to the base +// URL of the Client. Relative URL paths should always be specified without +// a preceding slash. If specified, the value pointed to by body is JSON +// encoded and included as the request body. +func (c *Client) UploadRequest(method, path string, content io.Reader, filename string, uploadType UploadType, opt interface{}, options []RequestOptionFunc) (*retryablehttp.Request, error) { + u := *c.baseURL + unescaped, err := url.PathUnescape(path) + if err != nil { + return nil, err + } + + // Set the encoded path data + u.RawPath = c.baseURL.Path + path + u.Path = c.baseURL.Path + unescaped + + // Create a request specific headers map. 
+ reqHeaders := make(http.Header) + reqHeaders.Set("Accept", "application/json") + + if c.UserAgent != "" { + reqHeaders.Set("User-Agent", c.UserAgent) + } + + b := new(bytes.Buffer) + w := multipart.NewWriter(b) + + fw, err := w.CreateFormFile(string(uploadType), filename) + if err != nil { + return nil, err + } + + if _, err := io.Copy(fw, content); err != nil { + return nil, err + } + + if opt != nil { + fields, err := query.Values(opt) + if err != nil { + return nil, err + } + for name := range fields { + if err = w.WriteField(name, fmt.Sprintf("%v", fields.Get(name))); err != nil { + return nil, err + } + } + } + + if err = w.Close(); err != nil { + return nil, err + } + + reqHeaders.Set("Content-Type", w.FormDataContentType()) + + req, err := retryablehttp.NewRequest(method, u.String(), b) + if err != nil { + return nil, err + } + + for _, fn := range options { + if fn == nil { + continue + } + if err := fn(req); err != nil { + return nil, err + } + } + + // Set the request specific headers. + for k, v := range reqHeaders { + req.Header[k] = v + } + + return req, nil +} + // Response is a GitLab API response. This wraps the standard http.Response // returned from GitLab and provides convenient access to things like // pagination links. @@ -757,7 +847,7 @@ func parseID(id interface{}) (string, error) { } // Helper function to escape a project identifier. -func pathEscape(s string) string { +func PathEscape(s string) string { return strings.ReplaceAll(url.PathEscape(s), ".", "%2E") } diff --git a/vendor/github.com/xanzy/go-gitlab/go.mod b/vendor/github.com/xanzy/go-gitlab/go.mod index e363c2737..f9a606620 100644 --- a/vendor/github.com/xanzy/go-gitlab/go.mod +++ b/vendor/github.com/xanzy/go-gitlab/go.mod @@ -1,15 +1,22 @@ module github.com/xanzy/go-gitlab require ( - github.com/google/go-querystring v1.0.0 + github.com/google/go-querystring v1.1.0 github.com/hashicorp/go-cleanhttp v0.5.1 github.com/hashicorp/go-retryablehttp v0.6.8 github.com/stretchr/testify v1.4.0 - golang.org/x/net v0.0.0-20201021035429-f5854403a974 // indirect golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288 - golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 // indirect golang.org/x/time v0.0.0-20191024005414-555d28b269f0 - google.golang.org/appengine v1.3.0 // indirect ) -go 1.13 +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/golang/protobuf v1.2.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + golang.org/x/net v0.0.0-20201021035429-f5854403a974 // indirect + golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 // indirect + google.golang.org/appengine v1.3.0 // indirect + gopkg.in/yaml.v2 v2.2.2 // indirect +) + +go 1.17 diff --git a/vendor/github.com/xanzy/go-gitlab/go.sum b/vendor/github.com/xanzy/go-gitlab/go.sum index dbc0a11c3..d09b2acea 100644 --- a/vendor/github.com/xanzy/go-gitlab/go.sum +++ b/vendor/github.com/xanzy/go-gitlab/go.sum @@ -3,8 +3,10 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= 
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI= @@ -13,13 +15,11 @@ github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -31,15 +31,13 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e h1:FDhOuMEY4JVRztM/gsbk+IKUQ8kj74bxZrgw87eMMVc= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.3.0 h1:FBSsiFRMz3LBeXIomRnVzrQwSDj4ibvcRexLG0LZGQk= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 
h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= diff --git a/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go b/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go new file mode 100644 index 000000000..41532bf08 --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go @@ -0,0 +1,140 @@ +// +// Copyright 2022, Masahiro Yoshida +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/http" + "time" +) + +// GroupAccessTokensService handles communication with the +// groups access tokens related methods of the GitLab API. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/group_access_tokens.html +type GroupAccessTokensService struct { + client *Client +} + +// GroupAccessToken represents a GitLab Group Access Token. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/group_access_tokens.html +type GroupAccessToken struct { + ID int `json:"id"` + UserID int `json:"user_id"` + Name string `json:"name"` + Scopes []string `json:"scopes"` + CreatedAt *time.Time `json:"created_at"` + ExpiresAt *ISOTime `json:"expires_at"` + Active bool `json:"active"` + Revoked bool `json:"revoked"` + Token string `json:"token"` + AccessLevel AccessLevelValue `json:"access_level"` +} + +func (v GroupAccessToken) String() string { + return Stringify(v) +} + +// ListGroupAccessTokensOptions represents the available options for +// listing variables in a group. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_access_tokens.html#list-group-access-tokens +type ListGroupAccessTokensOptions ListOptions + +// ListGroupAccessTokens gets a list of all Group Access Tokens in a +// group. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_access_tokens.html#list-group-access-tokens +func (s *GroupAccessTokensService) ListGroupAccessTokens(gid interface{}, opt *ListGroupAccessTokensOptions, options ...RequestOptionFunc) ([]*GroupAccessToken, *Response, error) { + groups, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/access_tokens", PathEscape(groups)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var gats []*GroupAccessToken + resp, err := s.client.Do(req, &gats) + if err != nil { + return nil, resp, err + } + + return gats, resp, err +} + +// CreateGroupAccessTokenOptions represents the available CreateVariable() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_access_tokens.html#create-a-group-access-token +type CreateGroupAccessTokenOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + Scopes *[]string `url:"scopes,omitempty" json:"scopes,omitempty"` + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + ExpiresAt *ISOTime `url:"expires_at,omitempty" json:"expires_at,omitempty"` +} + +// CreateGroupAccessToken creates a new Group Access Token. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_access_tokens.html#create-a-group-access-token +func (s *GroupAccessTokensService) CreateGroupAccessToken(gid interface{}, opt *CreateGroupAccessTokenOptions, options ...RequestOptionFunc) (*GroupAccessToken, *Response, error) { + groups, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/access_tokens", PathEscape(groups)) + + req, err := s.client.NewRequest(http.MethodPost, u, opt, options) + if err != nil { + return nil, nil, err + } + + pat := new(GroupAccessToken) + resp, err := s.client.Do(req, pat) + if err != nil { + return nil, resp, err + } + + return pat, resp, err +} + +// DeleteGroupAccessToken deletes a Group Access Token. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_access_tokens.html#revoke-a-group-access-token +func (s *GroupAccessTokensService) DeleteGroupAccessToken(gid interface{}, id int, options ...RequestOptionFunc) (*Response, error) { + groups, err := parseID(gid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("groups/%s/access_tokens/%d", PathEscape(groups), id) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} diff --git a/vendor/github.com/xanzy/go-gitlab/group_badges.go b/vendor/github.com/xanzy/go-gitlab/group_badges.go index 964999148..7207fe71c 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_badges.go +++ b/vendor/github.com/xanzy/go-gitlab/group_badges.go @@ -66,7 +66,7 @@ func (s *GroupBadgesService) ListGroupBadges(gid interface{}, opt *ListGroupBadg if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/badges", pathEscape(group)) + u := fmt.Sprintf("groups/%s/badges", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -91,7 +91,7 @@ func (s *GroupBadgesService) GetGroupBadge(gid interface{}, badge int, options . 
if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/badges/%d", pathEscape(group), badge) + u := fmt.Sprintf("groups/%s/badges/%d", PathEscape(group), badge) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -125,7 +125,7 @@ func (s *GroupBadgesService) AddGroupBadge(gid interface{}, opt *AddGroupBadgeOp if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/badges", pathEscape(group)) + u := fmt.Sprintf("groups/%s/badges", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -159,7 +159,7 @@ func (s *GroupBadgesService) EditGroupBadge(gid interface{}, badge int, opt *Edi if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/badges/%d", pathEscape(group), badge) + u := fmt.Sprintf("groups/%s/badges/%d", PathEscape(group), badge) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -184,7 +184,7 @@ func (s *GroupBadgesService) DeleteGroupBadge(gid interface{}, badge int, option if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/badges/%d", pathEscape(group), badge) + u := fmt.Sprintf("groups/%s/badges/%d", PathEscape(group), badge) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -213,7 +213,7 @@ func (s *GroupBadgesService) PreviewGroupBadge(gid interface{}, opt *GroupBadgeP if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/badges/render", pathEscape(group)) + u := fmt.Sprintf("groups/%s/badges/render", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_boards.go b/vendor/github.com/xanzy/go-gitlab/group_boards.go index 906db1079..30a791b3c 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_boards.go +++ b/vendor/github.com/xanzy/go-gitlab/group_boards.go @@ -62,7 +62,7 @@ func (s *GroupIssueBoardsService) ListGroupIssueBoards(gid interface{}, opt *Lis if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/boards", pathEscape(group)) + u := fmt.Sprintf("groups/%s/boards", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -96,7 +96,7 @@ func (s *GroupIssueBoardsService) CreateGroupIssueBoard(gid interface{}, opt *Cr if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/boards", pathEscape(group)) + u := fmt.Sprintf("groups/%s/boards", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -121,7 +121,7 @@ func (s *GroupIssueBoardsService) GetGroupIssueBoard(gid interface{}, board int, if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/boards/%d", pathEscape(group), board) + u := fmt.Sprintf("groups/%s/boards/%d", PathEscape(group), board) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -145,7 +145,7 @@ type UpdateGroupIssueBoardOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` - Labels Labels `url:"labels,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,omitempty" json:"labels,omitempty"` Weight *int `url:"weight,omitempty" json:"weight,omitempty"` } @@ -158,7 +158,7 @@ func (s *GroupIssueBoardsService) UpdateIssueBoard(gid interface{}, board int, o if err != nil { return nil, nil, err } 
- u := fmt.Sprintf("groups/%s/boards/%d", pathEscape(group), board) + u := fmt.Sprintf("groups/%s/boards/%d", PathEscape(group), board) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -183,7 +183,7 @@ func (s *GroupIssueBoardsService) DeleteIssueBoard(gid interface{}, board int, o if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/boards/%d", pathEscape(group), board) + u := fmt.Sprintf("groups/%s/boards/%d", PathEscape(group), board) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -209,7 +209,7 @@ func (s *GroupIssueBoardsService) ListGroupIssueBoardLists(gid interface{}, boar if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/boards/%d/lists", pathEscape(group), board) + u := fmt.Sprintf("groups/%s/boards/%d/lists", PathEscape(group), board) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -235,7 +235,7 @@ func (s *GroupIssueBoardsService) GetGroupIssueBoardList(gid interface{}, board, return nil, nil, err } u := fmt.Sprintf("groups/%s/boards/%d/lists/%d", - pathEscape(group), + PathEscape(group), board, list, ) @@ -272,7 +272,7 @@ func (s *GroupIssueBoardsService) CreateGroupIssueBoardList(gid interface{}, boa if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/boards/%d/lists", pathEscape(group), board) + u := fmt.Sprintf("groups/%s/boards/%d/lists", PathEscape(group), board) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -308,7 +308,7 @@ func (s *GroupIssueBoardsService) UpdateIssueBoardList(gid interface{}, board, l return nil, nil, err } u := fmt.Sprintf("groups/%s/boards/%d/lists/%d", - pathEscape(group), + PathEscape(group), board, list, ) @@ -338,7 +338,7 @@ func (s *GroupIssueBoardsService) DeleteGroupIssueBoardList(gid interface{}, boa return nil, err } u := fmt.Sprintf("groups/%s/boards/%d/lists/%d", - pathEscape(group), + PathEscape(group), board, list, ) diff --git a/vendor/github.com/xanzy/go-gitlab/group_clusters.go b/vendor/github.com/xanzy/go-gitlab/group_clusters.go index c3f15c263..f2b9e2ac5 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_clusters.go +++ b/vendor/github.com/xanzy/go-gitlab/group_clusters.go @@ -64,7 +64,7 @@ func (s *GroupClustersService) ListClusters(pid interface{}, options ...RequestO if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/clusters", pathEscape(group)) + u := fmt.Sprintf("groups/%s/clusters", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -89,7 +89,7 @@ func (s *GroupClustersService) GetCluster(pid interface{}, cluster int, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/clusters/%d", pathEscape(group), cluster) + u := fmt.Sprintf("groups/%s/clusters/%d", PathEscape(group), cluster) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -137,7 +137,7 @@ func (s *GroupClustersService) AddCluster(pid interface{}, opt *AddGroupClusterO if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/clusters/user", pathEscape(group)) + u := fmt.Sprintf("groups/%s/clusters/user", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -181,7 +181,7 @@ func (s *GroupClustersService) EditCluster(pid interface{}, cluster int, opt *Ed if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/clusters/%d", pathEscape(group), cluster) + u := 
fmt.Sprintf("groups/%s/clusters/%d", PathEscape(group), cluster) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -206,7 +206,7 @@ func (s *GroupClustersService) DeleteCluster(pid interface{}, cluster int, optio if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/clusters/%d", pathEscape(group), cluster) + u := fmt.Sprintf("groups/%s/clusters/%d", PathEscape(group), cluster) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_hooks.go b/vendor/github.com/xanzy/go-gitlab/group_hooks.go index 2f03c717d..55c44f181 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_hooks.go +++ b/vendor/github.com/xanzy/go-gitlab/group_hooks.go @@ -48,14 +48,14 @@ type GroupHook struct { // ListGroupHooks gets a list of group hooks. // // GitLab API docs: https://docs.gitlab.com/ce/api/groups.html#list-group-hooks -func (s *GroupsService) ListGroupHooks(gid interface{}) ([]*GroupHook, *Response, error) { +func (s *GroupsService) ListGroupHooks(gid interface{}, options ...RequestOptionFunc) ([]*GroupHook, *Response, error) { group, err := parseID(gid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/hooks", pathEscape(group)) + u := fmt.Sprintf("groups/%s/hooks", PathEscape(group)) - req, err := s.client.NewRequest(http.MethodGet, u, nil, nil) + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { return nil, nil, err } @@ -77,7 +77,7 @@ func (s *GroupsService) GetGroupHook(pid interface{}, hook int, options ...Reque if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/hooks/%d", pathEscape(group), hook) + u := fmt.Sprintf("groups/%s/hooks/%d", PathEscape(group), hook) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -122,7 +122,7 @@ func (s *GroupsService) AddGroupHook(gid interface{}, opt *AddGroupHookOptions, if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/hooks", pathEscape(group)) + u := fmt.Sprintf("groups/%s/hooks", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -169,7 +169,7 @@ func (s *GroupsService) EditGroupHook(pid interface{}, hook int, opt *EditGroupH if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/hooks/%d", pathEscape(group), hook) + u := fmt.Sprintf("groups/%s/hooks/%d", PathEscape(group), hook) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -195,7 +195,7 @@ func (s *GroupsService) DeleteGroupHook(pid interface{}, hook int, options ...Re if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/hooks/%d", pathEscape(group), hook) + u := fmt.Sprintf("groups/%s/hooks/%d", PathEscape(group), hook) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_import_export.go b/vendor/github.com/xanzy/go-gitlab/group_import_export.go index c57880583..e19d20286 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_import_export.go +++ b/vendor/github.com/xanzy/go-gitlab/group_import_export.go @@ -44,7 +44,7 @@ func (s *GroupImportExportService) ScheduleExport(gid interface{}, options ...Re if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/export", pathEscape(group)) + u := fmt.Sprintf("groups/%s/export", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -63,7 +63,7 @@ func (s 
*GroupImportExportService) ExportDownload(gid interface{}, options ...Re if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/export/download", pathEscape(group)) + u := fmt.Sprintf("groups/%s/export/download", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_iterations.go b/vendor/github.com/xanzy/go-gitlab/group_iterations.go new file mode 100644 index 000000000..a642091c2 --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/group_iterations.go @@ -0,0 +1,90 @@ +// +// Copyright 2022, Daniel Steinke +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/http" + "time" +) + +// IterationsAPI handles communication with the iterations related methods +// of the GitLab API +// +// GitLab API docs: https://docs.gitlab.com/ee/api/group_iterations.html +type GroupIterationsService struct { + client *Client +} + +// GroupInteration represents a GitLab iteration. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/group_iterations.html +type GroupIteration struct { + ID int `json:"id"` + IID int `json:"iid"` + Sequence int `json:"sequence"` + GroupID int `json:"group_id"` + Title string `json:"title"` + Description string `json:"description"` + State int `json:"state"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + DueDate *ISOTime `json:"due_date"` + StartDate *ISOTime `json:"start_date"` + WebURL string `json:"web_url"` +} + +func (i GroupIteration) String() string { + return Stringify(i) +} + +// ListGroupIterationsOptions contains the available ListGroupIterations() +// options +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_iterations.html#list-group-iterations +type ListGroupIterationsOptions struct { + ListOptions + State *string `url:"state,omitempty" json:"state,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + IncludeAncestors *bool `url:"include_ancestors,omitempty" json:"include_ancestors,omitempty"` +} + +// ListGroupIterations returns a list of group iterations. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_iterations.html#list-group-iterations +func (s *GroupIterationsService) ListGroupIterations(gid interface{}, opt *ListGroupIterationsOptions, options ...RequestOptionFunc) ([]*GroupIteration, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/iterations", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var gis []*GroupIteration + resp, err := s.client.Do(req, &gis) + if err != nil { + return nil, nil, err + } + + return gis, resp, err +} diff --git a/vendor/github.com/xanzy/go-gitlab/group_labels.go b/vendor/github.com/xanzy/go-gitlab/group_labels.go index e40b597df..4c4f7c4ce 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_labels.go +++ b/vendor/github.com/xanzy/go-gitlab/group_labels.go @@ -40,8 +40,15 @@ func (l GroupLabel) String() string { // ListGroupLabelsOptions represents the available ListGroupLabels() options. // -// GitLab API docs: https://docs.gitlab.com/ce/api/labels.html#list-labels -type ListGroupLabelsOptions ListOptions +// GitLab API docs: https://docs.gitlab.com/ee/api/group_labels.html#list-group-labels +type ListGroupLabelsOptions struct { + ListOptions + WithCounts *bool `url:"with_counts,omitempty" json:"with_counts,omitempty"` + IncludeAncestorGroups *bool `url:"include_ancestor_groups,omitempty" json:"include_ancestor_groups,omitempty"` + IncludeDescendantGrouops *bool `url:"include_descendant_groups,omitempty" json:"include_descendant_groups,omitempty"` + OnlyGroupLabels *bool `url:"only_group_labels,omitempty" json:"only_group_labels,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` +} // ListGroupLabels gets all labels for given group. 
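A sketch of paging through the new group iterations endpoint added above, assuming the service is registered on the client as GroupIterations; the group path is a placeholder.

    package main

    import (
    	"fmt"
    	"log"

    	gitlab "github.com/xanzy/go-gitlab"
    )

    func main() {
    	git, err := gitlab.NewClient("glpat-xxxxxxxx")
    	if err != nil {
    		log.Fatal(err)
    	}

    	opt := &gitlab.ListGroupIterationsOptions{
    		ListOptions:      gitlab.ListOptions{PerPage: 100},
    		IncludeAncestors: gitlab.Bool(true), // also return iterations inherited from ancestor groups
    	}

    	// Page through all iterations of the (placeholder) group "my-org/my-team".
    	// GroupIterations is assumed to be wired on the client like the other services.
    	for {
    		iterations, resp, err := git.GroupIterations.ListGroupIterations("my-org/my-team", opt)
    		if err != nil {
    			log.Fatal(err)
    		}
    		for _, it := range iterations {
    			fmt.Println(it.IID, it.Title)
    		}
    		if resp.NextPage == 0 {
    			break
    		}
    		opt.Page = resp.NextPage
    	}
    }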
// @@ -52,7 +59,7 @@ func (s *GroupLabelsService) ListGroupLabels(gid interface{}, opt *ListGroupLabe if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/labels", pathEscape(group)) + u := fmt.Sprintf("groups/%s/labels", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -81,7 +88,7 @@ func (s *GroupLabelsService) GetGroupLabel(gid interface{}, labelID interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/labels/%s", pathEscape(group), label) + u := fmt.Sprintf("groups/%s/labels/%s", PathEscape(group), PathEscape(label)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -113,7 +120,7 @@ func (s *GroupLabelsService) CreateGroupLabel(gid interface{}, opt *CreateGroupL if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/labels", pathEscape(group)) + u := fmt.Sprintf("groups/%s/labels", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -143,7 +150,7 @@ func (s *GroupLabelsService) DeleteGroupLabel(gid interface{}, opt *DeleteGroupL if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/labels", pathEscape(group)) + u := fmt.Sprintf("groups/%s/labels", PathEscape(group)) req, err := s.client.NewRequest(http.MethodDelete, u, opt, options) if err != nil { @@ -169,7 +176,7 @@ func (s *GroupLabelsService) UpdateGroupLabel(gid interface{}, opt *UpdateGroupL if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/labels", pathEscape(group)) + u := fmt.Sprintf("groups/%s/labels", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -200,7 +207,7 @@ func (s *GroupLabelsService) SubscribeToGroupLabel(gid interface{}, labelID inte if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/labels/%s/subscribe", pathEscape(group), label) + u := fmt.Sprintf("groups/%s/labels/%s/subscribe", PathEscape(group), PathEscape(label)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -231,7 +238,7 @@ func (s *GroupLabelsService) UnsubscribeFromGroupLabel(gid interface{}, labelID if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/labels/%s/unsubscribe", pathEscape(group), label) + u := fmt.Sprintf("groups/%s/labels/%s/unsubscribe", PathEscape(group), PathEscape(label)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_members.go b/vendor/github.com/xanzy/go-gitlab/group_members.go index 3ec95de98..4eeee59fb 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_members.go +++ b/vendor/github.com/xanzy/go-gitlab/group_members.go @@ -77,7 +77,7 @@ func (s *GroupsService) ListGroupMembers(gid interface{}, opt *ListGroupMembersO if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/members", pathEscape(group)) + u := fmt.Sprintf("groups/%s/members", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -103,7 +103,7 @@ func (s *GroupsService) ListAllGroupMembers(gid interface{}, opt *ListGroupMembe if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/members/all", pathEscape(group)) + u := fmt.Sprintf("groups/%s/members/all", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -138,7 +138,7 @@ func (s *GroupMembersService) GetGroupMember(gid interface{}, user int, options if err != 
nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/members/%d", pathEscape(group), user) + u := fmt.Sprintf("groups/%s/members/%d", PathEscape(group), user) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -188,7 +188,7 @@ func (s *GroupsService) ListBillableGroupMembers(gid interface{}, opt *ListBilla if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/billable_members", pathEscape(group)) + u := fmt.Sprintf("groups/%s/billable_members", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -213,7 +213,7 @@ func (s *GroupsService) RemoveBillableGroupMember(gid interface{}, user int, opt if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/billable_members/%d", pathEscape(group), user) + u := fmt.Sprintf("groups/%s/billable_members/%d", PathEscape(group), user) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -232,7 +232,7 @@ func (s *GroupMembersService) AddGroupMember(gid interface{}, opt *AddGroupMembe if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/members", pathEscape(group)) + u := fmt.Sprintf("groups/%s/members", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -257,7 +257,7 @@ func (s *GroupMembersService) ShareWithGroup(gid interface{}, opt *ShareWithGrou if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/share", pathEscape(group)) + u := fmt.Sprintf("groups/%s/share", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -282,7 +282,7 @@ func (s *GroupMembersService) DeleteShareWithGroup(gid interface{}, groupID int, if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/share/%d", pathEscape(group), groupID) + u := fmt.Sprintf("groups/%s/share/%d", PathEscape(group), groupID) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -311,7 +311,7 @@ func (s *GroupMembersService) EditGroupMember(gid interface{}, user int, opt *Ed if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/members/%d", pathEscape(group), user) + u := fmt.Sprintf("groups/%s/members/%d", PathEscape(group), user) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -336,7 +336,7 @@ func (s *GroupMembersService) RemoveGroupMember(gid interface{}, user int, optio if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/members/%d", pathEscape(group), user) + u := fmt.Sprintf("groups/%s/members/%d", PathEscape(group), user) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_milestones.go b/vendor/github.com/xanzy/go-gitlab/group_milestones.go index 69cd997db..095b01327 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_milestones.go +++ b/vendor/github.com/xanzy/go-gitlab/group_milestones.go @@ -58,7 +58,7 @@ func (m GroupMilestone) String() string { // https://docs.gitlab.com/ce/api/group_milestones.html#list-group-milestones type ListGroupMilestonesOptions struct { ListOptions - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` State *string `url:"state,omitempty" json:"state,omitempty"` Title *string `url:"title,omitempty" json:"title,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` @@ -74,7 +74,7 @@ func (s *GroupMilestonesService) 
ListGroupMilestones(gid interface{}, opt *ListG if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones", pathEscape(group)) + u := fmt.Sprintf("groups/%s/milestones", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -99,7 +99,7 @@ func (s *GroupMilestonesService) GetGroupMilestone(gid interface{}, milestone in if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones/%d", pathEscape(group), milestone) + u := fmt.Sprintf("groups/%s/milestones/%d", PathEscape(group), milestone) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -135,7 +135,7 @@ func (s *GroupMilestonesService) CreateGroupMilestone(gid interface{}, opt *Crea if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones", pathEscape(group)) + u := fmt.Sprintf("groups/%s/milestones", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -172,7 +172,7 @@ func (s *GroupMilestonesService) UpdateGroupMilestone(gid interface{}, milestone if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones/%d", pathEscape(group), milestone) + u := fmt.Sprintf("groups/%s/milestones/%d", PathEscape(group), milestone) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -203,7 +203,7 @@ func (s *GroupMilestonesService) GetGroupMilestoneIssues(gid interface{}, milest if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones/%d/issues", pathEscape(group), milestone) + u := fmt.Sprintf("groups/%s/milestones/%d/issues", PathEscape(group), milestone) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -236,7 +236,7 @@ func (s *GroupMilestonesService) GetGroupMilestoneMergeRequests(gid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones/%d/merge_requests", pathEscape(group), milestone) + u := fmt.Sprintf("groups/%s/milestones/%d/merge_requests", PathEscape(group), milestone) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -279,7 +279,7 @@ func (s *GroupMilestonesService) GetGroupMilestoneBurndownChartEvents(gid interf if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/milestones/%d/burndown_events", pathEscape(group), milestone) + u := fmt.Sprintf("groups/%s/milestones/%d/burndown_events", PathEscape(group), milestone) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_variables.go b/vendor/github.com/xanzy/go-gitlab/group_variables.go index 4cf971481..e02176aa9 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_variables.go +++ b/vendor/github.com/xanzy/go-gitlab/group_variables.go @@ -64,7 +64,7 @@ func (s *GroupVariablesService) ListVariables(gid interface{}, opt *ListGroupVar if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/variables", pathEscape(group)) + u := fmt.Sprintf("groups/%s/variables", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -89,7 +89,7 @@ func (s *GroupVariablesService) GetVariable(gid interface{}, key string, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/variables/%s", pathEscape(group), url.PathEscape(key)) + u := fmt.Sprintf("groups/%s/variables/%s", PathEscape(group), url.PathEscape(key)) req, err := s.client.NewRequest(http.MethodGet, u, nil, 
options) if err != nil { @@ -128,7 +128,7 @@ func (s *GroupVariablesService) CreateVariable(gid interface{}, opt *CreateGroup if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/variables", pathEscape(group)) + u := fmt.Sprintf("groups/%s/variables", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -167,7 +167,7 @@ func (s *GroupVariablesService) UpdateVariable(gid interface{}, key string, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/variables/%s", pathEscape(group), url.PathEscape(key)) + u := fmt.Sprintf("groups/%s/variables/%s", PathEscape(group), url.PathEscape(key)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -192,7 +192,7 @@ func (s *GroupVariablesService) RemoveVariable(gid interface{}, key string, opti if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/variables/%s", pathEscape(group), url.PathEscape(key)) + u := fmt.Sprintf("groups/%s/variables/%s", PathEscape(group), url.PathEscape(key)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/group_wikis.go b/vendor/github.com/xanzy/go-gitlab/group_wikis.go index 74c9898e1..55e51dadd 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_wikis.go +++ b/vendor/github.com/xanzy/go-gitlab/group_wikis.go @@ -61,7 +61,7 @@ func (s *GroupWikisService) ListGroupWikis(gid interface{}, opt *ListGroupWikisO if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/wikis", pathEscape(group)) + u := fmt.Sprintf("groups/%s/wikis", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -86,7 +86,7 @@ func (s *GroupWikisService) GetGroupWikiPage(gid interface{}, slug string, optio if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/wikis/%s", pathEscape(group), url.PathEscape(slug)) + u := fmt.Sprintf("groups/%s/wikis/%s", PathEscape(group), url.PathEscape(slug)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -122,7 +122,7 @@ func (s *GroupWikisService) CreateGroupWikiPage(gid interface{}, opt *CreateGrou if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/wikis", pathEscape(group)) + u := fmt.Sprintf("groups/%s/wikis", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -158,7 +158,7 @@ func (s *GroupWikisService) EditGroupWikiPage(gid interface{}, slug string, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/wikis/%s", pathEscape(group), url.PathEscape(slug)) + u := fmt.Sprintf("groups/%s/wikis/%s", PathEscape(group), url.PathEscape(slug)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -183,7 +183,7 @@ func (s *GroupWikisService) DeleteGroupWikiPage(gid interface{}, slug string, op if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/wikis/%s", pathEscape(group), url.PathEscape(slug)) + u := fmt.Sprintf("groups/%s/wikis/%s", PathEscape(group), url.PathEscape(slug)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/groups.go b/vendor/github.com/xanzy/go-gitlab/groups.go index fae54679b..92e910e92 100644 --- a/vendor/github.com/xanzy/go-gitlab/groups.go +++ b/vendor/github.com/xanzy/go-gitlab/groups.go @@ -97,7 +97,7 @@ type ListGroupsOptions struct { OrderBy *string `url:"order_by,omitempty" 
json:"order_by,omitempty"` Owned *bool `url:"owned,omitempty" json:"owned,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` - SkipGroups []int `url:"skip_groups,omitempty" json:"skip_groups,omitempty"` + SkipGroups *[]int `url:"skip_groups,omitempty" json:"skip_groups,omitempty"` Sort *string `url:"sort,omitempty" json:"sort,omitempty"` Statistics *bool `url:"statistics,omitempty" json:"statistics,omitempty"` TopLevelOnly *bool `url:"top_level_only,omitempty" json:"top_level_only,omitempty"` @@ -114,13 +114,124 @@ func (s *GroupsService) ListGroups(opt *ListGroupsOptions, options ...RequestOpt return nil, nil, err } - var g []*Group - resp, err := s.client.Do(req, &g) + var gs []*Group + resp, err := s.client.Do(req, &gs) if err != nil { return nil, resp, err } - return g, resp, err + return gs, resp, err +} + +// ListSubgroupsOptions represents the available ListSubgroups() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-s-subgroups +type ListSubgroupsOptions ListGroupsOptions + +// ListSubgroups gets a list of subgroups for a given group. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-s-subgroups +func (s *GroupsService) ListSubgroups(gid interface{}, opt *ListSubgroupsOptions, options ...RequestOptionFunc) ([]*Group, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/subgroups", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var gs []*Group + resp, err := s.client.Do(req, &gs) + if err != nil { + return nil, resp, err + } + + return gs, resp, err +} + +// ListDescendantGroupsOptions represents the available ListDescendantGroups() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-descendant-groups +type ListDescendantGroupsOptions ListGroupsOptions + +// ListDescendantGroups gets a list of subgroups for a given project. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-descendant-groups +func (s *GroupsService) ListDescendantGroups(gid interface{}, opt *ListDescendantGroupsOptions, options ...RequestOptionFunc) ([]*Group, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/descendant_groups", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var gs []*Group + resp, err := s.client.Do(req, &gs) + if err != nil { + return nil, resp, err + } + + return gs, resp, err +} + +// ListGroupProjectsOptions represents the available ListGroup() options. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/groups.html#list-a-group-39-s-projects +type ListGroupProjectsOptions struct { + ListOptions + Archived *bool `url:"archived,omitempty" json:"archived,omitempty"` + IncludeSubgroups *bool `url:"include_subgroups,omitempty" json:"include_subgroups,omitempty"` + MinAccessLevel *AccessLevelValue `url:"min_access_level,omitempty" json:"min_access_level,omitempty"` + OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` + Owned *bool `url:"owned,omitempty" json:"owned,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + Simple *bool `url:"simple,omitempty" json:"simple,omitempty"` + Sort *string `url:"sort,omitempty" json:"sort,omitempty"` + Starred *bool `url:"starred,omitempty" json:"starred,omitempty"` + Visibility *VisibilityValue `url:"visibility,omitempty" json:"visibility,omitempty"` + WithCustomAttributes *bool `url:"with_custom_attributes,omitempty" json:"with_custom_attributes,omitempty"` + WithIssuesEnabled *bool `url:"with_issues_enabled,omitempty" json:"with_issues_enabled,omitempty"` + WithMergeRequestsEnabled *bool `url:"with_merge_requests_enabled,omitempty" json:"with_merge_requests_enabled,omitempty"` + WithSecurityReports *bool `url:"with_security_reports,omitempty" json:"with_security_reports,omitempty"` + WithShared *bool `url:"with_shared,omitempty" json:"with_shared,omitempty"` +} + +// ListGroupProjects get a list of group projects +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/groups.html#list-a-group-39-s-projects +func (s *GroupsService) ListGroupProjects(gid interface{}, opt *ListGroupProjectsOptions, options ...RequestOptionFunc) ([]*Project, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/projects", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var ps []*Project + resp, err := s.client.Do(req, &ps) + if err != nil { + return nil, resp, err + } + + return ps, resp, err } // GetGroupOptions represents the available GetGroup() options. 
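A short sketch of the subgroup and group-project listing calls added in the hunk above, also illustrating the pointer-to-slice convention (for example SkipGroups *[]int) that this update introduces for option fields. The group path and the skipped group ID are placeholders; git is a client obtained from gitlab.NewClient.

    package main

    import (
    	"fmt"

    	gitlab "github.com/xanzy/go-gitlab"
    )

    // listGroupTree prints the subgroups and projects of a group using the
    // ListSubgroups and ListGroupProjects endpoints.
    func listGroupTree(git *gitlab.Client, group string) error {
    	subgroups, _, err := git.Groups.ListSubgroups(group, &gitlab.ListSubgroupsOptions{
    		SkipGroups: &[]int{101}, // slice-valued options are now passed as pointers to slices
    	})
    	if err != nil {
    		return err
    	}
    	for _, g := range subgroups {
    		fmt.Println("subgroup:", g.FullPath)
    	}

    	projects, _, err := git.Groups.ListGroupProjects(group, &gitlab.ListGroupProjectsOptions{
    		IncludeSubgroups: gitlab.Bool(true),
    		Simple:           gitlab.Bool(true),
    	})
    	if err != nil {
    		return err
    	}
    	for _, p := range projects {
    		fmt.Println("project:", p.PathWithNamespace)
    	}
    	return nil
    }

The move from plain slices to *[]T presumably lets an unset option be distinguished from an explicitly empty list when the query string is built, matching the pointer style of the other option fields.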
@@ -140,7 +251,7 @@ func (s *GroupsService) GetGroup(gid interface{}, opt *GetGroupOptions, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s", pathEscape(group)) + u := fmt.Sprintf("groups/%s", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -214,7 +325,7 @@ func (s *GroupsService) TransferGroup(gid interface{}, pid interface{}, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/projects/%s", pathEscape(group), pathEscape(project)) + u := fmt.Sprintf("groups/%s/projects/%s", PathEscape(group), PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -267,7 +378,7 @@ func (s *GroupsService) UpdateGroup(gid interface{}, opt *UpdateGroupOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s", pathEscape(group)) + u := fmt.Sprintf("groups/%s", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -291,7 +402,7 @@ func (s *GroupsService) DeleteGroup(gid interface{}, options ...RequestOptionFun if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s", pathEscape(group)) + u := fmt.Sprintf("groups/%s", PathEscape(group)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -310,7 +421,7 @@ func (s *GroupsService) RestoreGroup(gid interface{}, options ...RequestOptionFu if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/restore", pathEscape(group)) + u := fmt.Sprintf("groups/%s/restore", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -340,124 +451,53 @@ func (s *GroupsService) SearchGroup(query string, options ...RequestOptionFunc) return nil, nil, err } - var g []*Group - resp, err := s.client.Do(req, &g) + var gs []*Group + resp, err := s.client.Do(req, &gs) if err != nil { return nil, resp, err } - return g, resp, err + return gs, resp, err } -// ListGroupProjectsOptions represents the available ListGroup() options. 
-// -// GitLab API docs: -// https://docs.gitlab.com/ce/api/groups.html#list-a-group-39-s-projects -type ListGroupProjectsOptions struct { - ListOptions - Archived *bool `url:"archived,omitempty" json:"archived,omitempty"` - IncludeSubgroups *bool `url:"include_subgroups,omitempty" json:"include_subgroups,omitempty"` - MinAccessLevel *AccessLevelValue `url:"min_access_level,omitempty" json:"min_access_level,omitempty"` - OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` - Owned *bool `url:"owned,omitempty" json:"owned,omitempty"` - Search *string `url:"search,omitempty" json:"search,omitempty"` - Simple *bool `url:"simple,omitempty" json:"simple,omitempty"` - Sort *string `url:"sort,omitempty" json:"sort,omitempty"` - Starred *bool `url:"starred,omitempty" json:"starred,omitempty"` - Visibility *VisibilityValue `url:"visibility,omitempty" json:"visibility,omitempty"` - WithCustomAttributes *bool `url:"with_custom_attributes,omitempty" json:"with_custom_attributes,omitempty"` - WithIssuesEnabled *bool `url:"with_issues_enabled,omitempty" json:"with_issues_enabled,omitempty"` - WithMergeRequestsEnabled *bool `url:"with_merge_requests_enabled,omitempty" json:"with_merge_requests_enabled,omitempty"` - WithSecurityReports *bool `url:"with_security_reports,omitempty" json:"with_security_reports,omitempty"` - WithShared *bool `url:"with_shared,omitempty" json:"with_shared,omitempty"` -} - -// ListGroupProjects get a list of group projects -// -// GitLab API docs: -// https://docs.gitlab.com/ce/api/groups.html#list-a-group-39-s-projects -func (s *GroupsService) ListGroupProjects(gid interface{}, opt *ListGroupProjectsOptions, options ...RequestOptionFunc) ([]*Project, *Response, error) { - group, err := parseID(gid) - if err != nil { - return nil, nil, err - } - u := fmt.Sprintf("groups/%s/projects", pathEscape(group)) - - req, err := s.client.NewRequest(http.MethodGet, u, opt, options) - if err != nil { - return nil, nil, err - } - - var p []*Project - resp, err := s.client.Do(req, &p) - if err != nil { - return nil, resp, err - } - - return p, resp, err -} - -// ListSubgroupsOptions represents the available ListSubgroups() options. -// -// GitLab API docs: -// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-s-subgroups -type ListSubgroupsOptions ListGroupsOptions - -// ListSubgroups gets a list of subgroups for a given group. -// -// GitLab API docs: -// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-s-subgroups -func (s *GroupsService) ListSubgroups(gid interface{}, opt *ListSubgroupsOptions, options ...RequestOptionFunc) ([]*Group, *Response, error) { - group, err := parseID(gid) - if err != nil { - return nil, nil, err - } - u := fmt.Sprintf("groups/%s/subgroups", pathEscape(group)) - - req, err := s.client.NewRequest(http.MethodGet, u, opt, options) - if err != nil { - return nil, nil, err - } - - var g []*Group - resp, err := s.client.Do(req, &g) - if err != nil { - return nil, resp, err - } - - return g, resp, err -} - -// ListDescendantGroupsOptions represents the available ListDescendantGroups() +// ListProvisionedUsersOptions represents the available ListProvisionedUsers() // options. 
// // GitLab API docs: -// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-descendant-groups -type ListDescendantGroupsOptions ListGroupsOptions +// https://docs.gitlab.com/ee/api/groups.html#provisioned-users-api +type ListProvisionedUsersOptions struct { + ListOptions + Username *string `url:"username,omitempty" json:"username,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + Active *bool `url:"active,omitempty" json:"active,omitempty"` + Blocked *bool `url:"blocked,omitempty" json:"blocked,omitempty"` + CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` + CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` +} -// ListDescendantGroups gets a list of subgroups for a given project. +// ListProvisionedUsers gets a list of users provisioned by the given group. // // GitLab API docs: -// https://docs.gitlab.com/ce/api/groups.html#list-a-groups-descendant-groups -func (s *GroupsService) ListDescendantGroups(gid interface{}, opt *ListDescendantGroupsOptions, options ...RequestOptionFunc) ([]*Group, *Response, error) { +// https://docs.gitlab.com/ee/api/groups.html#provisioned-users-api +func (s *GroupsService) ListProvisionedUsers(gid interface{}, opt *ListProvisionedUsersOptions, options ...RequestOptionFunc) ([]*User, *Response, error) { group, err := parseID(gid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/descendant_groups", pathEscape(group)) + u := fmt.Sprintf("groups/%s/provisioned_users", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { return nil, nil, err } - var g []*Group - resp, err := s.client.Do(req, &g) + var us []*User + resp, err := s.client.Do(req, &us) if err != nil { return nil, resp, err } - return g, resp, err + return us, resp, err } // ListGroupLDAPLinks lists the group's LDAP links. Available only for users who @@ -470,20 +510,20 @@ func (s *GroupsService) ListGroupLDAPLinks(gid interface{}, options ...RequestOp if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/ldap_group_links", pathEscape(group)) + u := fmt.Sprintf("groups/%s/ldap_group_links", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { return nil, nil, err } - var gl []*LDAPGroupLink - resp, err := s.client.Do(req, &gl) + var gls []*LDAPGroupLink + resp, err := s.client.Do(req, &gls) if err != nil { return nil, resp, err } - return gl, resp, nil + return gls, resp, nil } // AddGroupLDAPLinkOptions represents the available AddGroupLDAPLink() options. 
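A sketch of the new provisioned-users listing introduced above, with a placeholder group path; the CreatedAfter filter shows the *time.Time option fields, and git is a client from gitlab.NewClient.

    package main

    import (
    	"fmt"
    	"time"

    	gitlab "github.com/xanzy/go-gitlab"
    )

    // recentProvisionedUsers lists active users provisioned by the group
    // within the last six months.
    func recentProvisionedUsers(git *gitlab.Client, group string) error {
    	after := time.Now().AddDate(0, -6, 0)
    	users, _, err := git.Groups.ListProvisionedUsers(group, &gitlab.ListProvisionedUsersOptions{
    		Active:       gitlab.Bool(true),
    		CreatedAfter: &after,
    	})
    	if err != nil {
    		return err
    	}
    	for _, u := range users {
    		fmt.Println(u.Username, u.State)
    	}
    	return nil
    }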
@@ -517,7 +557,7 @@ func (s *GroupsService) AddGroupLDAPLink(gid interface{}, opt *AddGroupLDAPLinkO if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/ldap_group_links", pathEscape(group)) + u := fmt.Sprintf("groups/%s/ldap_group_links", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -543,7 +583,7 @@ func (s *GroupsService) DeleteGroupLDAPLink(gid interface{}, cn string, options if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/ldap_group_links/%s", pathEscape(group), pathEscape(cn)) + u := fmt.Sprintf("groups/%s/ldap_group_links/%s", PathEscape(group), PathEscape(cn)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -563,7 +603,7 @@ func (s *GroupsService) DeleteGroupLDAPLinkWithCNOrFilter(gid interface{}, opts if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/ldap_group_links", pathEscape(group)) + u := fmt.Sprintf("groups/%s/ldap_group_links", PathEscape(group)) req, err := s.client.NewRequest(http.MethodDelete, u, opts, options) if err != nil { @@ -585,9 +625,9 @@ func (s *GroupsService) DeleteGroupLDAPLinkForProvider(gid interface{}, provider } u := fmt.Sprintf( "groups/%s/ldap_group_links/%s/%s", - pathEscape(group), - pathEscape(provider), - pathEscape(cn), + PathEscape(group), + PathEscape(provider), + PathEscape(cn), ) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) @@ -617,7 +657,7 @@ func (s *GroupsService) ShareGroupWithGroup(gid interface{}, opt *ShareGroupWith if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/share", pathEscape(group)) + u := fmt.Sprintf("groups/%s/share", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -642,7 +682,7 @@ func (s *GroupsService) UnshareGroupFromGroup(gid interface{}, groupID int, opti if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/share/%d", pathEscape(group), groupID) + u := fmt.Sprintf("groups/%s/share/%d", PathEscape(group), groupID) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -681,7 +721,7 @@ func (s *GroupsService) GetGroupPushRules(gid interface{}, options ...RequestOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/push_rule", pathEscape(group)) + u := fmt.Sprintf("groups/%s/push_rule", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -725,7 +765,7 @@ func (s *GroupsService) AddGroupPushRule(gid interface{}, opt *AddGroupPushRuleO if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/push_rule", pathEscape(group)) + u := fmt.Sprintf("groups/%s/push_rule", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -769,7 +809,7 @@ func (s *GroupsService) EditGroupPushRule(gid interface{}, opt *EditGroupPushRul if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/push_rule", pathEscape(group)) + u := fmt.Sprintf("groups/%s/push_rule", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -794,7 +834,7 @@ func (s *GroupsService) DeleteGroupPushRule(gid interface{}, options ...RequestO if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/push_rule", pathEscape(group)) + u := fmt.Sprintf("groups/%s/push_rule", PathEscape(group)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git 
a/vendor/github.com/xanzy/go-gitlab/invites.go b/vendor/github.com/xanzy/go-gitlab/invites.go index fffbfa35f..b5ae81c82 100644 --- a/vendor/github.com/xanzy/go-gitlab/invites.go +++ b/vendor/github.com/xanzy/go-gitlab/invites.go @@ -62,7 +62,7 @@ func (s *InvitesService) ListPendingGroupInvitations(gid interface{}, opt *ListP if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/invitations", pathEscape(group)) + u := fmt.Sprintf("groups/%s/invitations", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -87,7 +87,7 @@ func (s *InvitesService) ListPendingProjectInvitations(pid interface{}, opt *Lis if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/invitations", pathEscape(project)) + u := fmt.Sprintf("projects/%s/invitations", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -133,7 +133,7 @@ func (s *InvitesService) GroupInvites(gid interface{}, opt *InvitesOptions, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/invitations", pathEscape(group)) + u := fmt.Sprintf("groups/%s/invitations", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -158,7 +158,7 @@ func (s *InvitesService) ProjectInvites(pid interface{}, opt *InvitesOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/invitations", pathEscape(project)) + u := fmt.Sprintf("projects/%s/invitations", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/issue_links.go b/vendor/github.com/xanzy/go-gitlab/issue_links.go index 4d8f89162..f40b65c46 100644 --- a/vendor/github.com/xanzy/go-gitlab/issue_links.go +++ b/vendor/github.com/xanzy/go-gitlab/issue_links.go @@ -50,7 +50,7 @@ func (s *IssueLinksService) ListIssueRelations(pid interface{}, issueIID int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/links", pathEscape(project), issueIID) + u := fmt.Sprintf("projects/%s/issues/%d/links", PathEscape(project), issueIID) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -85,7 +85,7 @@ func (s *IssueLinksService) CreateIssueLink(pid interface{}, issueIID int, opt * if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/links", pathEscape(project), issueIID) + u := fmt.Sprintf("projects/%s/issues/%d/links", PathEscape(project), issueIID) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -111,7 +111,7 @@ func (s *IssueLinksService) DeleteIssueLink(pid interface{}, issueIID, issueLink return nil, nil, err } u := fmt.Sprintf("projects/%s/issues/%d/links/%d", - pathEscape(project), + PathEscape(project), issueIID, issueLinkID) diff --git a/vendor/github.com/xanzy/go-gitlab/issues.go b/vendor/github.com/xanzy/go-gitlab/issues.go index 60f09f9e3..0165c3093 100644 --- a/vendor/github.com/xanzy/go-gitlab/issues.go +++ b/vendor/github.com/xanzy/go-gitlab/issues.go @@ -207,20 +207,20 @@ type LabelDetails struct { type ListIssuesOptions struct { ListOptions State *string `url:"state,omitempty" json:"state,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - NotLabels Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + NotLabels *Labels 
`url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` WithLabelDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` NotMilestone *string `url:"not[milestone],omitempty" json:"not[milestone],omitempty"` Scope *string `url:"scope,omitempty" json:"scope,omitempty"` AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - NotAuthorID []int `url:"not[author_id],omitempty" json:"not[author_id],omitempty"` + NotAuthorID *[]int `url:"not[author_id],omitempty" json:"not[author_id],omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - NotAssigneeID []int `url:"not[assignee_id],omitempty" json:"not[assignee_id],omitempty"` + NotAssigneeID *[]int `url:"not[assignee_id],omitempty" json:"not[assignee_id],omitempty"` AssigneeUsername *string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - NotMyReactionEmoji []string `url:"not[my_reaction_emoji],omitempty" json:"not[my_reaction_emoji],omitempty"` - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` + NotMyReactionEmoji *[]string `url:"not[my_reaction_emoji],omitempty" json:"not[my_reaction_emoji],omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` In *string `url:"in,omitempty" json:"in,omitempty"` OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` Sort *string `url:"sort,omitempty" json:"sort,omitempty"` @@ -258,21 +258,21 @@ func (s *IssuesService) ListIssues(opt *ListIssuesOptions, options ...RequestOpt type ListGroupIssuesOptions struct { ListOptions State *string `url:"state,omitempty" json:"state,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - NotLabels Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + NotLabels *Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` WithLabelDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` NotMilestone *string `url:"not[milestone],omitempty" json:"not[milestone],omitempty"` Scope *string `url:"scope,omitempty" json:"scope,omitempty"` AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - NotAuthorID []int `url:"not[author_id],omitempty" json:"not[author_id],omitempty"` + NotAuthorID *[]int `url:"not[author_id],omitempty" json:"not[author_id],omitempty"` AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - NotAssigneeID []int `url:"not[assignee_id],omitempty" json:"not[assignee_id],omitempty"` + NotAssigneeID *[]int `url:"not[assignee_id],omitempty" json:"not[assignee_id],omitempty"` AssigneeUsername *string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - NotMyReactionEmoji []string `url:"not[my_reaction_emoji],omitempty" json:"not[my_reaction_emoji],omitempty"` + NotMyReactionEmoji *[]string `url:"not[my_reaction_emoji],omitempty" 
json:"not[my_reaction_emoji],omitempty"` OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` Sort *string `url:"sort,omitempty" json:"sort,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` @@ -293,7 +293,7 @@ func (s *IssuesService) ListGroupIssues(pid interface{}, opt *ListGroupIssuesOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/issues", pathEscape(group)) + u := fmt.Sprintf("groups/%s/issues", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -314,21 +314,21 @@ func (s *IssuesService) ListGroupIssues(pid interface{}, opt *ListGroupIssuesOpt // GitLab API docs: https://docs.gitlab.com/ce/api/issues.html#list-project-issues type ListProjectIssuesOptions struct { ListOptions - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` State *string `url:"state,omitempty" json:"state,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - NotLabels Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + NotLabels *Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` WithLabelDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` - NotMilestone []string `url:"not[milestone],omitempty" json:"not[milestone],omitempty"` + NotMilestone *[]string `url:"not[milestone],omitempty" json:"not[milestone],omitempty"` Scope *string `url:"scope,omitempty" json:"scope,omitempty"` AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - NotAuthorID []int `url:"not[author_id],omitempty" json:"not[author_id],omitempty"` + NotAuthorID *[]int `url:"not[author_id],omitempty" json:"not[author_id],omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - NotAssigneeID []int `url:"not[assignee_id],omitempty" json:"not[assignee_id],omitempty"` + NotAssigneeID *[]int `url:"not[assignee_id],omitempty" json:"not[assignee_id],omitempty"` AssigneeUsername *string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - NotMyReactionEmoji []string `url:"not[my_reaction_emoji],omitempty" json:"not[my_reaction_emoji],omitempty"` + NotMyReactionEmoji *[]string `url:"not[my_reaction_emoji],omitempty" json:"not[my_reaction_emoji],omitempty"` OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` Sort *string `url:"sort,omitempty" json:"sort,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` @@ -351,7 +351,7 @@ func (s *IssuesService) ListProjectIssues(pid interface{}, opt *ListProjectIssue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues", pathEscape(project)) + u := fmt.Sprintf("projects/%s/issues", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -375,7 +375,7 @@ func (s *IssuesService) GetIssue(pid interface{}, issue int, options ...RequestO if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -399,9 +399,9 @@ type 
CreateIssueOptions struct { Title *string `url:"title,omitempty" json:"title,omitempty"` Description *string `url:"description,omitempty" json:"description,omitempty"` Confidential *bool `url:"confidential,omitempty" json:"confidential,omitempty"` - AssigneeIDs []int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` + AssigneeIDs *[]int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` CreatedAt *time.Time `url:"created_at,omitempty" json:"created_at,omitempty"` DueDate *ISOTime `url:"due_date,omitempty" json:"due_date,omitempty"` MergeRequestToResolveDiscussionsOf *int `url:"merge_request_to_resolve_discussions_of,omitempty" json:"merge_request_to_resolve_discussions_of,omitempty"` @@ -418,7 +418,7 @@ func (s *IssuesService) CreateIssue(pid interface{}, opt *CreateIssueOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues", pathEscape(project)) + u := fmt.Sprintf("projects/%s/issues", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -441,11 +441,11 @@ type UpdateIssueOptions struct { Title *string `url:"title,omitempty" json:"title,omitempty"` Description *string `url:"description,omitempty" json:"description,omitempty"` Confidential *bool `url:"confidential,omitempty" json:"confidential,omitempty"` - AssigneeIDs []int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` + AssigneeIDs *[]int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - AddLabels Labels `url:"add_labels,comma,omitempty" json:"add_labels,omitempty"` - RemoveLabels Labels `url:"remove_labels,comma,omitempty" json:"remove_labels,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + AddLabels *Labels `url:"add_labels,comma,omitempty" json:"add_labels,omitempty"` + RemoveLabels *Labels `url:"remove_labels,comma,omitempty" json:"remove_labels,omitempty"` StateEvent *string `url:"state_event,omitempty" json:"state_event,omitempty"` UpdatedAt *time.Time `url:"updated_at,omitempty" json:"updated_at,omitempty"` DueDate *ISOTime `url:"due_date,omitempty" json:"due_date,omitempty"` @@ -463,7 +463,7 @@ func (s *IssuesService) UpdateIssue(pid interface{}, issue int, opt *UpdateIssue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -487,7 +487,7 @@ func (s *IssuesService) DeleteIssue(pid interface{}, issue int, options ...Reque if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/issues/%d", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -513,7 +513,7 @@ func (s *IssuesService) MoveIssue(pid interface{}, issue int, opt *MoveIssueOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/move", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/move", PathEscape(project), issue) req, err := 
s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -540,7 +540,7 @@ func (s *IssuesService) SubscribeToIssue(pid interface{}, issue int, options ... if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/subscribe", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/subscribe", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -567,7 +567,7 @@ func (s *IssuesService) UnsubscribeFromIssue(pid interface{}, issue int, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/unsubscribe", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/unsubscribe", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -594,7 +594,7 @@ func (s *IssuesService) CreateTodo(pid interface{}, issue int, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/todo", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/todo", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -627,7 +627,7 @@ func (s *IssuesService) ListMergeRequestsClosingIssue(pid interface{}, issue int if err != nil { return nil, nil, err } - u := fmt.Sprintf("/projects/%s/issues/%d/closed_by", pathEscape(project), issue) + u := fmt.Sprintf("/projects/%s/issues/%d/closed_by", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -661,7 +661,7 @@ func (s *IssuesService) ListMergeRequestsRelatedToIssue(pid interface{}, issue i return nil, nil, err } u := fmt.Sprintf("/projects/%s/issues/%d/related_merge_requests", - pathEscape(project), + PathEscape(project), issue, ) @@ -728,7 +728,7 @@ func (s *IssuesService) GetParticipants(pid interface{}, issue int, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/participants", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/participants", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/issues_statistics.go b/vendor/github.com/xanzy/go-gitlab/issues_statistics.go index 6f48a17ca..134c271b4 100644 --- a/vendor/github.com/xanzy/go-gitlab/issues_statistics.go +++ b/vendor/github.com/xanzy/go-gitlab/issues_statistics.go @@ -52,15 +52,15 @@ func (n IssuesStatistics) String() string { // GitLab API docs: // https://docs.gitlab.com/ee/api/issues_statistics.html#get-issues-statistics type GetIssuesStatisticsOptions struct { - Labels Labels `url:"labels,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,omitempty" json:"labels,omitempty"` Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` Scope *string `url:"scope,omitempty" json:"scope,omitempty"` AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - AssigneeUsername []string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` + AssigneeUsername *[]string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - IIDs []int 
`url:"iids[],omitempty" json:"iids,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` In *string `url:"in,omitempty" json:"in,omitempty"` CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` @@ -96,14 +96,14 @@ func (s *IssuesStatisticsService) GetIssuesStatistics(opt *GetIssuesStatisticsOp // GitLab API docs: // https://docs.gitlab.com/ee/api/issues_statistics.html#get-group-issues-statistics type GetGroupIssuesStatisticsOptions struct { - Labels Labels `url:"labels,omitempty" json:"labels,omitempty"` - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` + Labels *Labels `url:"labels,omitempty" json:"labels,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` Scope *string `url:"scope,omitempty" json:"scope,omitempty"` AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - AssigneeUsername []string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` + AssigneeUsername *[]string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` @@ -122,7 +122,7 @@ func (s *IssuesStatisticsService) GetGroupIssuesStatistics(gid interface{}, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/issues_statistics", pathEscape(group)) + u := fmt.Sprintf("groups/%s/issues_statistics", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -144,14 +144,14 @@ func (s *IssuesStatisticsService) GetGroupIssuesStatistics(gid interface{}, opt // GitLab API docs: // https://docs.gitlab.com/ee/api/issues_statistics.html#get-project-issues-statistics type GetProjectIssuesStatisticsOptions struct { - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` - Labels Labels `url:"labels,omitempty" json:"labels,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` + Labels *Labels `url:"labels,omitempty" json:"labels,omitempty"` Milestone *Milestone `url:"milestone,omitempty" json:"milestone,omitempty"` Scope *string `url:"scope,omitempty" json:"scope,omitempty"` AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - AssigneeUsername []string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` + AssigneeUsername *[]string `url:"assignee_username,omitempty" json:"assignee_username,omitempty"` MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` Search *string `url:"search,omitempty" json:"search,omitempty"` CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` @@ -170,7 +170,7 @@ func (s *IssuesStatisticsService) GetProjectIssuesStatistics(pid interface{}, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues_statistics", pathEscape(project)) + u := 
fmt.Sprintf("projects/%s/issues_statistics", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/jobs.go b/vendor/github.com/xanzy/go-gitlab/jobs.go index f07c06dea..a68abcf7d 100644 --- a/vendor/github.com/xanzy/go-gitlab/jobs.go +++ b/vendor/github.com/xanzy/go-gitlab/jobs.go @@ -71,11 +71,12 @@ type Job struct { IsShared bool `json:"is_shared"` Name string `json:"name"` } `json:"runner"` - Stage string `json:"stage"` - Status string `json:"status"` - Tag bool `json:"tag"` - WebURL string `json:"web_url"` - User *User `json:"user"` + Stage string `json:"stage"` + Status string `json:"status"` + Tag bool `json:"tag"` + WebURL string `json:"web_url"` + Project *Project `json:"project"` + User *User `json:"user"` } // Bridge represents a pipeline bridge. @@ -104,8 +105,8 @@ type Bridge struct { // ListJobsOptions are options for two list apis type ListJobsOptions struct { ListOptions - Scope []BuildStateValue `url:"scope[],omitempty" json:"scope,omitempty"` - IncludeRetried bool `url:"include_retried,omitempty" json:"include_retried,omitempty"` + Scope *[]BuildStateValue `url:"scope[],omitempty" json:"scope,omitempty"` + IncludeRetried *bool `url:"include_retried,omitempty" json:"include_retried,omitempty"` } // ListProjectJobs gets a list of jobs in a project. @@ -120,7 +121,7 @@ func (s *JobsService) ListProjectJobs(pid interface{}, opts *ListJobsOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs", pathEscape(project)) + u := fmt.Sprintf("projects/%s/jobs", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opts, options) if err != nil { @@ -146,7 +147,7 @@ func (s *JobsService) ListPipelineJobs(pid interface{}, pipelineID int, opts *Li if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d/jobs", pathEscape(project), pipelineID) + u := fmt.Sprintf("projects/%s/pipelines/%d/jobs", PathEscape(project), pipelineID) req, err := s.client.NewRequest(http.MethodGet, u, opts, options) if err != nil { @@ -172,7 +173,7 @@ func (s *JobsService) ListPipelineBridges(pid interface{}, pipelineID int, opts if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d/bridges", pathEscape(project), pipelineID) + u := fmt.Sprintf("projects/%s/pipelines/%d/bridges", PathEscape(project), pipelineID) req, err := s.client.NewRequest(http.MethodGet, u, opts, options) if err != nil { @@ -222,7 +223,7 @@ func (s *JobsService) GetJob(pid interface{}, jobID int, options ...RequestOptio if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -247,7 +248,7 @@ func (s *JobsService) GetJobArtifacts(pid interface{}, jobID int, options ...Req if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/artifacts", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/artifacts", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -282,7 +283,7 @@ func (s *JobsService) DownloadArtifactsFile(pid interface{}, refName string, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/artifacts/%s/download", pathEscape(project), refName) + u := fmt.Sprintf("projects/%s/jobs/artifacts/%s/download", 
PathEscape(project), refName) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -313,7 +314,7 @@ func (s *JobsService) DownloadSingleArtifactsFile(pid interface{}, jobID int, ar u := fmt.Sprintf( "projects/%s/jobs/%d/artifacts/%s", - pathEscape(project), + PathEscape(project), jobID, artifactPath, ) @@ -341,7 +342,7 @@ func (s *JobsService) GetTraceFile(pid interface{}, jobID int, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/trace", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/trace", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -366,7 +367,7 @@ func (s *JobsService) CancelJob(pid interface{}, jobID int, options ...RequestOp if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/cancel", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/cancel", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -391,7 +392,7 @@ func (s *JobsService) RetryJob(pid interface{}, jobID int, options ...RequestOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/retry", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/retry", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -417,7 +418,7 @@ func (s *JobsService) EraseJob(pid interface{}, jobID int, options ...RequestOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/erase", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/erase", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -443,7 +444,7 @@ func (s *JobsService) KeepArtifacts(pid interface{}, jobID int, options ...Reque if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/artifacts/keep", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/artifacts/keep", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -468,7 +469,7 @@ func (s *JobsService) PlayJob(pid interface{}, jobID int, options ...RequestOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/play", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/play", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -493,7 +494,7 @@ func (s *JobsService) DeleteArtifacts(pid interface{}, jobID int, options ...Req if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/jobs/%d/artifacts", pathEscape(project), jobID) + u := fmt.Sprintf("projects/%s/jobs/%d/artifacts", PathEscape(project), jobID) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/labels.go b/vendor/github.com/xanzy/go-gitlab/labels.go index ccf6b24f7..66db47445 100644 --- a/vendor/github.com/xanzy/go-gitlab/labels.go +++ b/vendor/github.com/xanzy/go-gitlab/labels.go @@ -89,7 +89,7 @@ func (s *LabelsService) ListLabels(pid interface{}, opt *ListLabelsOptions, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/labels", pathEscape(project)) + u := fmt.Sprintf("projects/%s/labels", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != 
nil { @@ -117,7 +117,7 @@ func (s *LabelsService) GetLabel(pid interface{}, labelID interface{}, options . if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/labels/%s", pathEscape(project), label) + u := fmt.Sprintf("projects/%s/labels/%s", PathEscape(project), PathEscape(label)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -151,7 +151,7 @@ func (s *LabelsService) CreateLabel(pid interface{}, opt *CreateLabelOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/labels", pathEscape(project)) + u := fmt.Sprintf("projects/%s/labels", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -182,7 +182,7 @@ func (s *LabelsService) DeleteLabel(pid interface{}, opt *DeleteLabelOptions, op if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/labels", pathEscape(project)) + u := fmt.Sprintf("projects/%s/labels", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, opt, options) if err != nil { @@ -211,7 +211,7 @@ func (s *LabelsService) UpdateLabel(pid interface{}, opt *UpdateLabelOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/labels", pathEscape(project)) + u := fmt.Sprintf("projects/%s/labels", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -242,7 +242,7 @@ func (s *LabelsService) SubscribeToLabel(pid interface{}, labelID interface{}, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/labels/%s/subscribe", pathEscape(project), label) + u := fmt.Sprintf("projects/%s/labels/%s/subscribe", PathEscape(project), PathEscape(label)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -273,7 +273,7 @@ func (s *LabelsService) UnsubscribeFromLabel(pid interface{}, labelID interface{ if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/labels/%s/unsubscribe", pathEscape(project), label) + u := fmt.Sprintf("projects/%s/labels/%s/unsubscribe", PathEscape(project), PathEscape(label)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -296,7 +296,7 @@ func (s *LabelsService) PromoteLabel(pid interface{}, labelID interface{}, optio if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/labels/%s/promote", pathEscape(project), label) + u := fmt.Sprintf("projects/%s/labels/%s/promote", PathEscape(project), PathEscape(label)) req, err := s.client.NewRequest(http.MethodPut, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/markdown.go b/vendor/github.com/xanzy/go-gitlab/markdown.go new file mode 100644 index 000000000..8c20749fe --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/markdown.go @@ -0,0 +1,47 @@ +package gitlab + +import "net/http" + +// MarkdownService handles communication with the markdown related methods of +// the GitLab API. +// +// Gitlab API docs: https://docs.gitlab.com/ee/api/markdown.html +type MarkdownService struct { + client *Client +} + +// Markdown represents a markdown document. +// +// Gitlab API docs: https://docs.gitlab.com/ee/api/markdown.html +type Markdown struct { + HTML string `json:"html"` +} + +// RenderOptions represents the available Render() options. 
+// +// Gitlab API docs: +// https://docs.gitlab.com/ee/api/markdown.html#render-an-arbitrary-markdown-document +type RenderOptions struct { + Text *string `url:"text,omitempty" json:"text,omitempty"` + GitlabFlavouredMarkdown *bool `url:"gfm,omitempty" json:"gfm,omitempty"` + Project *string `url:"project,omitempty" json:"project,omitempty"` +} + +// Render an arbitrary markdown document. +// +// Gitlab API docs: +// https://docs.gitlab.com/ee/api/markdown.html#render-an-arbitrary-markdown-document +func (s *MarkdownService) Render(opt *RenderOptions, options ...RequestOptionFunc) (*Markdown, *Response, error) { + req, err := s.client.NewRequest(http.MethodPost, "markdown", opt, options) + if err != nil { + return nil, nil, err + } + + md := new(Markdown) + response, err := s.client.Do(req, md) + if err != nil { + return nil, response, err + } + + return md, response, nil +} diff --git a/vendor/github.com/xanzy/go-gitlab/merge_request_approvals.go b/vendor/github.com/xanzy/go-gitlab/merge_request_approvals.go index 2c39e17ff..07ac10938 100644 --- a/vendor/github.com/xanzy/go-gitlab/merge_request_approvals.go +++ b/vendor/github.com/xanzy/go-gitlab/merge_request_approvals.go @@ -100,6 +100,7 @@ type MergeRequestApprovalRule struct { Users []*BasicUser `json:"users"` Groups []*Group `json:"groups"` ContainsHiddenGroups bool `json:"contains_hidden_groups"` + Section string `json:"section"` ApprovedBy []*BasicUser `json:"approved_by"` Approved bool `json:"approved"` } @@ -144,7 +145,7 @@ func (s *MergeRequestApprovalsService) ApproveMergeRequest(pid interface{}, mr i if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approve", pathEscape(project), mr) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approve", PathEscape(project), mr) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -169,7 +170,7 @@ func (s *MergeRequestApprovalsService) UnapproveMergeRequest(pid interface{}, mr if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/unapprove", pathEscape(project), mr) + u := fmt.Sprintf("projects/%s/merge_requests/%d/unapprove", PathEscape(project), mr) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -197,7 +198,7 @@ func (s *MergeRequestApprovalsService) GetConfiguration(pid interface{}, mr int, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approvals", pathEscape(project), mr) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approvals", PathEscape(project), mr) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -222,7 +223,7 @@ func (s *MergeRequestApprovalsService) ChangeApprovalConfiguration(pid interface if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approvals", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approvals", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -257,7 +258,7 @@ func (s *MergeRequestApprovalsService) ChangeAllowedApprovers(pid interface{}, m if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approvers", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approvers", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -282,7 +283,7 @@ func (s 
*MergeRequestApprovalsService) GetApprovalRules(pid interface{}, mergeRe if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -307,7 +308,7 @@ func (s *MergeRequestApprovalsService) GetApprovalState(pid interface{}, mergeRe if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_state", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_state", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -332,8 +333,8 @@ type CreateMergeRequestApprovalRuleOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` ApprovalsRequired *int `url:"approvals_required,omitempty" json:"approvals_required,omitempty"` ApprovalProjectRuleID *int `url:"approval_project_rule_id,omitempty" json:"approval_project_rule_id,omitempty"` - UserIDs []int `url:"user_ids,omitempty" json:"user_ids,omitempty"` - GroupIDs []int `url:"group_ids,omitempty" json:"group_ids,omitempty"` + UserIDs *[]int `url:"user_ids,omitempty" json:"user_ids,omitempty"` + GroupIDs *[]int `url:"group_ids,omitempty" json:"group_ids,omitempty"` } // CreateApprovalRule creates a new MR level approval rule. @@ -345,7 +346,7 @@ func (s *MergeRequestApprovalsService) CreateApprovalRule(pid interface{}, merge if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -369,8 +370,8 @@ func (s *MergeRequestApprovalsService) CreateApprovalRule(pid interface{}, merge type UpdateMergeRequestApprovalRuleOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` ApprovalsRequired *int `url:"approvals_required,omitempty" json:"approvals_required,omitempty"` - UserIDs []int `url:"user_ids,omitempty" json:"user_ids,omitempty"` - GroupIDs []int `url:"group_ids,omitempty" json:"group_ids,omitempty"` + UserIDs *[]int `url:"user_ids,omitempty" json:"user_ids,omitempty"` + GroupIDs *[]int `url:"group_ids,omitempty" json:"group_ids,omitempty"` } // UpdateApprovalRule updates an existing approval rule with new options. 
@@ -382,7 +383,7 @@ func (s *MergeRequestApprovalsService) UpdateApprovalRule(pid interface{}, merge if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules/%d", pathEscape(project), mergeRequest, approvalRule) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules/%d", PathEscape(project), mergeRequest, approvalRule) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -407,7 +408,7 @@ func (s *MergeRequestApprovalsService) DeleteApprovalRule(pid interface{}, merge if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules/%d", pathEscape(project), mergeRequest, approvalRule) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approval_rules/%d", PathEscape(project), mergeRequest, approvalRule) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/merge_requests.go b/vendor/github.com/xanzy/go-gitlab/merge_requests.go index 0879ca8c8..8c01fe7f9 100644 --- a/vendor/github.com/xanzy/go-gitlab/merge_requests.go +++ b/vendor/github.com/xanzy/go-gitlab/merge_requests.go @@ -35,45 +35,47 @@ type MergeRequestsService struct { // // GitLab API docs: https://docs.gitlab.com/ce/api/merge_requests.html type MergeRequest struct { - ID int `json:"id"` - IID int `json:"iid"` - TargetBranch string `json:"target_branch"` - SourceBranch string `json:"source_branch"` - ProjectID int `json:"project_id"` - Title string `json:"title"` - State string `json:"state"` - CreatedAt *time.Time `json:"created_at"` - UpdatedAt *time.Time `json:"updated_at"` - Upvotes int `json:"upvotes"` - Downvotes int `json:"downvotes"` - Author *BasicUser `json:"author"` - Assignee *BasicUser `json:"assignee"` - Assignees []*BasicUser `json:"assignees"` - Reviewers []*BasicUser `json:"reviewers"` - SourceProjectID int `json:"source_project_id"` - TargetProjectID int `json:"target_project_id"` - Labels Labels `json:"labels"` - Description string `json:"description"` - WorkInProgress bool `json:"work_in_progress"` - Milestone *Milestone `json:"milestone"` - MergeWhenPipelineSucceeds bool `json:"merge_when_pipeline_succeeds"` - MergeStatus string `json:"merge_status"` - MergeError string `json:"merge_error"` - MergedBy *BasicUser `json:"merged_by"` - MergedAt *time.Time `json:"merged_at"` - ClosedBy *BasicUser `json:"closed_by"` - ClosedAt *time.Time `json:"closed_at"` - Subscribed bool `json:"subscribed"` - SHA string `json:"sha"` - MergeCommitSHA string `json:"merge_commit_sha"` - SquashCommitSHA string `json:"squash_commit_sha"` - UserNotesCount int `json:"user_notes_count"` - ChangesCount string `json:"changes_count"` - ShouldRemoveSourceBranch bool `json:"should_remove_source_branch"` - ForceRemoveSourceBranch bool `json:"force_remove_source_branch"` - AllowCollaboration bool `json:"allow_collaboration"` - WebURL string `json:"web_url"` - DiscussionLocked bool `json:"discussion_locked"` + ID int `json:"id"` + IID int `json:"iid"` + TargetBranch string `json:"target_branch"` + SourceBranch string `json:"source_branch"` + ProjectID int `json:"project_id"` + Title string `json:"title"` + State string `json:"state"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + Upvotes int `json:"upvotes"` + Downvotes int `json:"downvotes"` + Author *BasicUser `json:"author"` + Assignee *BasicUser `json:"assignee"` + Assignees []*BasicUser `json:"assignees"` + Reviewers []*BasicUser `json:"reviewers"` + 
SourceProjectID int `json:"source_project_id"` + TargetProjectID int `json:"target_project_id"` + Labels Labels `json:"labels"` + Description string `json:"description"` + Draft bool `json:"draft"` + WorkInProgress bool `json:"work_in_progress"` + Milestone *Milestone `json:"milestone"` + MergeWhenPipelineSucceeds bool `json:"merge_when_pipeline_succeeds"` + MergeStatus string `json:"merge_status"` + MergeError string `json:"merge_error"` + MergedBy *BasicUser `json:"merged_by"` + MergedAt *time.Time `json:"merged_at"` + ClosedBy *BasicUser `json:"closed_by"` + ClosedAt *time.Time `json:"closed_at"` + Subscribed bool `json:"subscribed"` + SHA string `json:"sha"` + MergeCommitSHA string `json:"merge_commit_sha"` + SquashCommitSHA string `json:"squash_commit_sha"` + UserNotesCount int `json:"user_notes_count"` + ChangesCount string `json:"changes_count"` + ShouldRemoveSourceBranch bool `json:"should_remove_source_branch"` + ForceRemoveSourceBranch bool `json:"force_remove_source_branch"` + AllowCollaboration bool `json:"allow_collaboration"` + WebURL string `json:"web_url"` + References *IssueReferences `json:"references"` + DiscussionLocked bool `json:"discussion_locked"` Changes []struct { OldPath string `json:"old_path"` NewPath string `json:"new_path"` @@ -139,32 +141,34 @@ func (m MergeRequestDiffVersion) String() string { // https://docs.gitlab.com/ce/api/merge_requests.html#list-merge-requests type ListMergeRequestsOptions struct { ListOptions - State *string `url:"state,omitempty" json:"state,omitempty"` - OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` - Sort *string `url:"sort,omitempty" json:"sort,omitempty"` - Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` - View *string `url:"view,omitempty" json:"view,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - NotLabels Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` - WithLabelsDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` - WithMergeStatusRecheck *bool `url:"with_merge_status_recheck,omitempty" json:"with_merge_status_recheck,omitempty"` - CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` - CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` - UpdatedAfter *time.Time `url:"updated_after,omitempty" json:"updated_after,omitempty"` - UpdatedBefore *time.Time `url:"updated_before,omitempty" json:"updated_before,omitempty"` - Scope *string `url:"scope,omitempty" json:"scope,omitempty"` - AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` - AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - ReviewerID *int `url:"reviewer_id,omitempty" json:"reviewer_id,omitempty"` - ReviewerUsername *string `url:"reviewer_username,omitempty" json:"reviewer_username,omitempty"` - MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` - TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` - Search *string `url:"search,omitempty" json:"search,omitempty"` - In *string `url:"in,omitempty" json:"in,omitempty"` - Draft *bool `url:"draft,omitempty" json:"draft,omitempty"` - WIP *string `url:"wip,omitempty" json:"wip,omitempty"` + State *string 
`url:"state,omitempty" json:"state,omitempty"` + OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` + Sort *string `url:"sort,omitempty" json:"sort,omitempty"` + Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` + View *string `url:"view,omitempty" json:"view,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + NotLabels *Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` + WithLabelsDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` + WithMergeStatusRecheck *bool `url:"with_merge_status_recheck,omitempty" json:"with_merge_status_recheck,omitempty"` + CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` + CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` + UpdatedAfter *time.Time `url:"updated_after,omitempty" json:"updated_after,omitempty"` + UpdatedBefore *time.Time `url:"updated_before,omitempty" json:"updated_before,omitempty"` + Scope *string `url:"scope,omitempty" json:"scope,omitempty"` + AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` + AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` + AssigneeID *AssigneeIDValue `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` + ApproverIDs *ApproverIDsValue `url:"approver_ids,omitempty" json:"approver_ids,omitempty"` + ApprovedByIDs *ApproverIDsValue `url:"approved_by_ids,omitempty" json:"approved_by_ids,omitempty"` + ReviewerID *ReviewerIDValue `url:"reviewer_id,omitempty" json:"reviewer_id,omitempty"` + ReviewerUsername *string `url:"reviewer_username,omitempty" json:"reviewer_username,omitempty"` + MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` + SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` + TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + In *string `url:"in,omitempty" json:"in,omitempty"` + Draft *bool `url:"draft,omitempty" json:"draft,omitempty"` + WIP *string `url:"wip,omitempty" json:"wip,omitempty"` } // ListMergeRequests gets all merge requests. The state parameter can be used @@ -189,51 +193,53 @@ func (s *MergeRequestsService) ListMergeRequests(opt *ListMergeRequestsOptions, return m, resp, err } -// ListGroupMergeRequestsOptions represents the available ListGroupMergeRequests() +// ListProjectMergeRequestsOptions represents the available ListMergeRequests() // options. 
// // GitLab API docs: -// https://docs.gitlab.com/ce/api/merge_requests.html#list-group-merge-requests -type ListGroupMergeRequestsOptions struct { +// https://docs.gitlab.com/ce/api/merge_requests.html#list-project-merge-requests +type ListProjectMergeRequestsOptions struct { ListOptions - State *string `url:"state,omitempty" json:"state,omitempty"` - OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` - Sort *string `url:"sort,omitempty" json:"sort,omitempty"` - Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` - View *string `url:"view,omitempty" json:"view,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - NotLabels Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` - WithLabelsDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` - WithMergeStatusRecheck *bool `url:"with_merge_status_recheck,omitempty" json:"with_merge_status_recheck,omitempty"` - CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` - CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` - UpdatedAfter *time.Time `url:"updated_after,omitempty" json:"updated_after,omitempty"` - UpdatedBefore *time.Time `url:"updated_before,omitempty" json:"updated_before,omitempty"` - Scope *string `url:"scope,omitempty" json:"scope,omitempty"` - AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` - AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - ReviewerID *int `url:"reviewer_id,omitempty" json:"reviewer_id,omitempty"` - ReviewerUsername *string `url:"reviewer_username,omitempty" json:"reviewer_username,omitempty"` - MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` - TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` - Search *string `url:"search,omitempty" json:"search,omitempty"` - In *string `url:"in,omitempty" json:"in,omitempty"` - Draft *bool `url:"draft,omitempty" json:"draft,omitempty"` - WIP *string `url:"wip,omitempty" json:"wip,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` + State *string `url:"state,omitempty" json:"state,omitempty"` + OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` + Sort *string `url:"sort,omitempty" json:"sort,omitempty"` + Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` + View *string `url:"view,omitempty" json:"view,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + NotLabels *Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` + WithLabelsDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` + WithMergeStatusRecheck *bool `url:"with_merge_status_recheck,omitempty" json:"with_merge_status_recheck,omitempty"` + CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` + CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` + UpdatedAfter *time.Time `url:"updated_after,omitempty" json:"updated_after,omitempty"` + UpdatedBefore *time.Time `url:"updated_before,omitempty" json:"updated_before,omitempty"` + Scope *string `url:"scope,omitempty" json:"scope,omitempty"` + AuthorID *int 
`url:"author_id,omitempty" json:"author_id,omitempty"` + AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` + AssigneeID *AssigneeIDValue `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` + ApproverIDs *ApproverIDsValue `url:"approver_ids,omitempty" json:"approver_ids,omitempty"` + ApprovedByIDs *ApproverIDsValue `url:"approved_by_ids,omitempty" json:"approved_by_ids,omitempty"` + ReviewerID *ReviewerIDValue `url:"reviewer_id,omitempty" json:"reviewer_id,omitempty"` + ReviewerUsername *string `url:"reviewer_username,omitempty" json:"reviewer_username,omitempty"` + MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` + SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` + TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + Draft *bool `url:"draft,omitempty" json:"draft,omitempty"` + WIP *string `url:"wip,omitempty" json:"wip,omitempty"` } -// ListGroupMergeRequests gets all merge requests for this group. +// ListProjectMergeRequests gets all merge requests for this project. // // GitLab API docs: -// https://docs.gitlab.com/ce/api/merge_requests.html#list-group-merge-requests -func (s *MergeRequestsService) ListGroupMergeRequests(gid interface{}, opt *ListGroupMergeRequestsOptions, options ...RequestOptionFunc) ([]*MergeRequest, *Response, error) { - group, err := parseID(gid) +// https://docs.gitlab.com/ce/api/merge_requests.html#list-project-merge-requests +func (s *MergeRequestsService) ListProjectMergeRequests(pid interface{}, opt *ListProjectMergeRequestsOptions, options ...RequestOptionFunc) ([]*MergeRequest, *Response, error) { + project, err := parseID(pid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/merge_requests", pathEscape(group)) + u := fmt.Sprintf("projects/%s/merge_requests", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -249,51 +255,53 @@ func (s *MergeRequestsService) ListGroupMergeRequests(gid interface{}, opt *List return m, resp, err } -// ListProjectMergeRequestsOptions represents the available ListMergeRequests() +// ListGroupMergeRequestsOptions represents the available ListGroupMergeRequests() // options. 
// // GitLab API docs: -// https://docs.gitlab.com/ce/api/merge_requests.html#list-project-merge-requests -type ListProjectMergeRequestsOptions struct { +// https://docs.gitlab.com/ce/api/merge_requests.html#list-group-merge-requests +type ListGroupMergeRequestsOptions struct { ListOptions - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` - State *string `url:"state,omitempty" json:"state,omitempty"` - OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` - Sort *string `url:"sort,omitempty" json:"sort,omitempty"` - Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` - View *string `url:"view,omitempty" json:"view,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - NotLabels Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` - WithLabelsDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` - WithMergeStatusRecheck *bool `url:"with_merge_status_recheck,omitempty" json:"with_merge_status_recheck,omitempty"` - CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` - CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` - UpdatedAfter *time.Time `url:"updated_after,omitempty" json:"updated_after,omitempty"` - UpdatedBefore *time.Time `url:"updated_before,omitempty" json:"updated_before,omitempty"` - Scope *string `url:"scope,omitempty" json:"scope,omitempty"` - AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` - AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` - AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - ReviewerID *int `url:"reviewer_id,omitempty" json:"reviewer_id,omitempty"` - ReviewerUsername *string `url:"reviewer_username,omitempty" json:"reviewer_username,omitempty"` - MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` - SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` - TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` - Search *string `url:"search,omitempty" json:"search,omitempty"` - Draft *bool `url:"draft,omitempty" json:"draft,omitempty"` - WIP *string `url:"wip,omitempty" json:"wip,omitempty"` + State *string `url:"state,omitempty" json:"state,omitempty"` + OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` + Sort *string `url:"sort,omitempty" json:"sort,omitempty"` + Milestone *string `url:"milestone,omitempty" json:"milestone,omitempty"` + View *string `url:"view,omitempty" json:"view,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + NotLabels *Labels `url:"not[labels],comma,omitempty" json:"not[labels],omitempty"` + WithLabelsDetails *bool `url:"with_labels_details,omitempty" json:"with_labels_details,omitempty"` + WithMergeStatusRecheck *bool `url:"with_merge_status_recheck,omitempty" json:"with_merge_status_recheck,omitempty"` + CreatedAfter *time.Time `url:"created_after,omitempty" json:"created_after,omitempty"` + CreatedBefore *time.Time `url:"created_before,omitempty" json:"created_before,omitempty"` + UpdatedAfter *time.Time `url:"updated_after,omitempty" json:"updated_after,omitempty"` + UpdatedBefore *time.Time `url:"updated_before,omitempty" json:"updated_before,omitempty"` + Scope *string `url:"scope,omitempty" json:"scope,omitempty"` + AuthorID *int `url:"author_id,omitempty" json:"author_id,omitempty"` + 
AuthorUsername *string `url:"author_username,omitempty" json:"author_username,omitempty"` + AssigneeID *AssigneeIDValue `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` + ApproverIDs *ApproverIDsValue `url:"approver_ids,omitempty" json:"approver_ids,omitempty"` + ApprovedByIDs *ApproverIDsValue `url:"approved_by_ids,omitempty" json:"approved_by_ids,omitempty"` + ReviewerID *ReviewerIDValue `url:"reviewer_id,omitempty" json:"reviewer_id,omitempty"` + ReviewerUsername *string `url:"reviewer_username,omitempty" json:"reviewer_username,omitempty"` + MyReactionEmoji *string `url:"my_reaction_emoji,omitempty" json:"my_reaction_emoji,omitempty"` + SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` + TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + In *string `url:"in,omitempty" json:"in,omitempty"` + Draft *bool `url:"draft,omitempty" json:"draft,omitempty"` + WIP *string `url:"wip,omitempty" json:"wip,omitempty"` } -// ListProjectMergeRequests gets all merge requests for this project. +// ListGroupMergeRequests gets all merge requests for this group. // // GitLab API docs: -// https://docs.gitlab.com/ce/api/merge_requests.html#list-project-merge-requests -func (s *MergeRequestsService) ListProjectMergeRequests(pid interface{}, opt *ListProjectMergeRequestsOptions, options ...RequestOptionFunc) ([]*MergeRequest, *Response, error) { - project, err := parseID(pid) +// https://docs.gitlab.com/ce/api/merge_requests.html#list-group-merge-requests +func (s *MergeRequestsService) ListGroupMergeRequests(gid interface{}, opt *ListGroupMergeRequestsOptions, options ...RequestOptionFunc) ([]*MergeRequest, *Response, error) { + group, err := parseID(gid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests", pathEscape(project)) + u := fmt.Sprintf("groups/%s/merge_requests", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -329,7 +337,7 @@ func (s *MergeRequestsService) GetMergeRequest(pid interface{}, mergeRequest int if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -354,7 +362,7 @@ func (s *MergeRequestsService) GetMergeRequestApprovals(pid interface{}, mergeRe if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/approvals", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/approvals", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -386,7 +394,7 @@ func (s *MergeRequestsService) GetMergeRequestCommits(pid interface{}, mergeRequ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/commits", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/commits", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -421,7 +429,7 @@ func (s *MergeRequestsService) GetMergeRequestChanges(pid interface{}, mergeRequ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/changes", pathEscape(project), mergeRequest) + u := 
fmt.Sprintf("projects/%s/merge_requests/%d/changes", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -446,7 +454,7 @@ func (s *MergeRequestsService) GetMergeRequestParticipants(pid interface{}, merg if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/participants", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/participants", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -471,7 +479,7 @@ func (s *MergeRequestsService) ListMergeRequestPipelines(pid interface{}, mergeR if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/pipelines", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/pipelines", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -496,7 +504,7 @@ func (s *MergeRequestsService) CreateMergeRequestPipeline(pid interface{}, merge if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/pipelines", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/pipelines", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -529,7 +537,7 @@ func (s *MergeRequestsService) GetIssuesClosedOnMerge(pid interface{}, mergeRequ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/closes_issues", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/closes_issues", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -555,10 +563,10 @@ type CreateMergeRequestOptions struct { Description *string `url:"description,omitempty" json:"description,omitempty"` SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - AssigneeIDs []int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` - ReviewerIDs []int `url:"reviewer_ids,omitempty" json:"reviewer_ids,omitempty"` + AssigneeIDs *[]int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` + ReviewerIDs *[]int `url:"reviewer_ids,omitempty" json:"reviewer_ids,omitempty"` TargetProjectID *int `url:"target_project_id,omitempty" json:"target_project_id,omitempty"` MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` RemoveSourceBranch *bool `url:"remove_source_branch,omitempty" json:"remove_source_branch,omitempty"` @@ -575,7 +583,7 @@ func (s *MergeRequestsService) CreateMergeRequest(pid interface{}, opt *CreateMe if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests", pathEscape(project)) + u := fmt.Sprintf("projects/%s/merge_requests", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -601,11 +609,11 @@ type UpdateMergeRequestOptions struct { Description *string `url:"description,omitempty" json:"description,omitempty"` TargetBranch *string `url:"target_branch,omitempty" 
json:"target_branch,omitempty"` AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - AssigneeIDs []int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` - ReviewerIDs []int `url:"reviewer_ids,omitempty" json:"reviewer_ids,omitempty"` - Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` - AddLabels Labels `url:"add_labels,comma,omitempty" json:"add_labels,omitempty"` - RemoveLabels Labels `url:"remove_labels,comma,omitempty" json:"remove_labels,omitempty"` + AssigneeIDs *[]int `url:"assignee_ids,omitempty" json:"assignee_ids,omitempty"` + ReviewerIDs *[]int `url:"reviewer_ids,omitempty" json:"reviewer_ids,omitempty"` + Labels *Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + AddLabels *Labels `url:"add_labels,comma,omitempty" json:"add_labels,omitempty"` + RemoveLabels *Labels `url:"remove_labels,comma,omitempty" json:"remove_labels,omitempty"` MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` StateEvent *string `url:"state_event,omitempty" json:"state_event,omitempty"` RemoveSourceBranch *bool `url:"remove_source_branch,omitempty" json:"remove_source_branch,omitempty"` @@ -623,7 +631,7 @@ func (s *MergeRequestsService) UpdateMergeRequest(pid interface{}, mergeRequest if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -648,7 +656,7 @@ func (s *MergeRequestsService) DeleteMergeRequest(pid interface{}, mergeRequest if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -684,7 +692,7 @@ func (s *MergeRequestsService) AcceptMergeRequest(pid interface{}, mergeRequest if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/merge", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/merge", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -713,7 +721,7 @@ func (s *MergeRequestsService) CancelMergeWhenPipelineSucceeds(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/cancel_merge_when_pipeline_succeeds", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/cancel_merge_when_pipeline_succeeds", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -740,7 +748,7 @@ func (s *MergeRequestsService) RebaseMergeRequest(pid interface{}, mergeRequest if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/rebase", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/rebase", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPut, u, nil, options) if err != nil { @@ -766,7 +774,7 @@ func (s *MergeRequestsService) GetMergeRequestDiffVersions(pid interface{}, merg if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/versions", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/versions", 
PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -791,7 +799,7 @@ func (s *MergeRequestsService) GetSingleMergeRequestDiffVersion(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/versions/%d", pathEscape(project), mergeRequest, version) + u := fmt.Sprintf("projects/%s/merge_requests/%d/versions/%d", PathEscape(project), mergeRequest, version) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -818,7 +826,7 @@ func (s *MergeRequestsService) SubscribeToMergeRequest(pid interface{}, mergeReq if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/subscribe", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/subscribe", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -846,7 +854,7 @@ func (s *MergeRequestsService) UnsubscribeFromMergeRequest(pid interface{}, merg if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/unsubscribe", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/unsubscribe", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -873,7 +881,7 @@ func (s *MergeRequestsService) CreateTodo(pid interface{}, mergeRequest int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/todo", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/todo", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/milestones.go b/vendor/github.com/xanzy/go-gitlab/milestones.go index e908a3138..47e9fffc0 100644 --- a/vendor/github.com/xanzy/go-gitlab/milestones.go +++ b/vendor/github.com/xanzy/go-gitlab/milestones.go @@ -58,10 +58,11 @@ func (m Milestone) String() string { // https://docs.gitlab.com/ce/api/milestones.html#list-project-milestones type ListMilestonesOptions struct { ListOptions - IIDs []int `url:"iids[],omitempty" json:"iids,omitempty"` - Title *string `url:"title,omitempty" json:"title,omitempty"` - State *string `url:"state,omitempty" json:"state,omitempty"` - Search *string `url:"search,omitempty" json:"search,omitempty"` + IIDs *[]int `url:"iids[],omitempty" json:"iids,omitempty"` + Title *string `url:"title,omitempty" json:"title,omitempty"` + State *string `url:"state,omitempty" json:"state,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + IncludeParentMilestones *bool `url:"include_parent_milestones,omitempty" json:"include_parent_milestones,omitempty"` } // ListMilestones returns a list of project milestones. 
@@ -73,7 +74,7 @@ func (s *MilestonesService) ListMilestones(pid interface{}, opt *ListMilestonesO if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/milestones", pathEscape(project)) + u := fmt.Sprintf("projects/%s/milestones", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -98,7 +99,7 @@ func (s *MilestonesService) GetMilestone(pid interface{}, milestone int, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/milestones/%d", pathEscape(project), milestone) + u := fmt.Sprintf("projects/%s/milestones/%d", PathEscape(project), milestone) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -134,7 +135,7 @@ func (s *MilestonesService) CreateMilestone(pid interface{}, opt *CreateMileston if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/milestones", pathEscape(project)) + u := fmt.Sprintf("projects/%s/milestones", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -171,7 +172,7 @@ func (s *MilestonesService) UpdateMilestone(pid interface{}, milestone int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/milestones/%d", pathEscape(project), milestone) + u := fmt.Sprintf("projects/%s/milestones/%d", PathEscape(project), milestone) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -196,7 +197,7 @@ func (s *MilestonesService) DeleteMilestone(pid interface{}, milestone int, opti if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/milestones/%d", pathEscape(project), milestone) + u := fmt.Sprintf("projects/%s/milestones/%d", PathEscape(project), milestone) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -220,7 +221,7 @@ func (s *MilestonesService) GetMilestoneIssues(pid interface{}, milestone int, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/milestones/%d/issues", pathEscape(project), milestone) + u := fmt.Sprintf("projects/%s/milestones/%d/issues", PathEscape(project), milestone) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -253,7 +254,7 @@ func (s *MilestonesService) GetMilestoneMergeRequests(pid interface{}, milestone if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/milestones/%d/merge_requests", pathEscape(project), milestone) + u := fmt.Sprintf("projects/%s/milestones/%d/merge_requests", PathEscape(project), milestone) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/namespaces.go b/vendor/github.com/xanzy/go-gitlab/namespaces.go index c81b03203..946ee4c89 100644 --- a/vendor/github.com/xanzy/go-gitlab/namespaces.go +++ b/vendor/github.com/xanzy/go-gitlab/namespaces.go @@ -116,7 +116,7 @@ func (s *NamespacesService) GetNamespace(id interface{}, options ...RequestOptio if err != nil { return nil, nil, err } - u := fmt.Sprintf("namespaces/%s", pathEscape(namespace)) + u := fmt.Sprintf("namespaces/%s", PathEscape(namespace)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/notes.go b/vendor/github.com/xanzy/go-gitlab/notes.go index 8c9122321..1b77d2a73 100644 --- a/vendor/github.com/xanzy/go-gitlab/notes.go +++ b/vendor/github.com/xanzy/go-gitlab/notes.go @@ -121,7 +121,7 @@ func (s *NotesService) ListIssueNotes(pid interface{}, issue 
int, opt *ListIssue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/notes", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/notes", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -146,7 +146,7 @@ func (s *NotesService) GetIssueNote(pid interface{}, issue, note int, options .. if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/notes/%d", pathEscape(project), issue, note) + u := fmt.Sprintf("projects/%s/issues/%d/notes/%d", PathEscape(project), issue, note) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -181,7 +181,7 @@ func (s *NotesService) CreateIssueNote(pid interface{}, issue int, opt *CreateIs if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/notes", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/notes", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -215,7 +215,7 @@ func (s *NotesService) UpdateIssueNote(pid interface{}, issue, note int, opt *Up if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/notes/%d", pathEscape(project), issue, note) + u := fmt.Sprintf("projects/%s/issues/%d/notes/%d", PathEscape(project), issue, note) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -240,7 +240,7 @@ func (s *NotesService) DeleteIssueNote(pid interface{}, issue, note int, options if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/notes/%d", pathEscape(project), issue, note) + u := fmt.Sprintf("projects/%s/issues/%d/notes/%d", PathEscape(project), issue, note) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -270,7 +270,7 @@ func (s *NotesService) ListSnippetNotes(pid interface{}, snippet int, opt *ListS if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/notes", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d/notes", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -295,7 +295,7 @@ func (s *NotesService) GetSnippetNote(pid interface{}, snippet, note int, option if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/notes/%d", pathEscape(project), snippet, note) + u := fmt.Sprintf("projects/%s/snippets/%d/notes/%d", PathEscape(project), snippet, note) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -330,7 +330,7 @@ func (s *NotesService) CreateSnippetNote(pid interface{}, snippet int, opt *Crea if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/notes", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d/notes", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -364,7 +364,7 @@ func (s *NotesService) UpdateSnippetNote(pid interface{}, snippet, note int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/notes/%d", pathEscape(project), snippet, note) + u := fmt.Sprintf("projects/%s/snippets/%d/notes/%d", PathEscape(project), snippet, note) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -389,7 +389,7 @@ func (s *NotesService) DeleteSnippetNote(pid interface{}, snippet, note int, opt if err != nil { 
return nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/notes/%d", pathEscape(project), snippet, note) + u := fmt.Sprintf("projects/%s/snippets/%d/notes/%d", PathEscape(project), snippet, note) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -419,7 +419,7 @@ func (s *NotesService) ListMergeRequestNotes(pid interface{}, mergeRequest int, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/notes", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/notes", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -444,7 +444,7 @@ func (s *NotesService) GetMergeRequestNote(pid interface{}, mergeRequest, note i if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/notes/%d", pathEscape(project), mergeRequest, note) + u := fmt.Sprintf("projects/%s/merge_requests/%d/notes/%d", PathEscape(project), mergeRequest, note) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -478,7 +478,7 @@ func (s *NotesService) CreateMergeRequestNote(pid interface{}, mergeRequest int, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/notes", pathEscape(project), mergeRequest) + u := fmt.Sprintf("projects/%s/merge_requests/%d/notes", PathEscape(project), mergeRequest) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -513,7 +513,7 @@ func (s *NotesService) UpdateMergeRequestNote(pid interface{}, mergeRequest, not return nil, nil, err } u := fmt.Sprintf( - "projects/%s/merge_requests/%d/notes/%d", pathEscape(project), mergeRequest, note) + "projects/%s/merge_requests/%d/notes/%d", PathEscape(project), mergeRequest, note) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { return nil, nil, err @@ -538,7 +538,7 @@ func (s *NotesService) DeleteMergeRequestNote(pid interface{}, mergeRequest, not return nil, err } u := fmt.Sprintf( - "projects/%s/merge_requests/%d/notes/%d", pathEscape(project), mergeRequest, note) + "projects/%s/merge_requests/%d/notes/%d", PathEscape(project), mergeRequest, note) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -567,7 +567,7 @@ func (s *NotesService) ListEpicNotes(gid interface{}, epic int, opt *ListEpicNot if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/notes", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d/notes", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -592,7 +592,7 @@ func (s *NotesService) GetEpicNote(gid interface{}, epic, note int, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/notes/%d", pathEscape(group), epic, note) + u := fmt.Sprintf("groups/%s/epics/%d/notes/%d", PathEscape(group), epic, note) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -625,7 +625,7 @@ func (s *NotesService) CreateEpicNote(gid interface{}, epic int, opt *CreateEpic if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/notes", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d/notes", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -657,7 +657,7 @@ func (s *NotesService) UpdateEpicNote(gid interface{}, epic, note int, opt *Upda if err != 
nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/notes/%d", pathEscape(group), epic, note) + u := fmt.Sprintf("groups/%s/epics/%d/notes/%d", PathEscape(group), epic, note) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -681,7 +681,7 @@ func (s *NotesService) DeleteEpicNote(gid interface{}, epic, note int, options . if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/notes/%d", pathEscape(group), epic, note) + u := fmt.Sprintf("groups/%s/epics/%d/notes/%d", PathEscape(group), epic, note) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/notifications.go b/vendor/github.com/xanzy/go-gitlab/notifications.go index 06385b377..b4771d8ed 100644 --- a/vendor/github.com/xanzy/go-gitlab/notifications.go +++ b/vendor/github.com/xanzy/go-gitlab/notifications.go @@ -138,7 +138,7 @@ func (s *NotificationSettingsService) GetSettingsForGroup(gid interface{}, optio if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/notification_settings", pathEscape(group)) + u := fmt.Sprintf("groups/%s/notification_settings", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -163,7 +163,7 @@ func (s *NotificationSettingsService) GetSettingsForProject(pid interface{}, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/notification_settings", pathEscape(project)) + u := fmt.Sprintf("projects/%s/notification_settings", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -188,7 +188,7 @@ func (s *NotificationSettingsService) UpdateSettingsForGroup(gid interface{}, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/notification_settings", pathEscape(group)) + u := fmt.Sprintf("groups/%s/notification_settings", PathEscape(group)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -213,7 +213,7 @@ func (s *NotificationSettingsService) UpdateSettingsForProject(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/notification_settings", pathEscape(project)) + u := fmt.Sprintf("projects/%s/notification_settings", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/packages.go b/vendor/github.com/xanzy/go-gitlab/packages.go index 390ada242..37fe0e00b 100644 --- a/vendor/github.com/xanzy/go-gitlab/packages.go +++ b/vendor/github.com/xanzy/go-gitlab/packages.go @@ -30,7 +30,7 @@ type PackagesService struct { client *Client } -// Package represents a GitLab single package. +// Package represents a GitLab package. // // GitLab API docs: https://docs.gitlab.com/ee/api/packages.html type Package struct { @@ -38,14 +38,29 @@ type Package struct { Name string `json:"name"` Version string `json:"version"` PackageType string `json:"package_type"` + Status string `json:"status"` Links *PackageLinks `json:"_links"` CreatedAt *time.Time `json:"created_at"` + Tags []string `json:"tags"` } func (s Package) String() string { return Stringify(s) } +// GroupPackage represents a GitLab group package. 
+// +// GitLab API docs: https://docs.gitlab.com/ee/api/packages.html +type GroupPackage struct { + Package + ProjectID int `json:"project_id"` + ProjectPath string `json:"project_path"` +} + +func (s GroupPackage) String() string { + return Stringify(s) +} + // PackageLinks holds links for itself and deleting. type PackageLinks struct { WebPath string `json:"web_path"` @@ -74,7 +89,8 @@ func (s PackageFile) String() string { return Stringify(s) } -// ListProjectPackagesOptions are the parameters available in a ListProjectPackages() Operation. +// ListProjectPackagesOptions represents the available ListProjectPackages() +// options. // // GitLab API docs: // https://docs.gitlab.com/ee/api/packages.html#within-a-project @@ -85,6 +101,7 @@ type ListProjectPackagesOptions struct { PackageType *string `url:"package_type,omitempty" json:"package_type,omitempty"` PackageName *string `url:"package_name,omitempty" json:"package_name,omitempty"` IncludeVersionless *bool `url:"include_versionless,omitempty" json:"include_versionless,omitempty"` + Status *string `url:"status,omitempty" json:"status,omitempty"` } // ListProjectPackages gets a list of packages in a project. @@ -96,7 +113,7 @@ func (s *PackagesService) ListProjectPackages(pid interface{}, opt *ListProjectP if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/packages", pathEscape(project)) + u := fmt.Sprintf("projects/%s/packages", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -112,8 +129,49 @@ func (s *PackagesService) ListProjectPackages(pid interface{}, opt *ListProjectP return ps, resp, err } -// ListPackageFilesOptions represents the available -// ListPackageFiles() options. +// ListGroupPackagesOptions represents the available ListGroupPackages() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/packages.html#within-a-group +type ListGroupPackagesOptions struct { + ListOptions + ExcludeSubgroups *bool `url:"exclude_subgroups,omitempty" json:"exclude_subgroups,omitempty"` + OrderBy *string `url:"order_by,omitempty" json:"order_by,omitempty"` + Sort *string `url:"sort,omitempty" json:"sort,omitempty"` + PackageType *string `url:"package_type,omitempty" json:"package_type,omitempty"` + PackageName *string `url:"package_name,omitempty" json:"package_name,omitempty"` + IncludeVersionless *bool `url:"include_versionless,omitempty" json:"include_versionless,omitempty"` + Status *string `url:"status,omitempty" json:"status,omitempty"` +} + +// ListGroupPackages gets a list of packages in a group. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/packages.html#within-a-group +func (s *PackagesService) ListGroupPackages(gid interface{}, opt *ListGroupPackagesOptions, options ...RequestOptionFunc) ([]*GroupPackage, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/packages", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var ps []*GroupPackage + resp, err := s.client.Do(req, &ps) + if err != nil { + return nil, resp, err + } + + return ps, resp, err +} + +// ListPackageFilesOptions represents the available ListPackageFiles() +// options. 
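The packages.go hunks above introduce a group-level listing API (the GroupPackage type, ListGroupPackagesOptions and ListGroupPackages). A minimal usage sketch, not part of the patch, assuming a configured *gitlab.Client named git; the group ID 1234 and the npm filter are illustrative:

// assumes: import ( "fmt"; gitlab "github.com/xanzy/go-gitlab" )
func listGroupPackages(git *gitlab.Client) error {
	opt := &gitlab.ListGroupPackagesOptions{
		PackageType:      gitlab.String("npm"), // optional filter
		ExcludeSubgroups: gitlab.Bool(false),   // keep packages from subgroups
	}
	// 1234 is a made-up group ID; a group path string would also be accepted.
	pkgs, _, err := git.Packages.ListGroupPackages(1234, opt)
	if err != nil {
		return err
	}
	for _, p := range pkgs {
		// ProjectID and ProjectPath come from the new GroupPackage type above.
		fmt.Printf("%s %s (project %s)\n", p.Name, p.Version, p.ProjectPath)
	}
	return nil
}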
// // GitLab API docs: // https://docs.gitlab.com/ee/api/packages.html#list-package-files @@ -130,7 +188,7 @@ func (s *PackagesService) ListPackageFiles(pid interface{}, pkg int, opt *ListPa } u := fmt.Sprintf( "projects/%s/packages/%d/package_files", - pathEscape(project), + PathEscape(project), pkg, ) @@ -157,7 +215,7 @@ func (s *PackagesService) DeleteProjectPackage(pid interface{}, pkg int, options if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/packages/%d", pathEscape(project), pkg) + u := fmt.Sprintf("projects/%s/packages/%d", PathEscape(project), pkg) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/pages.go b/vendor/github.com/xanzy/go-gitlab/pages.go index 167551b97..617b0ba4b 100644 --- a/vendor/github.com/xanzy/go-gitlab/pages.go +++ b/vendor/github.com/xanzy/go-gitlab/pages.go @@ -34,7 +34,7 @@ func (s *PagesService) UnpublishPages(gid interface{}, options ...RequestOptionF if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/pages", pathEscape(page)) + u := fmt.Sprintf("projects/%s/pages", PathEscape(page)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/pages_domains.go b/vendor/github.com/xanzy/go-gitlab/pages_domains.go index 52b6f7bd4..3e1b04f61 100644 --- a/vendor/github.com/xanzy/go-gitlab/pages_domains.go +++ b/vendor/github.com/xanzy/go-gitlab/pages_domains.go @@ -62,7 +62,7 @@ func (s *PagesDomainsService) ListPagesDomains(pid interface{}, opt *ListPagesDo if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pages/domains", pathEscape(project)) + u := fmt.Sprintf("projects/%s/pages/domains", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -106,7 +106,7 @@ func (s *PagesDomainsService) GetPagesDomain(pid interface{}, domain string, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pages/domains/%s", pathEscape(project), domain) + u := fmt.Sprintf("projects/%s/pages/domains/%s", PathEscape(project), domain) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -142,7 +142,7 @@ func (s *PagesDomainsService) CreatePagesDomain(pid interface{}, opt *CreatePage if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pages/domains", pathEscape(project)) + u := fmt.Sprintf("projects/%s/pages/domains", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -177,7 +177,7 @@ func (s *PagesDomainsService) UpdatePagesDomain(pid interface{}, domain string, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pages/domains/%s", pathEscape(project), domain) + u := fmt.Sprintf("projects/%s/pages/domains/%s", PathEscape(project), domain) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -202,7 +202,7 @@ func (s *PagesDomainsService) DeletePagesDomain(pid interface{}, domain string, if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/pages/domains/%s", pathEscape(project), domain) + u := fmt.Sprintf("projects/%s/pages/domains/%s", PathEscape(project), domain) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/pipeline_schedules.go b/vendor/github.com/xanzy/go-gitlab/pipeline_schedules.go index 8a35ecf08..12640e27d 100644 --- 
a/vendor/github.com/xanzy/go-gitlab/pipeline_schedules.go +++ b/vendor/github.com/xanzy/go-gitlab/pipeline_schedules.go @@ -69,7 +69,7 @@ func (s *PipelineSchedulesService) ListPipelineSchedules(pid interface{}, opt *L if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules", pathEscape(project)) + u := fmt.Sprintf("projects/%s/pipeline_schedules", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -94,7 +94,7 @@ func (s *PipelineSchedulesService) GetPipelineSchedule(pid interface{}, schedule if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d", pathEscape(project), schedule) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d", PathEscape(project), schedule) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -132,7 +132,7 @@ func (s *PipelineSchedulesService) CreatePipelineSchedule(pid interface{}, opt * if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules", pathEscape(project)) + u := fmt.Sprintf("projects/%s/pipeline_schedules", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -170,7 +170,7 @@ func (s *PipelineSchedulesService) EditPipelineSchedule(pid interface{}, schedul if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d", pathEscape(project), schedule) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d", PathEscape(project), schedule) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -196,7 +196,7 @@ func (s *PipelineSchedulesService) TakeOwnershipOfPipelineSchedule(pid interface if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/take_ownership", pathEscape(project), schedule) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/take_ownership", PathEscape(project), schedule) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -221,7 +221,7 @@ func (s *PipelineSchedulesService) DeletePipelineSchedule(pid interface{}, sched if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d", pathEscape(project), schedule) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d", PathEscape(project), schedule) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -240,7 +240,7 @@ func (s *PipelineSchedulesService) RunPipelineSchedule(pid interface{}, schedule if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/play", pathEscape(project), schedule) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/play", PathEscape(project), schedule) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -270,7 +270,7 @@ func (s *PipelineSchedulesService) CreatePipelineScheduleVariable(pid interface{ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/variables", pathEscape(project), schedule) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/variables", PathEscape(project), schedule) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -305,7 +305,7 @@ func (s *PipelineSchedulesService) EditPipelineScheduleVariable(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/variables/%s", pathEscape(project), schedule, key) + u := 
fmt.Sprintf("projects/%s/pipeline_schedules/%d/variables/%s", PathEscape(project), schedule, key) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -330,7 +330,7 @@ func (s *PipelineSchedulesService) DeletePipelineScheduleVariable(pid interface{ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/variables/%s", pathEscape(project), schedule, key) + u := fmt.Sprintf("projects/%s/pipeline_schedules/%d/variables/%s", PathEscape(project), schedule, key) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/pipeline_triggers.go b/vendor/github.com/xanzy/go-gitlab/pipeline_triggers.go index 7fc58752b..c2cecfa51 100644 --- a/vendor/github.com/xanzy/go-gitlab/pipeline_triggers.go +++ b/vendor/github.com/xanzy/go-gitlab/pipeline_triggers.go @@ -60,7 +60,7 @@ func (s *PipelineTriggersService) ListPipelineTriggers(pid interface{}, opt *Lis if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/triggers", pathEscape(project)) + u := fmt.Sprintf("projects/%s/triggers", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -85,7 +85,7 @@ func (s *PipelineTriggersService) GetPipelineTrigger(pid interface{}, trigger in if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/triggers/%d", pathEscape(project), trigger) + u := fmt.Sprintf("projects/%s/triggers/%d", PathEscape(project), trigger) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -118,7 +118,7 @@ func (s *PipelineTriggersService) AddPipelineTrigger(pid interface{}, opt *AddPi if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/triggers", pathEscape(project)) + u := fmt.Sprintf("projects/%s/triggers", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -151,7 +151,7 @@ func (s *PipelineTriggersService) EditPipelineTrigger(pid interface{}, trigger i if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/triggers/%d", pathEscape(project), trigger) + u := fmt.Sprintf("projects/%s/triggers/%d", PathEscape(project), trigger) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -177,7 +177,7 @@ func (s *PipelineTriggersService) TakeOwnershipOfPipelineTrigger(pid interface{} if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/triggers/%d/take_ownership", pathEscape(project), trigger) + u := fmt.Sprintf("projects/%s/triggers/%d/take_ownership", PathEscape(project), trigger) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -202,7 +202,7 @@ func (s *PipelineTriggersService) DeletePipelineTrigger(pid interface{}, trigger if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/triggers/%d", pathEscape(project), trigger) + u := fmt.Sprintf("projects/%s/triggers/%d", PathEscape(project), trigger) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -231,7 +231,7 @@ func (s *PipelineTriggersService) RunPipelineTrigger(pid interface{}, opt *RunPi if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/trigger/pipeline", pathEscape(project)) + u := fmt.Sprintf("projects/%s/trigger/pipeline", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/pipelines.go 
b/vendor/github.com/xanzy/go-gitlab/pipelines.go index e52e3f8a5..927320209 100644 --- a/vendor/github.com/xanzy/go-gitlab/pipelines.go +++ b/vendor/github.com/xanzy/go-gitlab/pipelines.go @@ -46,6 +46,7 @@ type Pipeline struct { ID int `json:"id"` ProjectID int `json:"project_id"` Status string `json:"status"` + Source string `json:"source"` Ref string `json:"ref"` SHA string `json:"sha"` BeforeSHA string `json:"before_sha"` @@ -135,6 +136,7 @@ type PipelineInfo struct { ID int `json:"id"` ProjectID int `json:"project_id"` Status string `json:"status"` + Source string `json:"source"` Ref string `json:"ref"` SHA string `json:"sha"` WebURL string `json:"web_url"` @@ -153,6 +155,7 @@ type ListProjectPipelinesOptions struct { ListOptions Scope *string `url:"scope,omitempty" json:"scope,omitempty"` Status *BuildStateValue `url:"status,omitempty" json:"status,omitempty"` + Source *string `url:"source,omitempty" json:"source,omitempty"` Ref *string `url:"ref,omitempty" json:"ref,omitempty"` SHA *string `url:"sha,omitempty" json:"sha,omitempty"` YamlErrors *bool `url:"yaml_errors,omitempty" json:"yaml_errors,omitempty"` @@ -172,7 +175,7 @@ func (s *PipelinesService) ListProjectPipelines(pid interface{}, opt *ListProjec if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines", pathEscape(project)) + u := fmt.Sprintf("projects/%s/pipelines", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -196,7 +199,7 @@ func (s *PipelinesService) GetPipeline(pid interface{}, pipeline int, options .. if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d", pathEscape(project), pipeline) + u := fmt.Sprintf("projects/%s/pipelines/%d", PathEscape(project), pipeline) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -220,7 +223,7 @@ func (s *PipelinesService) GetPipelineVariables(pid interface{}, pipeline int, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d/variables", pathEscape(project), pipeline) + u := fmt.Sprintf("projects/%s/pipelines/%d/variables", PathEscape(project), pipeline) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -239,14 +242,14 @@ func (s *PipelinesService) GetPipelineVariables(pid interface{}, pipeline int, o // GetPipelineTestReport gets the test report of a single project pipeline. 
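The pipelines.go hunks above add a Source field to Pipeline and PipelineInfo and a matching Source filter to ListProjectPipelinesOptions. A hedged sketch of filtering by the new field, assuming a configured *gitlab.Client named git; the project ID 42 is illustrative:

// assumes: import ( "fmt"; gitlab "github.com/xanzy/go-gitlab" )
func listScheduledPipelines(git *gitlab.Client) error {
	opt := &gitlab.ListProjectPipelinesOptions{
		Source: gitlab.String("schedule"), // filter added by this update
	}
	pipelines, _, err := git.Pipelines.ListProjectPipelines(42, opt)
	if err != nil {
		return err
	}
	for _, p := range pipelines {
		fmt.Printf("pipeline #%d: status=%s source=%s\n", p.ID, p.Status, p.Source)
	}
	return nil
}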
// // GitLab API docs: https://docs.gitlab.com/ee/api/pipelines.html#get-a-pipelines-test-report -func (s *PipelinesService) GetPipelineTestReport(pid interface{}, pipeline int) (*PipelineTestReport, *Response, error) { +func (s *PipelinesService) GetPipelineTestReport(pid interface{}, pipeline int, options ...RequestOptionFunc) (*PipelineTestReport, *Response, error) { project, err := parseID(pid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d/test_report", pathEscape(project), pipeline) + u := fmt.Sprintf("projects/%s/pipelines/%d/test_report", PathEscape(project), pipeline) - req, err := s.client.NewRequest(http.MethodGet, u, nil, nil) + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { return nil, nil, err } @@ -264,8 +267,8 @@ func (s *PipelinesService) GetPipelineTestReport(pid interface{}, pipeline int) // // GitLab API docs: https://docs.gitlab.com/ce/api/pipelines.html#create-a-new-pipeline type CreatePipelineOptions struct { - Ref *string `url:"ref" json:"ref"` - Variables []*PipelineVariable `url:"variables,omitempty" json:"variables,omitempty"` + Ref *string `url:"ref" json:"ref"` + Variables *[]*PipelineVariable `url:"variables,omitempty" json:"variables,omitempty"` } // CreatePipeline creates a new project pipeline. @@ -276,7 +279,7 @@ func (s *PipelinesService) CreatePipeline(pid interface{}, opt *CreatePipelineOp if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipeline", pathEscape(project)) + u := fmt.Sprintf("projects/%s/pipeline", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -301,7 +304,7 @@ func (s *PipelinesService) RetryPipelineBuild(pid interface{}, pipeline int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d/retry", pathEscape(project), pipeline) + u := fmt.Sprintf("projects/%s/pipelines/%d/retry", PathEscape(project), pipeline) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -326,7 +329,7 @@ func (s *PipelinesService) CancelPipelineBuild(pid interface{}, pipeline int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d/cancel", pathEscape(project), pipeline) + u := fmt.Sprintf("projects/%s/pipelines/%d/cancel", PathEscape(project), pipeline) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -351,7 +354,7 @@ func (s *PipelinesService) DeletePipeline(pid interface{}, pipeline int, options if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/pipelines/%d", pathEscape(project), pipeline) + u := fmt.Sprintf("projects/%s/pipelines/%d", PathEscape(project), pipeline) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go b/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go index 2bcc7da41..2821b4556 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go +++ b/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go @@ -67,7 +67,7 @@ func (s *ProjectAccessTokensService) ListProjectAccessTokens(pid interface{}, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/access_tokens", pathEscape(project)) + u := fmt.Sprintf("projects/%s/access_tokens", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -90,7 +90,7 @@ func (s *ProjectAccessTokensService) 
ListProjectAccessTokens(pid interface{}, op // https://docs.gitlab.com/ee/api/resource_access_tokens.html#create-a-project-access-token type CreateProjectAccessTokenOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` - Scopes []string `url:"scopes,omitempty" json:"scopes,omitempty"` + Scopes *[]string `url:"scopes,omitempty" json:"scopes,omitempty"` AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` ExpiresAt *ISOTime `url:"expires_at,omitempty" json:"expires_at,omitempty"` } @@ -104,7 +104,7 @@ func (s *ProjectAccessTokensService) CreateProjectAccessToken(pid interface{}, o if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/access_tokens", pathEscape(project)) + u := fmt.Sprintf("projects/%s/access_tokens", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -129,7 +129,7 @@ func (s *ProjectAccessTokensService) DeleteProjectAccessToken(pid interface{}, i if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/access_tokens/%d", pathEscape(project), id) + u := fmt.Sprintf("projects/%s/access_tokens/%d", PathEscape(project), id) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_badges.go b/vendor/github.com/xanzy/go-gitlab/project_badges.go index e575f8ef1..aa62a1f62 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_badges.go +++ b/vendor/github.com/xanzy/go-gitlab/project_badges.go @@ -27,6 +27,7 @@ import ( // https://docs.gitlab.com/ee/api/project_badges.html#list-all-badges-of-a-project type ProjectBadge struct { ID int `json:"id"` + Name string `json:"name"` LinkURL string `json:"link_url"` ImageURL string `json:"image_url"` RenderedLinkURL string `json:"rendered_link_url"` @@ -48,7 +49,10 @@ type ProjectBadgesService struct { // // GitLab API docs: // https://docs.gitlab.com/ee/api/project_badges.html#list-all-badges-of-a-project -type ListProjectBadgesOptions ListOptions +type ListProjectBadgesOptions struct { + ListOptions + Name *string `url:"name,omitempty" json:"name,omitempty"` +} // ListProjectBadges gets a list of a project's badges and its group badges. // @@ -59,7 +63,7 @@ func (s *ProjectBadgesService) ListProjectBadges(pid interface{}, opt *ListProje if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/badges", pathEscape(project)) + u := fmt.Sprintf("projects/%s/badges", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -84,7 +88,7 @@ func (s *ProjectBadgesService) GetProjectBadge(pid interface{}, badge int, optio if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/badges/%d", pathEscape(project), badge) + u := fmt.Sprintf("projects/%s/badges/%d", PathEscape(project), badge) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -107,6 +111,7 @@ func (s *ProjectBadgesService) GetProjectBadge(pid interface{}, badge int, optio type AddProjectBadgeOptions struct { LinkURL *string `url:"link_url,omitempty" json:"link_url,omitempty"` ImageURL *string `url:"image_url,omitempty" json:"image_url,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` } // AddProjectBadge adds a badge to a project. 
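Two option structs in the hunks above change slice fields into pointers to slices: CreatePipelineOptions.Variables becomes *[]*PipelineVariable and CreateProjectAccessTokenOptions.Scopes becomes *[]string, so existing callers now pass the address of the slice. A sketch of the updated call sites, with a made-up project ID, ref, and values:

// assumes: import gitlab "github.com/xanzy/go-gitlab"
func createPipelineAndToken(git *gitlab.Client) error {
	// CreatePipelineOptions.Variables is now *[]*PipelineVariable:
	// build the slice first, then pass its address.
	vars := []*gitlab.PipelineVariable{
		{Key: "DEPLOY_ENV", Value: "staging"},
	}
	_, _, err := git.Pipelines.CreatePipeline(42, &gitlab.CreatePipelineOptions{
		Ref:       gitlab.String("main"),
		Variables: &vars,
	})
	if err != nil {
		return err
	}

	// CreateProjectAccessTokenOptions.Scopes is now *[]string as well.
	scopes := []string{"read_api"}
	_, _, err = git.ProjectAccessTokens.CreateProjectAccessToken(42, &gitlab.CreateProjectAccessTokenOptions{
		Name:   gitlab.String("ci-token"),
		Scopes: &scopes,
	})
	return err
}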
@@ -118,7 +123,7 @@ func (s *ProjectBadgesService) AddProjectBadge(pid interface{}, opt *AddProjectB if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/badges", pathEscape(project)) + u := fmt.Sprintf("projects/%s/badges", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -141,6 +146,7 @@ func (s *ProjectBadgesService) AddProjectBadge(pid interface{}, opt *AddProjectB type EditProjectBadgeOptions struct { LinkURL *string `url:"link_url,omitempty" json:"link_url,omitempty"` ImageURL *string `url:"image_url,omitempty" json:"image_url,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` } // EditProjectBadge updates a badge of a project. @@ -152,7 +158,7 @@ func (s *ProjectBadgesService) EditProjectBadge(pid interface{}, badge int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/badges/%d", pathEscape(project), badge) + u := fmt.Sprintf("projects/%s/badges/%d", PathEscape(project), badge) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -178,7 +184,7 @@ func (s *ProjectBadgesService) DeleteProjectBadge(pid interface{}, badge int, op if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/badges/%d", pathEscape(project), badge) + u := fmt.Sprintf("projects/%s/badges/%d", PathEscape(project), badge) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -207,7 +213,7 @@ func (s *ProjectBadgesService) PreviewProjectBadge(pid interface{}, opt *Project if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/badges/render", pathEscape(project)) + u := fmt.Sprintf("projects/%s/badges/render", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_clusters.go b/vendor/github.com/xanzy/go-gitlab/project_clusters.go index 17fd09bb7..f515821a7 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_clusters.go +++ b/vendor/github.com/xanzy/go-gitlab/project_clusters.go @@ -82,7 +82,7 @@ func (s *ProjectClustersService) ListClusters(pid interface{}, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/clusters", pathEscape(project)) + u := fmt.Sprintf("projects/%s/clusters", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -107,7 +107,7 @@ func (s *ProjectClustersService) GetCluster(pid interface{}, cluster int, option if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/clusters/%d", pathEscape(project), cluster) + u := fmt.Sprintf("projects/%s/clusters/%d", PathEscape(project), cluster) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -155,7 +155,7 @@ func (s *ProjectClustersService) AddCluster(pid interface{}, opt *AddClusterOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/clusters/user", pathEscape(project)) + u := fmt.Sprintf("projects/%s/clusters/user", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -200,7 +200,7 @@ func (s *ProjectClustersService) EditCluster(pid interface{}, cluster int, opt * if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/clusters/%d", pathEscape(project), cluster) + u := fmt.Sprintf("projects/%s/clusters/%d", PathEscape(project), cluster) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { 
@@ -225,7 +225,7 @@ func (s *ProjectClustersService) DeleteCluster(pid interface{}, cluster int, opt if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/clusters/%d", pathEscape(project), cluster) + u := fmt.Sprintf("projects/%s/clusters/%d", PathEscape(project), cluster) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_import_export.go b/vendor/github.com/xanzy/go-gitlab/project_import_export.go index 8c89f4e9d..67f8a620a 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_import_export.go +++ b/vendor/github.com/xanzy/go-gitlab/project_import_export.go @@ -19,6 +19,7 @@ package gitlab import ( "bytes" "fmt" + "io" "net/http" "time" ) @@ -96,7 +97,7 @@ func (s *ProjectImportExportService) ScheduleExport(pid interface{}, opt *Schedu if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/export", pathEscape(project)) + u := fmt.Sprintf("projects/%s/export", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -115,7 +116,7 @@ func (s *ProjectImportExportService) ExportStatus(pid interface{}, options ...Re if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/export", pathEscape(project)) + u := fmt.Sprintf("projects/%s/export", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -140,7 +141,7 @@ func (s *ProjectImportExportService) ExportDownload(pid interface{}, options ... if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/export/download", pathEscape(project)) + u := fmt.Sprintf("projects/%s/export/download", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -162,18 +163,26 @@ func (s *ProjectImportExportService) ExportDownload(pid interface{}, options ... // https://docs.gitlab.com/ce/api/project_import_export.html#import-a-file type ImportFileOptions struct { Namespace *string `url:"namespace,omitempty" json:"namespace,omitempty"` - File *string `url:"file,omitempty" json:"file,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` Path *string `url:"path,omitempty" json:"path,omitempty"` Overwrite *bool `url:"overwrite,omitempty" json:"overwrite,omitempty"` OverrideParams *CreateProjectOptions `url:"override_params,omitempty" json:"override_params,omitempty"` } -// ImportFile import a file. +// Import a project from an archive file. 
// // GitLab API docs: // https://docs.gitlab.com/ce/api/project_import_export.html#import-a-file -func (s *ProjectImportExportService) ImportFile(opt *ImportFileOptions, options ...RequestOptionFunc) (*ImportStatus, *Response, error) { - req, err := s.client.NewRequest(http.MethodPost, "projects/import", opt, options) +func (s *ProjectImportExportService) ImportFromFile(archive io.Reader, opt *ImportFileOptions, options ...RequestOptionFunc) (*ImportStatus, *Response, error) { + req, err := s.client.UploadRequest( + http.MethodPost, + "projects/import", + archive, + "archive.tar.gz", + UploadFile, + opt, + options, + ) if err != nil { return nil, nil, err } @@ -196,7 +205,7 @@ func (s *ProjectImportExportService) ImportStatus(pid interface{}, options ...Re if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/import", pathEscape(project)) + u := fmt.Sprintf("projects/%s/import", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_iterations.go b/vendor/github.com/xanzy/go-gitlab/project_iterations.go new file mode 100644 index 000000000..78583efab --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/project_iterations.go @@ -0,0 +1,90 @@ +// +// Copyright 2022, Daniel Steinke +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/http" + "time" +) + +// IterationsAPI handles communication with the project iterations related +// methods of the GitLab API +// +// GitLab API docs: https://docs.gitlab.com/ee/api/iterations.html +type ProjectIterationsService struct { + client *Client +} + +// ProjectIteration represents a GitLab project iteration. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/iterations.html +type ProjectIteration struct { + ID int `json:"id"` + IID int `json:"iid"` + Sequence int `json:"sequence"` + GroupID int `json:"group_id"` + Title string `json:"title"` + Description string `json:"description"` + State int `json:"state"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + DueDate *ISOTime `json:"due_date"` + StartDate *ISOTime `json:"start_date"` + WebURL string `json:"web_url"` +} + +func (i ProjectIteration) String() string { + return Stringify(i) +} + +// ListProjectIterationsOptions contains the available ListProjectIterations() +// options +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_iterations.html#list-project-iterations +type ListProjectIterationsOptions struct { + ListOptions + State *string `url:"state,omitempty" json:"state,omitempty"` + Search *string `url:"search,omitempty" json:"search,omitempty"` + IncludeAncestors *bool `url:"include_ancestors,omitempty" json:"include_ancestors,omitempty"` +} + +// ListProjectIterations returns a list of projects iterations. 
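ImportFile is replaced above by ImportFromFile, which streams the export archive through an io.Reader (via UploadRequest) instead of pointing at a file in the options. A hedged sketch, not part of the patch; the archive filename, namespace, and path are illustrative:

// assumes: import ( "fmt"; "os"; gitlab "github.com/xanzy/go-gitlab" )
func importProjectArchive(git *gitlab.Client) error {
	archive, err := os.Open("project-export.tar.gz") // illustrative path
	if err != nil {
		return err
	}
	defer archive.Close()

	status, _, err := git.ProjectImportExport.ImportFromFile(archive, &gitlab.ImportFileOptions{
		Namespace: gitlab.String("my-group"),          // illustrative
		Path:      gitlab.String("imported-project"),  // illustrative
	})
	if err != nil {
		return err
	}
	fmt.Printf("import scheduled: %+v\n", status)
	return nil
}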
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_iterations.html#list-project-iterations +func (i *ProjectIterationsService) ListProjectIterations(pid interface{}, opt *ListProjectIterationsOptions, options ...RequestOptionFunc) ([]*ProjectIteration, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/iterations", PathEscape(project)) + + req, err := i.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var pis []*ProjectIteration + resp, err := i.client.Do(req, &pis) + if err != nil { + return nil, resp, err + } + + return pis, resp, err +} diff --git a/vendor/github.com/xanzy/go-gitlab/project_managed_licenses.go b/vendor/github.com/xanzy/go-gitlab/project_managed_licenses.go index 186bdef57..25c8ef6c8 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_managed_licenses.go +++ b/vendor/github.com/xanzy/go-gitlab/project_managed_licenses.go @@ -47,7 +47,7 @@ func (s *ManagedLicensesService) ListManagedLicenses(pid interface{}, options .. if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/managed_licenses", pathEscape(project)) + u := fmt.Sprintf("projects/%s/managed_licenses", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -76,7 +76,7 @@ func (s *ManagedLicensesService) GetManagedLicense(pid, mlid interface{}, option if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/managed_licenses/%s", pathEscape(project), pathEscape(license)) + u := fmt.Sprintf("projects/%s/managed_licenses/%s", PathEscape(project), PathEscape(license)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -110,7 +110,7 @@ func (s *ManagedLicensesService) AddManagedLicense(pid interface{}, opt *AddMana if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/managed_licenses", pathEscape(project)) + u := fmt.Sprintf("projects/%s/managed_licenses", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -139,7 +139,7 @@ func (s *ManagedLicensesService) DeleteManagedLicense(pid, mlid interface{}, opt if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/managed_licenses/%s", pathEscape(project), pathEscape(license)) + u := fmt.Sprintf("projects/%s/managed_licenses/%s", PathEscape(project), PathEscape(license)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -171,7 +171,7 @@ func (s *ManagedLicensesService) EditManagedLicense(pid, mlid interface{}, opt * if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/managed_licenses/%s", pathEscape(project), pathEscape(license)) + u := fmt.Sprintf("projects/%s/managed_licenses/%s", PathEscape(project), PathEscape(license)) req, err := s.client.NewRequest(http.MethodPatch, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_members.go b/vendor/github.com/xanzy/go-gitlab/project_members.go index 4f00a11c1..ad588bdf7 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_members.go +++ b/vendor/github.com/xanzy/go-gitlab/project_members.go @@ -50,7 +50,7 @@ func (s *ProjectMembersService) ListProjectMembers(pid interface{}, opt *ListPro if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/members", pathEscape(project)) + u := fmt.Sprintf("projects/%s/members", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, 
opt, options) if err != nil { @@ -77,7 +77,7 @@ func (s *ProjectMembersService) ListAllProjectMembers(pid interface{}, opt *List if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/members/all", pathEscape(project)) + u := fmt.Sprintf("projects/%s/members/all", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -102,7 +102,7 @@ func (s *ProjectMembersService) GetProjectMember(pid interface{}, user int, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/members/%d", pathEscape(project), user) + u := fmt.Sprintf("projects/%s/members/%d", PathEscape(project), user) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -127,7 +127,7 @@ func (s *ProjectMembersService) GetInheritedProjectMember(pid interface{}, user if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/members/all/%d", pathEscape(project), user) + u := fmt.Sprintf("projects/%s/members/all/%d", PathEscape(project), user) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -165,7 +165,7 @@ func (s *ProjectMembersService) AddProjectMember(pid interface{}, opt *AddProjec if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/members", pathEscape(project)) + u := fmt.Sprintf("projects/%s/members", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -199,7 +199,7 @@ func (s *ProjectMembersService) EditProjectMember(pid interface{}, user int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/members/%d", pathEscape(project), user) + u := fmt.Sprintf("projects/%s/members/%d", PathEscape(project), user) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -224,7 +224,7 @@ func (s *ProjectMembersService) DeleteProjectMember(pid interface{}, user int, o if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/members/%d", pathEscape(project), user) + u := fmt.Sprintf("projects/%s/members/%d", PathEscape(project), user) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_mirror.go b/vendor/github.com/xanzy/go-gitlab/project_mirror.go index 2560a3c4d..20b4f7b60 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_mirror.go +++ b/vendor/github.com/xanzy/go-gitlab/project_mirror.go @@ -58,7 +58,7 @@ func (s *ProjectMirrorService) ListProjectMirror(pid interface{}, opt *ListProje if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/remote_mirrors", pathEscape(project)) + u := fmt.Sprintf("projects/%s/remote_mirrors", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -95,7 +95,7 @@ func (s *ProjectMirrorService) AddProjectMirror(pid interface{}, opt *AddProject if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/remote_mirrors", pathEscape(project)) + u := fmt.Sprintf("projects/%s/remote_mirrors", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -131,7 +131,7 @@ func (s *ProjectMirrorService) EditProjectMirror(pid interface{}, mirror int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/remote_mirrors/%d", pathEscape(project), mirror) + u := fmt.Sprintf("projects/%s/remote_mirrors/%d", PathEscape(project), mirror) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if 
err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_snippets.go b/vendor/github.com/xanzy/go-gitlab/project_snippets.go index 1ab5e2b2e..82db71931 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_snippets.go +++ b/vendor/github.com/xanzy/go-gitlab/project_snippets.go @@ -43,7 +43,7 @@ func (s *ProjectSnippetsService) ListSnippets(pid interface{}, opt *ListProjectS if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets", pathEscape(project)) + u := fmt.Sprintf("projects/%s/snippets", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -68,7 +68,7 @@ func (s *ProjectSnippetsService) GetSnippet(pid interface{}, snippet int, option if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -94,6 +94,7 @@ type CreateProjectSnippetOptions struct { Description *string `url:"description,omitempty" json:"description,omitempty"` Content *string `url:"content,omitempty" json:"content,omitempty"` Visibility *VisibilityValue `url:"visibility,omitempty" json:"visibility,omitempty"` + Files *[]*SnippetFile `url:"files,omitempty" json:"files,omitempty"` } // CreateSnippet creates a new project snippet. The user must have permission @@ -106,7 +107,7 @@ func (s *ProjectSnippetsService) CreateSnippet(pid interface{}, opt *CreateProje if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets", pathEscape(project)) + u := fmt.Sprintf("projects/%s/snippets", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -144,7 +145,7 @@ func (s *ProjectSnippetsService) UpdateSnippet(pid interface{}, snippet int, opt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -171,7 +172,7 @@ func (s *ProjectSnippetsService) DeleteSnippet(pid interface{}, snippet int, opt if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -190,7 +191,7 @@ func (s *ProjectSnippetsService) SnippetContent(pid interface{}, snippet int, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/snippets/%d/raw", pathEscape(project), snippet) + u := fmt.Sprintf("projects/%s/snippets/%d/raw", PathEscape(project), snippet) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/project_variables.go b/vendor/github.com/xanzy/go-gitlab/project_variables.go index 69ce4873c..5f121ff6c 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_variables.go +++ b/vendor/github.com/xanzy/go-gitlab/project_variables.go @@ -48,6 +48,11 @@ func (v ProjectVariable) String() string { return Stringify(v) } +//VariableFilter filters available for project variable related functions +type VariableFilter struct { + EnvironmentScope string `url:"environment_scope, omitempty" json:"environment_scope,omitempty"` +} + // ListProjectVariablesOptions represents the available 
options for listing variables // in a project. // @@ -64,7 +69,7 @@ func (s *ProjectVariablesService) ListVariables(pid interface{}, opt *ListProjec if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/variables", pathEscape(project)) + u := fmt.Sprintf("projects/%s/variables", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -80,18 +85,27 @@ func (s *ProjectVariablesService) ListVariables(pid interface{}, opt *ListProjec return vs, resp, err } +// GetProjectVariableOptions represents the available GetVariable() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#show-variable-details +type GetProjectVariableOptions struct { + Filter *VariableFilter `url:"filter,omitempty" json:"filter,omitempty"` +} + // GetVariable gets a variable. // // GitLab API docs: // https://docs.gitlab.com/ee/api/project_level_variables.html#show-variable-details -func (s *ProjectVariablesService) GetVariable(pid interface{}, key string, options ...RequestOptionFunc) (*ProjectVariable, *Response, error) { +func (s *ProjectVariablesService) GetVariable(pid interface{}, key string, opt *GetProjectVariableOptions, options ...RequestOptionFunc) (*ProjectVariable, *Response, error) { project, err := parseID(pid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/variables/%s", pathEscape(project), url.PathEscape(key)) + u := fmt.Sprintf("projects/%s/variables/%s", PathEscape(project), url.PathEscape(key)) - req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { return nil, nil, err } @@ -128,7 +142,7 @@ func (s *ProjectVariablesService) CreateVariable(pid interface{}, opt *CreatePro if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/variables", pathEscape(project)) + u := fmt.Sprintf("projects/%s/variables", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -155,6 +169,7 @@ type UpdateProjectVariableOptions struct { Protected *bool `url:"protected,omitempty" json:"protected,omitempty"` Masked *bool `url:"masked,omitempty" json:"masked,omitempty"` EnvironmentScope *string `url:"environment_scope,omitempty" json:"environment_scope,omitempty"` + Filter *VariableFilter `url:"filter,omitempty" json:"filter,omitempty"` } // UpdateVariable updates a project's variable. @@ -166,7 +181,7 @@ func (s *ProjectVariablesService) UpdateVariable(pid interface{}, key string, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/variables/%s", pathEscape(project), url.PathEscape(key)) + u := fmt.Sprintf("projects/%s/variables/%s", PathEscape(project), url.PathEscape(key)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -182,18 +197,27 @@ func (s *ProjectVariablesService) UpdateVariable(pid interface{}, key string, op return v, resp, err } +// RemoveProjectVariableOptions represents the available RemoveVariable() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#remove-variable +type RemoveProjectVariableOptions struct { + Filter *VariableFilter `url:"filter,omitempty" json:"filter,omitempty"` +} + // RemoveVariable removes a project's variable. 
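GetVariable above (and RemoveVariable in the next hunk) now take an options struct whose Filter field carries the new VariableFilter, so a variable that exists in several environment scopes can be addressed precisely. A sketch under those assumptions, with a made-up project ID, key, and scope:

// assumes: import ( "fmt"; gitlab "github.com/xanzy/go-gitlab" )
func readAndRemoveScopedVariable(git *gitlab.Client) error {
	filter := &gitlab.VariableFilter{EnvironmentScope: "production"}

	v, _, err := git.ProjectVariables.GetVariable(42, "DATABASE_URL",
		&gitlab.GetProjectVariableOptions{Filter: filter})
	if err != nil {
		return err
	}
	fmt.Println("current value:", v.Value)

	// RemoveVariable returns (*Response, error), so only the error is kept here.
	_, err = git.ProjectVariables.RemoveVariable(42, "DATABASE_URL",
		&gitlab.RemoveProjectVariableOptions{Filter: filter})
	return err
}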
// // GitLab API docs: // https://docs.gitlab.com/ee/api/project_level_variables.html#remove-variable -func (s *ProjectVariablesService) RemoveVariable(pid interface{}, key string, options ...RequestOptionFunc) (*Response, error) { +func (s *ProjectVariablesService) RemoveVariable(pid interface{}, key string, opt *RemoveProjectVariableOptions, options ...RequestOptionFunc) (*Response, error) { project, err := parseID(pid) if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/variables/%s", pathEscape(project), url.PathEscape(key)) + u := fmt.Sprintf("projects/%s/variables/%s", PathEscape(project), url.PathEscape(key)) - req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + req, err := s.client.NewRequest(http.MethodDelete, u, opt, options) if err != nil { return nil, err } diff --git a/vendor/github.com/xanzy/go-gitlab/projects.go b/vendor/github.com/xanzy/go-gitlab/projects.go index 0b9f180c3..1cbc1d7e7 100644 --- a/vendor/github.com/xanzy/go-gitlab/projects.go +++ b/vendor/github.com/xanzy/go-gitlab/projects.go @@ -17,14 +17,12 @@ package gitlab import ( - "bytes" "fmt" "io" - "mime/multipart" "net/http" - "os" - "path/filepath" "time" + + retryablehttp "github.com/hashicorp/go-retryablehttp" ) // ProjectsService handles communication with the repositories related methods @@ -65,6 +63,7 @@ type Project struct { ResolveOutdatedDiffDiscussions bool `json:"resolve_outdated_diff_discussions"` ContainerExpirationPolicy *ContainerExpirationPolicy `json:"container_expiration_policy,omitempty"` ContainerRegistryEnabled bool `json:"container_registry_enabled"` + ContainerRegistryAccessLevel AccessControlValue `json:"container_registry_access_level"` CreatedAt *time.Time `json:"created_at,omitempty"` LastActivityAt *time.Time `json:"last_activity_at,omitempty"` CreatorID int `json:"creator_id"` @@ -87,6 +86,7 @@ type Project struct { OnlyAllowMergeIfPipelineSucceeds bool `json:"only_allow_merge_if_pipeline_succeeds"` OnlyAllowMergeIfAllDiscussionsAreResolved bool `json:"only_allow_merge_if_all_discussions_are_resolved"` RemoveSourceBranchAfterMerge bool `json:"remove_source_branch_after_merge"` + PrintingMergeRequestLinkEnabled bool `json:"printing_merge_request_link_enabled"` LFSEnabled bool `json:"lfs_enabled"` RepositoryStorage string `json:"repository_storage"` RequestAccessEnabled bool `json:"request_access_enabled"` @@ -109,8 +109,10 @@ type Project struct { SnippetsAccessLevel AccessControlValue `json:"snippets_access_level"` PagesAccessLevel AccessControlValue `json:"pages_access_level"` OperationsAccessLevel AccessControlValue `json:"operations_access_level"` + AnalyticsAccessLevel AccessControlValue `json:"analytics_access_level"` AutocloseReferencedIssues bool `json:"autoclose_referenced_issues"` SuggestionCommitMessage string `json:"suggestion_commit_message"` + AutoCancelPendingPipelines string `json:"auto_cancel_pending_pipelines"` CIForwardDeploymentEnabled bool `json:"ci_forward_deployment_enabled"` SquashOption SquashOptionValue `json:"squash_option"` SharedWithGroups []struct { @@ -118,15 +120,22 @@ type Project struct { GroupName string `json:"group_name"` GroupAccessLevel int `json:"group_access_level"` } `json:"shared_with_groups"` - Statistics *ProjectStatistics `json:"statistics"` - Links *Links `json:"_links,omitempty"` - CIConfigPath string `json:"ci_config_path"` - CIDefaultGitDepth int `json:"ci_default_git_depth"` - CustomAttributes []*CustomAttribute `json:"custom_attributes"` - ComplianceFrameworks []string `json:"compliance_frameworks"` - 
BuildCoverageRegex string `json:"build_coverage_regex"` - IssuesTemplate string `json:"issues_template"` - MergeRequestsTemplate string `json:"merge_requests_template"` + Statistics *ProjectStatistics `json:"statistics"` + Links *Links `json:"_links,omitempty"` + CIConfigPath string `json:"ci_config_path"` + CIDefaultGitDepth int `json:"ci_default_git_depth"` + CustomAttributes []*CustomAttribute `json:"custom_attributes"` + ComplianceFrameworks []string `json:"compliance_frameworks"` + BuildCoverageRegex string `json:"build_coverage_regex"` + BuildTimeout int `json:"build_timeout"` + IssuesTemplate string `json:"issues_template"` + MergeRequestsTemplate string `json:"merge_requests_template"` + KeepLatestArtifact bool `json:"keep_latest_artifact"` + MergePipelinesEnabled bool `json:"merge_pipelines_enabled"` + MergeTrainsEnabled bool `json:"merge_trains_enabled"` + RestrictUserDefinedVariables bool `json:"restrict_user_defined_variables"` + MergeCommitTemplate string `json:"merge_commit_template"` + SquashCommitTemplate string `json:"squash_commit_template"` } // BasicProject included in other service responses (such as todos). @@ -342,6 +351,31 @@ func (s *ProjectsService) ListUserProjects(uid interface{}, opt *ListProjectsOpt return p, resp, err } +// ListUserStarredProjects gets a list of projects starred by the given user. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/projects.html#list-projects-starred-by-a-user +func (s *ProjectsService) ListUserStarredProjects(uid interface{}, opt *ListProjectsOptions, options ...RequestOptionFunc) ([]*Project, *Response, error) { + user, err := parseID(uid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("users/%s/starred_projects", user) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var p []*Project + resp, err := s.client.Do(req, &p) + if err != nil { + return nil, resp, err + } + + return p, resp, err +} + // ProjectUser represents a GitLab project user. 
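ListUserStarredProjects follows the same pattern as ListUserProjects directly above it. A rough usage sketch, assuming git is an already initialised *gitlab.Client and that fmt plus gitlab "github.com/xanzy/go-gitlab" are imported:

// listStarred prints the projects starred by the given user.
// The user identifier and page size are illustrative only.
func listStarred(git *gitlab.Client, user string) error {
	opt := &gitlab.ListProjectsOptions{
		ListOptions: gitlab.ListOptions{PerPage: 20},
	}
	projects, _, err := git.Projects.ListUserStarredProjects(user, opt)
	if err != nil {
		return err
	}
	for _, p := range projects {
		fmt.Println(p.PathWithNamespace)
	}
	return nil
}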
type ProjectUser struct { ID int `json:"id"` @@ -369,7 +403,7 @@ func (s *ProjectsService) ListProjectsUsers(pid interface{}, opt *ListProjectUse if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/users", pathEscape(project)) + u := fmt.Sprintf("projects/%s/users", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -403,7 +437,7 @@ type ListProjectGroupOptions struct { Search *string `url:"search,omitempty" json:"search,omitempty"` SharedMinAccessLevel *AccessLevelValue `url:"shared_min_access_level,omitempty" json:"shared_min_access_level,omitempty"` SharedVisiableOnly *bool `url:"shared_visible_only,omitempty" json:"shared_visible_only,omitempty"` - SkipGroups []int `url:"skip_groups,omitempty" json:"skip_groups,omitempty"` + SkipGroups *[]int `url:"skip_groups,omitempty" json:"skip_groups,omitempty"` WithShared *bool `url:"with_shared,omitempty" json:"with_shared,omitempty"` } @@ -416,7 +450,7 @@ func (s *ProjectsService) ListProjectsGroups(pid interface{}, opt *ListProjectGr if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/groups", pathEscape(project)) + u := fmt.Sprintf("projects/%s/groups", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -445,7 +479,7 @@ func (s *ProjectsService) GetProjectLanguages(pid interface{}, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/languages", pathEscape(project)) + u := fmt.Sprintf("projects/%s/languages", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -480,7 +514,7 @@ func (s *ProjectsService) GetProject(pid interface{}, opt *GetProjectOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s", pathEscape(project)) + u := fmt.Sprintf("projects/%s", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -541,7 +575,7 @@ func (s *ProjectsService) GetProjectEvents(pid interface{}, opt *GetProjectEvent if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/events", pathEscape(project)) + u := fmt.Sprintf("projects/%s/events", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -562,11 +596,13 @@ func (s *ProjectsService) GetProjectEvents(pid interface{}, opt *GetProjectEvent // GitLab API docs: https://docs.gitlab.com/ee/api/projects.html#create-project type CreateProjectOptions struct { AllowMergeOnSkippedPipeline *bool `url:"allow_merge_on_skipped_pipeline,omitempty" json:"allow_merge_on_skipped_pipeline,omitempty"` + AnalyticsAccessLevel *AccessControlValue `url:"analytics_access_level,omitempty" json:"analytics_access_level,omitempty"` ApprovalsBeforeMerge *int `url:"approvals_before_merge,omitempty" json:"approvals_before_merge,omitempty"` AutoCancelPendingPipelines *string `url:"auto_cancel_pending_pipelines,omitempty" json:"auto_cancel_pending_pipelines,omitempty"` AutoDevopsDeployStrategy *string `url:"auto_devops_deploy_strategy,omitempty" json:"auto_devops_deploy_strategy,omitempty"` AutoDevopsEnabled *bool `url:"auto_devops_enabled,omitempty" json:"auto_devops_enabled,omitempty"` AutocloseReferencedIssues *bool `url:"autoclose_referenced_issues,omitempty" json:"autoclose_referenced_issues,omitempty"` + Avatar *ProjectAvatar `url:"-" json:"-"` BuildCoverageRegex *string `url:"build_coverage_regex,omitempty" json:"build_coverage_regex,omitempty"` 
BuildGitStrategy *string `url:"build_git_strategy,omitempty" json:"build_git_strategy,omitempty"` BuildTimeout *int `url:"build_timeout,omitempty" json:"build_timeout,omitempty"` @@ -584,8 +620,11 @@ type CreateProjectOptions struct { InitializeWithReadme *bool `url:"initialize_with_readme,omitempty" json:"initialize_with_readme,omitempty"` IssuesAccessLevel *AccessControlValue `url:"issues_access_level,omitempty" json:"issues_access_level,omitempty"` LFSEnabled *bool `url:"lfs_enabled,omitempty" json:"lfs_enabled,omitempty"` + MergeCommitTemplate *string `url:"merge_commit_template,omitempty" json:"merge_commit_template,omitempty"` MergeMethod *MergeMethodValue `url:"merge_method,omitempty" json:"merge_method,omitempty"` + MergePipelinesEnabled *bool `url:"merge_pipelines_enabled,omitempty" json:"merge_pipelines_enabled,omitempty"` MergeRequestsAccessLevel *AccessControlValue `url:"merge_requests_access_level,omitempty" json:"merge_requests_access_level,omitempty"` + MergeTrainsEnabled *bool `url:"merge_trains_enabled,omitempty" json:"merge_trains_enabled,omitempty"` Mirror *bool `url:"mirror,omitempty" json:"mirror,omitempty"` MirrorTriggerBuilds *bool `url:"mirror_trigger_builds,omitempty" json:"mirror_trigger_builds,omitempty"` Name *string `url:"name,omitempty" json:"name,omitempty"` @@ -596,17 +635,18 @@ type CreateProjectOptions struct { PackagesEnabled *bool `url:"packages_enabled,omitempty" json:"packages_enabled,omitempty"` PagesAccessLevel *AccessControlValue `url:"pages_access_level,omitempty" json:"pages_access_level,omitempty"` Path *string `url:"path,omitempty" json:"path,omitempty"` - PrintingMergeRequestLinkEnabled *bool `url:"printing_merge_request_link_enabled,omitempty" json:"printing_merge_request_link_enabled,omitempty"` PublicBuilds *bool `url:"public_builds,omitempty" json:"public_builds,omitempty"` RemoveSourceBranchAfterMerge *bool `url:"remove_source_branch_after_merge,omitempty" json:"remove_source_branch_after_merge,omitempty"` + PrintingMergeRequestLinkEnabled *bool `url:"printing_merge_request_link_enabled,omitempty" json:"printing_merge_request_link_enabled,omitempty"` RepositoryAccessLevel *AccessControlValue `url:"repository_access_level,omitempty" json:"repository_access_level,omitempty"` RepositoryStorage *string `url:"repository_storage,omitempty" json:"repository_storage,omitempty"` RequestAccessEnabled *bool `url:"request_access_enabled,omitempty" json:"request_access_enabled,omitempty"` RequirementsAccessLevel *AccessControlValue `url:"requirements_access_level,omitempty" json:"requirements_access_level,omitempty"` ResolveOutdatedDiffDiscussions *bool `url:"resolve_outdated_diff_discussions,omitempty" json:"resolve_outdated_diff_discussions,omitempty"` SharedRunnersEnabled *bool `url:"shared_runners_enabled,omitempty" json:"shared_runners_enabled,omitempty"` - ShowDefaultAwardEmojis *bool `url:"show_default_aware_emojis,omitempty" json:"show_default_aware_emojis,omitempty"` + ShowDefaultAwardEmojis *bool `url:"show_default_award_emojis,omitempty" json:"show_default_award_emojis,omitempty"` SnippetsAccessLevel *AccessControlValue `url:"snippets_access_level,omitempty" json:"snippets_access_level,omitempty"` + SquashCommitTemplate *string `url:"squash_commit_template,omitempty" json:"squash_commit_template,omitempty"` SquashOption *SquashOptionValue `url:"squash_option,omitempty" json:"squash_option,omitempty"` SuggestionCommitMessage *string `url:"suggestion_commit_message,omitempty" json:"suggestion_commit_message,omitempty"` TemplateName *string 
`url:"template_name,omitempty" json:"template_name,omitempty"` @@ -646,6 +686,11 @@ type ContainerExpirationPolicyAttributes struct { NameRegex *string `url:"name_regex,omitempty" json:"name_regex,omitempty"` } +type ProjectAvatar struct { + Filename string + Image io.Reader +} + // CreateProject creates a new project owned by the authenticated user. // // GitLab API docs: https://docs.gitlab.com/ce/api/projects.html#create-project @@ -657,7 +702,22 @@ func (s *ProjectsService) CreateProject(opt *CreateProjectOptions, options ...Re opt.ContainerExpirationPolicyAttributes.NameRegexDelete } - req, err := s.client.NewRequest(http.MethodPost, "projects", opt, options) + var err error + var req *retryablehttp.Request + + if opt.Avatar == nil { + req, err = s.client.NewRequest(http.MethodPost, "projects", opt, options) + } else { + req, err = s.client.UploadRequest( + http.MethodPost, + "projects", + opt.Avatar.Image, + opt.Avatar.Filename, + UploadAvatar, + opt, + options, + ) + } if err != nil { return nil, nil, err } @@ -691,8 +751,23 @@ func (s *ProjectsService) CreateProjectForUser(user int, opt *CreateProjectForUs opt.ContainerExpirationPolicyAttributes.NameRegexDelete } + var err error + var req *retryablehttp.Request u := fmt.Sprintf("projects/user/%d", user) - req, err := s.client.NewRequest(http.MethodPost, u, opt, options) + + if opt.Avatar == nil { + req, err = s.client.NewRequest(http.MethodPost, u, opt, options) + } else { + req, err = s.client.UploadRequest( + http.MethodPost, + u, + opt.Avatar.Image, + opt.Avatar.Filename, + UploadAvatar, + opt, + options, + ) + } if err != nil { return nil, nil, err } @@ -711,11 +786,13 @@ func (s *ProjectsService) CreateProjectForUser(user int, opt *CreateProjectForUs // GitLab API docs: https://docs.gitlab.com/ce/api/projects.html#edit-project type EditProjectOptions struct { AllowMergeOnSkippedPipeline *bool `url:"allow_merge_on_skipped_pipeline,omitempty" json:"allow_merge_on_skipped_pipeline,omitempty"` + AnalyticsAccessLevel *AccessControlValue `url:"analytics_access_level,omitempty" json:"analytics_access_level,omitempty"` ApprovalsBeforeMerge *int `url:"approvals_before_merge,omitempty" json:"approvals_before_merge,omitempty"` AutoCancelPendingPipelines *string `url:"auto_cancel_pending_pipelines,omitempty" json:"auto_cancel_pending_pipelines,omitempty"` AutoDevopsDeployStrategy *string `url:"auto_devops_deploy_strategy,omitempty" json:"auto_devops_deploy_strategy,omitempty"` AutoDevopsEnabled *bool `url:"auto_devops_enabled,omitempty" json:"auto_devops_enabled,omitempty"` AutocloseReferencedIssues *bool `url:"autoclose_referenced_issues,omitempty" json:"autoclose_referenced_issues,omitempty"` + Avatar *ProjectAvatar `url:"-" json:"-"` BuildCoverageRegex *string `url:"build_coverage_regex,omitempty" json:"build_coverage_regex,omitempty"` BuildGitStrategy *string `url:"build_git_strategy,omitempty" json:"build_git_strategy,omitempty"` BuildTimeout *int `url:"build_timeout,omitempty" json:"build_timeout,omitempty"` @@ -731,9 +808,13 @@ type EditProjectOptions struct { ForkingAccessLevel *AccessControlValue `url:"forking_access_level,omitempty" json:"forking_access_level,omitempty"` ImportURL *string `url:"import_url,omitempty" json:"import_url,omitempty"` IssuesAccessLevel *AccessControlValue `url:"issues_access_level,omitempty" json:"issues_access_level,omitempty"` + KeepLatestArtifact *bool `url:"keep_latest_artifact,omitempty" json:"keep_latest_artifact,omitempty"` LFSEnabled *bool `url:"lfs_enabled,omitempty" 
json:"lfs_enabled,omitempty"` + MergeCommitTemplate *string `url:"merge_commit_template,omitempty" json:"merge_commit_template,omitempty"` MergeMethod *MergeMethodValue `url:"merge_method,omitempty" json:"merge_method,omitempty"` + MergePipelinesEnabled *bool `url:"merge_pipelines_enabled,omitempty" json:"merge_pipelines_enabled,omitempty"` MergeRequestsAccessLevel *AccessControlValue `url:"merge_requests_access_level,omitempty" json:"merge_requests_access_level,omitempty"` + MergeTrainsEnabled *bool `url:"merge_trains_enabled,omitempty" json:"merge_trains_enabled,omitempty"` Mirror *bool `url:"mirror,omitempty" json:"mirror,omitempty"` MirrorOverwritesDivergedBranches *bool `url:"mirror_overwrites_diverged_branches,omitempty" json:"mirror_overwrites_diverged_branches,omitempty"` MirrorTriggerBuilds *bool `url:"mirror_trigger_builds,omitempty" json:"mirror_trigger_builds,omitempty"` @@ -748,14 +829,17 @@ type EditProjectOptions struct { Path *string `url:"path,omitempty" json:"path,omitempty"` PublicBuilds *bool `url:"public_builds,omitempty" json:"public_builds,omitempty"` RemoveSourceBranchAfterMerge *bool `url:"remove_source_branch_after_merge,omitempty" json:"remove_source_branch_after_merge,omitempty"` + PrintingMergeRequestLinkEnabled *bool `url:"printing_merge_request_link_enabled,omitempty" json:"printing_merge_request_link_enabled,omitempty"` RepositoryAccessLevel *AccessControlValue `url:"repository_access_level,omitempty" json:"repository_access_level,omitempty"` RepositoryStorage *string `url:"repository_storage,omitempty" json:"repository_storage,omitempty"` RequestAccessEnabled *bool `url:"request_access_enabled,omitempty" json:"request_access_enabled,omitempty"` RequirementsAccessLevel *AccessControlValue `url:"requirements_access_level,omitempty" json:"requirements_access_level,omitempty"` ResolveOutdatedDiffDiscussions *bool `url:"resolve_outdated_diff_discussions,omitempty" json:"resolve_outdated_diff_discussions,omitempty"` + RestrictUserDefinedVariables *bool `url:"restrict_user_defined_variables,omitempty" json:"restrict_user_defined_variables,omitempty"` SharedRunnersEnabled *bool `url:"shared_runners_enabled,omitempty" json:"shared_runners_enabled,omitempty"` - ShowDefaultAwardEmojis *bool `url:"show_default_aware_emojis,omitempty" json:"show_default_aware_emojis,omitempty"` + ShowDefaultAwardEmojis *bool `url:"show_default_award_emojis,omitempty" json:"show_default_award_emojis,omitempty"` SnippetsAccessLevel *AccessControlValue `url:"snippets_access_level,omitempty" json:"snippets_access_level,omitempty"` + SquashCommitTemplate *string `url:"squash_commit_template,omitempty" json:"squash_commit_template,omitempty"` SquashOption *SquashOptionValue `url:"squash_option,omitempty" json:"squash_option,omitempty"` SuggestionCommitMessage *string `url:"suggestion_commit_message,omitempty" json:"suggestion_commit_message,omitempty"` Topics *[]string `url:"topics,omitempty" json:"topics,omitempty"` @@ -791,9 +875,23 @@ func (s *ProjectsService) EditProject(pid interface{}, opt *EditProjectOptions, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s", pathEscape(project)) + u := fmt.Sprintf("projects/%s", PathEscape(project)) - req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + var req *retryablehttp.Request + + if opt.Avatar == nil { + req, err = s.client.NewRequest(http.MethodPut, u, opt, options) + } else { + req, err = s.client.UploadRequest( + http.MethodPost, + u, + opt.Avatar.Image, + opt.Avatar.Filename, + UploadAvatar, + opt, + 
options, + ) + } if err != nil { return nil, nil, err } @@ -825,7 +923,7 @@ func (s *ProjectsService) ForkProject(pid interface{}, opt *ForkProjectOptions, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/fork", pathEscape(project)) + u := fmt.Sprintf("projects/%s/fork", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -850,7 +948,7 @@ func (s *ProjectsService) StarProject(pid interface{}, options ...RequestOptionF if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/star", pathEscape(project)) + u := fmt.Sprintf("projects/%s/star", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -875,7 +973,7 @@ func (s *ProjectsService) UnstarProject(pid interface{}, options ...RequestOptio if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/unstar", pathEscape(project)) + u := fmt.Sprintf("projects/%s/unstar", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -901,7 +999,7 @@ func (s *ProjectsService) ArchiveProject(pid interface{}, options ...RequestOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/archive", pathEscape(project)) + u := fmt.Sprintf("projects/%s/archive", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -927,7 +1025,7 @@ func (s *ProjectsService) UnarchiveProject(pid interface{}, options ...RequestOp if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/unarchive", pathEscape(project)) + u := fmt.Sprintf("projects/%s/unarchive", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -952,7 +1050,7 @@ func (s *ProjectsService) DeleteProject(pid interface{}, options ...RequestOptio if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s", pathEscape(project)) + u := fmt.Sprintf("projects/%s", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -979,7 +1077,7 @@ func (s *ProjectsService) ShareProjectWithGroup(pid interface{}, opt *ShareWithG if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/share", pathEscape(project)) + u := fmt.Sprintf("projects/%s/share", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -997,7 +1095,7 @@ func (s *ProjectsService) DeleteSharedProjectFromGroup(pid interface{}, groupID if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/share/%d", pathEscape(project), groupID) + u := fmt.Sprintf("projects/%s/share/%d", PathEscape(project), groupID) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1063,7 +1161,7 @@ func (s *ProjectsService) ListProjectHooks(pid interface{}, opt *ListProjectHook if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/hooks", pathEscape(project)) + u := fmt.Sprintf("projects/%s/hooks", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -1088,7 +1186,7 @@ func (s *ProjectsService) GetProjectHook(pid interface{}, hook int, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/hooks/%d", pathEscape(project), hook) + u := fmt.Sprintf("projects/%s/hooks/%d", PathEscape(project), hook) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1136,7 +1234,7 @@ 
func (s *ProjectsService) AddProjectHook(pid interface{}, opt *AddProjectHookOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/hooks", pathEscape(project)) + u := fmt.Sprintf("projects/%s/hooks", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -1184,7 +1282,7 @@ func (s *ProjectsService) EditProjectHook(pid interface{}, hook int, opt *EditPr if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/hooks/%d", pathEscape(project), hook) + u := fmt.Sprintf("projects/%s/hooks/%d", PathEscape(project), hook) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1210,7 +1308,7 @@ func (s *ProjectsService) DeleteProjectHook(pid interface{}, hook int, options . if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/hooks/%d", pathEscape(project), hook) + u := fmt.Sprintf("projects/%s/hooks/%d", PathEscape(project), hook) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1237,8 +1335,12 @@ type ProjectForkRelation struct { // // GitLab API docs: // https://docs.gitlab.com/ce/api/projects.html#create-a-forked-fromto-relation-between-existing-projects. -func (s *ProjectsService) CreateProjectForkRelation(pid int, fork int, options ...RequestOptionFunc) (*ProjectForkRelation, *Response, error) { - u := fmt.Sprintf("projects/%d/fork/%d", pid, fork) +func (s *ProjectsService) CreateProjectForkRelation(pid interface{}, fork int, options ...RequestOptionFunc) (*ProjectForkRelation, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/fork/%d", PathEscape(project), fork) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -1258,8 +1360,12 @@ func (s *ProjectsService) CreateProjectForkRelation(pid int, fork int, options . // // GitLab API docs: // https://docs.gitlab.com/ce/api/projects.html#delete-an-existing-forked-from-relationship -func (s *ProjectsService) DeleteProjectForkRelation(pid int, options ...RequestOptionFunc) (*Response, error) { - u := fmt.Sprintf("projects/%d/fork", pid) +func (s *ProjectsService) DeleteProjectForkRelation(pid interface{}, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/fork", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1269,7 +1375,7 @@ func (s *ProjectsService) DeleteProjectForkRelation(pid int, options ...RequestO return s.client.Do(req, nil) } -// ProjectFile represents an uploaded project file +// ProjectFile represents an uploaded project file. // // GitLab API docs: https://docs.gitlab.com/ce/api/projects.html#upload-a-file type ProjectFile struct { @@ -1278,57 +1384,69 @@ type ProjectFile struct { Markdown string `json:"markdown"` } -// UploadFile upload a file from disk +// UploadFile uploads a file. 
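CreateProjectForkRelation and DeleteProjectForkRelation now accept any project identifier (numeric ID or namespaced path) instead of only an int. A hedged sketch, reusing an existing *gitlab.Client:

// markAsFork records that the project at forkPath was forked from upstreamID.
// Both identifiers are placeholder values.
func markAsFork(git *gitlab.Client, forkPath string, upstreamID int) error {
	_, _, err := git.Projects.CreateProjectForkRelation(forkPath, upstreamID)
	return err
}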
// // GitLab API docs: https://docs.gitlab.com/ce/api/projects.html#upload-a-file -func (s *ProjectsService) UploadFile(pid interface{}, file string, options ...RequestOptionFunc) (*ProjectFile, *Response, error) { +func (s *ProjectsService) UploadFile(pid interface{}, content io.Reader, filename string, options ...RequestOptionFunc) (*ProjectFile, *Response, error) { project, err := parseID(pid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/uploads", pathEscape(project)) + u := fmt.Sprintf("projects/%s/uploads", PathEscape(project)) - f, err := os.Open(file) - if err != nil { - return nil, nil, err - } - defer f.Close() - - b := &bytes.Buffer{} - w := multipart.NewWriter(b) - - _, filename := filepath.Split(file) - fw, err := w.CreateFormFile("file", filename) + req, err := s.client.UploadRequest( + http.MethodPost, + u, + content, + filename, + UploadFile, + nil, + options, + ) if err != nil { return nil, nil, err } - _, err = io.Copy(fw, f) - if err != nil { - return nil, nil, err - } - w.Close() - - req, err := s.client.NewRequest(http.MethodPost, u, nil, options) - if err != nil { - return nil, nil, err - } - - // Set the buffer as the request body. - if err = req.SetBody(b); err != nil { - return nil, nil, err - } - - // Overwrite the default content type. - req.Header.Set("Content-Type", w.FormDataContentType()) - - uf := &ProjectFile{} - resp, err := s.client.Do(req, uf) + pf := new(ProjectFile) + resp, err := s.client.Do(req, pf) if err != nil { return nil, resp, err } - return uf, resp, nil + return pf, resp, nil +} + +// UploadAvatar uploads an avatar. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/projects.html#upload-a-project-avatar +func (s *ProjectsService) UploadAvatar(pid interface{}, avatar io.Reader, filename string, options ...RequestOptionFunc) (*Project, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s", PathEscape(project)) + + req, err := s.client.UploadRequest( + http.MethodPut, + u, + avatar, + filename, + UploadAvatar, + nil, + options, + ) + if err != nil { + return nil, nil, err + } + + p := new(Project) + resp, err := s.client.Do(req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, err } // ListProjectForks gets a list of project forks. 
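UploadFile now takes an io.Reader and a file name rather than a path on disk, and the new UploadAvatar helper (plus the Avatar field on CreateProjectOptions and EditProjectOptions) follows the same shape. A sketch under that assumption, with os, path/filepath, fmt and gitlab "github.com/xanzy/go-gitlab" imported:

// uploadAttachment streams a local file to the project's uploads endpoint.
// The project path and file path are placeholders.
func uploadAttachment(git *gitlab.Client, project, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	pf, _, err := git.Projects.UploadFile(project, f, filepath.Base(path))
	if err != nil {
		return err
	}
	fmt.Println(pf.Markdown) // ready-to-paste markdown reference
	return nil
}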
@@ -1340,7 +1458,7 @@ func (s *ProjectsService) ListProjectForks(pid interface{}, opt *ListProjectsOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/forks", pathEscape(project)) + u := fmt.Sprintf("projects/%s/forks", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -1386,7 +1504,7 @@ func (s *ProjectsService) GetProjectPushRules(pid interface{}, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/push_rule", pathEscape(project)) + u := fmt.Sprintf("projects/%s/push_rule", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1430,7 +1548,7 @@ func (s *ProjectsService) AddProjectPushRule(pid interface{}, opt *AddProjectPus if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/push_rule", pathEscape(project)) + u := fmt.Sprintf("projects/%s/push_rule", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -1474,7 +1592,7 @@ func (s *ProjectsService) EditProjectPushRule(pid interface{}, opt *EditProjectP if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/push_rule", pathEscape(project)) + u := fmt.Sprintf("projects/%s/push_rule", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1501,7 +1619,7 @@ func (s *ProjectsService) DeleteProjectPushRule(pid interface{}, options ...Requ if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/push_rule", pathEscape(project)) + u := fmt.Sprintf("projects/%s/push_rule", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1535,7 +1653,7 @@ func (s *ProjectsService) GetApprovalConfiguration(pid interface{}, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/approvals", pathEscape(project)) + u := fmt.Sprintf("projects/%s/approvals", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1574,7 +1692,7 @@ func (s *ProjectsService) ChangeApprovalConfiguration(pid interface{}, opt *Chan if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/approvals", pathEscape(project)) + u := fmt.Sprintf("projects/%s/approvals", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -1599,7 +1717,7 @@ func (s *ProjectsService) GetProjectApprovalRules(pid interface{}, options ...Re if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/approval_rules", pathEscape(project)) + u := fmt.Sprintf("projects/%s/approval_rules", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1622,10 +1740,10 @@ func (s *ProjectsService) GetProjectApprovalRules(pid interface{}, options ...Re // https://docs.gitlab.com/ee/api/merge_request_approvals.html#create-project-level-rules type CreateProjectLevelRuleOptions struct { ApprovalsRequired *int `url:"approvals_required,omitempty" json:"approvals_required,omitempty"` - GroupIDs []int `url:"group_ids,omitempty" json:"group_ids,omitempty"` + GroupIDs *[]int `url:"group_ids,omitempty" json:"group_ids,omitempty"` Name *string `url:"name,omitempty" json:"name,omitempty"` - ProtectedBranchIDs []int `url:"protected_branch_ids,omitempty" json:"protected_branch_ids,omitempty"` - UserIDs []int `url:"user_ids,omitempty" json:"user_ids,omitempty"` + 
ProtectedBranchIDs *[]int `url:"protected_branch_ids,omitempty" json:"protected_branch_ids,omitempty"` + UserIDs *[]int `url:"user_ids,omitempty" json:"user_ids,omitempty"` } // CreateProjectApprovalRule creates a new project-level approval rule. @@ -1637,7 +1755,7 @@ func (s *ProjectsService) CreateProjectApprovalRule(pid interface{}, opt *Create if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/approval_rules", pathEscape(project)) + u := fmt.Sprintf("projects/%s/approval_rules", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -1660,10 +1778,10 @@ func (s *ProjectsService) CreateProjectApprovalRule(pid interface{}, opt *Create // https://docs.gitlab.com/ee/api/merge_request_approvals.html#update-project-level-rules type UpdateProjectLevelRuleOptions struct { ApprovalsRequired *int `url:"approvals_required,omitempty" json:"approvals_required,omitempty"` - GroupIDs []int `url:"group_ids,omitempty" json:"group_ids,omitempty"` + GroupIDs *[]int `url:"group_ids,omitempty" json:"group_ids,omitempty"` Name *string `url:"name,omitempty" json:"name,omitempty"` - ProtectedBranchIDs []int `url:"protected_branch_ids,omitempty" json:"protected_branch_ids,omitempty"` - UserIDs []int `url:"user_ids,omitempty" json:"user_ids,omitempty"` + ProtectedBranchIDs *[]int `url:"protected_branch_ids,omitempty" json:"protected_branch_ids,omitempty"` + UserIDs *[]int `url:"user_ids,omitempty" json:"user_ids,omitempty"` } // UpdateProjectApprovalRule updates an existing approval rule with new options. @@ -1675,7 +1793,7 @@ func (s *ProjectsService) UpdateProjectApprovalRule(pid interface{}, approvalRul if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/approval_rules/%d", pathEscape(project), approvalRule) + u := fmt.Sprintf("projects/%s/approval_rules/%d", PathEscape(project), approvalRule) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1700,7 +1818,7 @@ func (s *ProjectsService) DeleteProjectApprovalRule(pid interface{}, approvalRul if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/approval_rules/%d", pathEscape(project), approvalRule) + u := fmt.Sprintf("projects/%s/approval_rules/%d", PathEscape(project), approvalRule) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1716,8 +1834,8 @@ func (s *ProjectsService) DeleteProjectApprovalRule(pid interface{}, approvalRul // GitLab API docs: // https://docs.gitlab.com/ee/api/merge_request_approvals.html#change-allowed-approvers type ChangeAllowedApproversOptions struct { - ApproverGroupIDs []int `url:"approver_group_ids,omitempty" json:"approver_group_ids,omitempty"` - ApproverIDs []int `url:"approver_ids,omitempty" json:"approver_ids,omitempty"` + ApproverGroupIDs *[]int `url:"approver_group_ids,omitempty" json:"approver_group_ids,omitempty"` + ApproverIDs *[]int `url:"approver_ids,omitempty" json:"approver_ids,omitempty"` } // ChangeAllowedApprovers updates the list of approvers and approver groups. 
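Because GroupIDs, ProtectedBranchIDs and UserIDs are now pointers to slices, callers have to take the address of the slice literal. For example, a sketch with placeholder IDs:

// requireSecurityApprovals creates a project-level approval rule;
// the rule name, approval count and user IDs are illustrative.
func requireSecurityApprovals(git *gitlab.Client, project string) error {
	_, _, err := git.Projects.CreateProjectApprovalRule(project, &gitlab.CreateProjectLevelRuleOptions{
		Name:              gitlab.String("security"),
		ApprovalsRequired: gitlab.Int(2),
		UserIDs:           &[]int{42, 43},
	})
	return err
}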
@@ -1729,7 +1847,7 @@ func (s *ProjectsService) ChangeAllowedApprovers(pid interface{}, opt *ChangeAll if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/approvers", pathEscape(project)) + u := fmt.Sprintf("projects/%s/approvers", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1754,7 +1872,7 @@ func (s *ProjectsService) StartMirroringProject(pid interface{}, options ...Requ if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/mirror/pull", pathEscape(project)) + u := fmt.Sprintf("projects/%s/mirror/pull", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -1784,7 +1902,7 @@ func (s *ProjectsService) TransferProject(pid interface{}, opt *TransferProjectO if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/transfer", pathEscape(project)) + u := fmt.Sprintf("projects/%s/transfer", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/protected_branches.go b/vendor/github.com/xanzy/go-gitlab/protected_branches.go index 11ef66ce2..ddcf8ea10 100644 --- a/vendor/github.com/xanzy/go-gitlab/protected_branches.go +++ b/vendor/github.com/xanzy/go-gitlab/protected_branches.go @@ -73,7 +73,7 @@ func (s *ProtectedBranchesService) ListProtectedBranches(pid interface{}, opt *L if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_branches", pathEscape(project)) + u := fmt.Sprintf("projects/%s/protected_branches", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -98,7 +98,7 @@ func (s *ProtectedBranchesService) GetProtectedBranch(pid interface{}, branch st if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_branches/%s", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/protected_branches/%s", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -120,15 +120,15 @@ func (s *ProtectedBranchesService) GetProtectedBranch(pid interface{}, branch st // GitLab API docs: // https://docs.gitlab.com/ce/api/protected_branches.html#protect-repository-branches type ProtectRepositoryBranchesOptions struct { - Name *string `url:"name,omitempty" json:"name,omitempty"` - PushAccessLevel *AccessLevelValue `url:"push_access_level,omitempty" json:"push_access_level,omitempty"` - MergeAccessLevel *AccessLevelValue `url:"merge_access_level,omitempty" json:"merge_access_level,omitempty"` - UnprotectAccessLevel *AccessLevelValue `url:"unprotect_access_level,omitempty" json:"unprotect_access_level,omitempty"` - AllowForcePush *bool `url:"allow_force_push,omitempty" json:"allow_force_push,omitempty"` - AllowedToPush []*BranchPermissionOptions `url:"allowed_to_push,omitempty" json:"allowed_to_push,omitempty"` - AllowedToMerge []*BranchPermissionOptions `url:"allowed_to_merge,omitempty" json:"allowed_to_merge,omitempty"` - AllowedToUnprotect []*BranchPermissionOptions `url:"allowed_to_unprotect,omitempty" json:"allowed_to_unprotect,omitempty"` - CodeOwnerApprovalRequired *bool `url:"code_owner_approval_required,omitempty" json:"code_owner_approval_required,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` + PushAccessLevel *AccessLevelValue `url:"push_access_level,omitempty" json:"push_access_level,omitempty"` + MergeAccessLevel 
*AccessLevelValue `url:"merge_access_level,omitempty" json:"merge_access_level,omitempty"` + UnprotectAccessLevel *AccessLevelValue `url:"unprotect_access_level,omitempty" json:"unprotect_access_level,omitempty"` + AllowForcePush *bool `url:"allow_force_push,omitempty" json:"allow_force_push,omitempty"` + AllowedToPush *[]*BranchPermissionOptions `url:"allowed_to_push,omitempty" json:"allowed_to_push,omitempty"` + AllowedToMerge *[]*BranchPermissionOptions `url:"allowed_to_merge,omitempty" json:"allowed_to_merge,omitempty"` + AllowedToUnprotect *[]*BranchPermissionOptions `url:"allowed_to_unprotect,omitempty" json:"allowed_to_unprotect,omitempty"` + CodeOwnerApprovalRequired *bool `url:"code_owner_approval_required,omitempty" json:"code_owner_approval_required,omitempty"` } // BranchPermissionOptions represents a branch permission option. @@ -152,7 +152,7 @@ func (s *ProtectedBranchesService) ProtectRepositoryBranches(pid interface{}, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_branches", pathEscape(project)) + u := fmt.Sprintf("projects/%s/protected_branches", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -178,7 +178,7 @@ func (s *ProtectedBranchesService) UnprotectRepositoryBranches(pid interface{}, if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/protected_branches/%s", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/protected_branches/%s", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -206,7 +206,7 @@ func (s *ProtectedBranchesService) RequireCodeOwnerApprovals(pid interface{}, br if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/protected_branches/%s", pathEscape(project), url.PathEscape(branch)) + u := fmt.Sprintf("projects/%s/protected_branches/%s", PathEscape(project), url.PathEscape(branch)) req, err := s.client.NewRequest(http.MethodPatch, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/protected_environments.go b/vendor/github.com/xanzy/go-gitlab/protected_environments.go index e532ae622..41fdfb1c9 100644 --- a/vendor/github.com/xanzy/go-gitlab/protected_environments.go +++ b/vendor/github.com/xanzy/go-gitlab/protected_environments.go @@ -67,7 +67,7 @@ func (s *ProtectedEnvironmentsService) ListProtectedEnvironments(pid interface{} if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_environments", pathEscape(project)) + u := fmt.Sprintf("projects/%s/protected_environments", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -92,7 +92,7 @@ func (s *ProtectedEnvironmentsService) GetProtectedEnvironment(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_environments/%s", pathEscape(project), pathEscape(environment)) + u := fmt.Sprintf("projects/%s/protected_environments/%s", PathEscape(project), PathEscape(environment)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -114,8 +114,8 @@ func (s *ProtectedEnvironmentsService) GetProtectedEnvironment(pid interface{}, // GitLab API docs: // https://docs.gitlab.com/ee/api/protected_environments.html#protect-repository-environments type ProtectRepositoryEnvironmentsOptions struct { - Name *string `url:"name,omitempty" json:"name,omitempty"` - DeployAccessLevels []*EnvironmentAccessOptions 
`url:"deploy_access_levels,omitempty" json:"deploy_access_levels,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` + DeployAccessLevels *[]*EnvironmentAccessOptions `url:"deploy_access_levels,omitempty" json:"deploy_access_levels,omitempty"` } // EnvironmentAccessOptions represents the options for an access decription for @@ -139,7 +139,7 @@ func (s *ProtectedEnvironmentsService) ProtectRepositoryEnvironments(pid interfa if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_environments", pathEscape(project)) + u := fmt.Sprintf("projects/%s/protected_environments", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -165,7 +165,7 @@ func (s *ProtectedEnvironmentsService) UnprotectEnvironment(pid interface{}, env if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/protected_environments/%s", pathEscape(project), pathEscape(environment)) + u := fmt.Sprintf("projects/%s/protected_environments/%s", PathEscape(project), PathEscape(environment)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/protected_tags.go b/vendor/github.com/xanzy/go-gitlab/protected_tags.go index 628f8a24a..f53c989a6 100644 --- a/vendor/github.com/xanzy/go-gitlab/protected_tags.go +++ b/vendor/github.com/xanzy/go-gitlab/protected_tags.go @@ -64,7 +64,7 @@ func (s *ProtectedTagsService) ListProtectedTags(pid interface{}, opt *ListProte if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_tags", pathEscape(project)) + u := fmt.Sprintf("projects/%s/protected_tags", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -89,7 +89,7 @@ func (s *ProtectedTagsService) GetProtectedTag(pid interface{}, tag string, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_tags/%s", pathEscape(project), pathEscape(tag)) + u := fmt.Sprintf("projects/%s/protected_tags/%s", PathEscape(project), PathEscape(tag)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -125,7 +125,7 @@ func (s *ProtectedTagsService) ProtectRepositoryTags(pid interface{}, opt *Prote if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/protected_tags", pathEscape(project)) + u := fmt.Sprintf("projects/%s/protected_tags", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -151,7 +151,7 @@ func (s *ProtectedTagsService) UnprotectRepositoryTags(pid interface{}, tag stri if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/protected_tags/%s", pathEscape(project), pathEscape(tag)) + u := fmt.Sprintf("projects/%s/protected_tags/%s", PathEscape(project), PathEscape(tag)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/releaselinks.go b/vendor/github.com/xanzy/go-gitlab/releaselinks.go index 5c1624c57..1c74be516 100644 --- a/vendor/github.com/xanzy/go-gitlab/releaselinks.go +++ b/vendor/github.com/xanzy/go-gitlab/releaselinks.go @@ -54,7 +54,7 @@ func (s *ReleaseLinksService) ListReleaseLinks(pid interface{}, tagName string, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases/%s/assets/links", pathEscape(project), pathEscape(tagName)) + u := fmt.Sprintf("projects/%s/releases/%s/assets/links", PathEscape(project), PathEscape(tagName)) req, err := 
s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -79,8 +79,8 @@ func (s *ReleaseLinksService) GetReleaseLink(pid interface{}, tagName string, li return nil, nil, err } u := fmt.Sprintf("projects/%s/releases/%s/assets/links/%d", - pathEscape(project), - pathEscape(tagName), + PathEscape(project), + PathEscape(tagName), link) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) @@ -115,7 +115,7 @@ func (s *ReleaseLinksService) CreateReleaseLink(pid interface{}, tagName string, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases/%s/assets/links", pathEscape(project), pathEscape(tagName)) + u := fmt.Sprintf("projects/%s/releases/%s/assets/links", PathEscape(project), PathEscape(tagName)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -152,8 +152,8 @@ func (s *ReleaseLinksService) UpdateReleaseLink(pid interface{}, tagName string, return nil, nil, err } u := fmt.Sprintf("projects/%s/releases/%s/assets/links/%d", - pathEscape(project), - pathEscape(tagName), + PathEscape(project), + PathEscape(tagName), link) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) @@ -179,8 +179,8 @@ func (s *ReleaseLinksService) DeleteReleaseLink(pid interface{}, tagName string, return nil, nil, err } u := fmt.Sprintf("projects/%s/releases/%s/assets/links/%d", - pathEscape(project), - pathEscape(tagName), + PathEscape(project), + PathEscape(tagName), link, ) diff --git a/vendor/github.com/xanzy/go-gitlab/releases.go b/vendor/github.com/xanzy/go-gitlab/releases.go index 0059fbba9..4c263bad5 100644 --- a/vendor/github.com/xanzy/go-gitlab/releases.go +++ b/vendor/github.com/xanzy/go-gitlab/releases.go @@ -75,7 +75,7 @@ func (s *ReleasesService) ListReleases(pid interface{}, opt *ListReleasesOptions if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases", pathEscape(project)) + u := fmt.Sprintf("projects/%s/releases", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -100,7 +100,7 @@ func (s *ReleasesService) GetRelease(pid interface{}, tagName string, options .. if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases/%s", pathEscape(project), pathEscape(tagName)) + u := fmt.Sprintf("projects/%s/releases/%s", PathEscape(project), PathEscape(tagName)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -116,35 +116,38 @@ func (s *ReleasesService) GetRelease(pid interface{}, tagName string, options .. return r, resp, err } -// ReleaseAssets represents release assets in CreateRelease() options -// -// GitLab API docs: -// https://docs.gitlab.com/ce/api/releases/index.html#create-a-release -type ReleaseAssets struct { - Links []*ReleaseAssetLink `url:"links" json:"links"` -} - -// ReleaseAssetLink represents release asset link in CreateRelease() options -// -// GitLab API docs: -// https://docs.gitlab.com/ce/api/releases/index.html#create-a-release -type ReleaseAssetLink struct { - Name string `url:"name" json:"name"` - URL string `url:"url" json:"url"` -} - // CreateReleaseOptions represents CreateRelease() options. 
// // GitLab API docs: // https://docs.gitlab.com/ce/api/releases/index.html#create-a-release type CreateReleaseOptions struct { - Name *string `url:"name" json:"name"` - TagName *string `url:"tag_name" json:"tag_name"` - Description *string `url:"description" json:"description"` - Ref *string `url:"ref,omitempty" json:"ref,omitempty"` - Milestones []string `url:"milestones,omitempty" json:"milestones,omitempty"` - Assets *ReleaseAssets `url:"assets,omitempty" json:"assets,omitempty"` - ReleasedAt *time.Time `url:"released_at,omitempty" json:"released_at,omitempty"` + Name *string `url:"name" json:"name"` + TagName *string `url:"tag_name" json:"tag_name"` + Description *string `url:"description" json:"description"` + Ref *string `url:"ref,omitempty" json:"ref,omitempty"` + Milestones *[]string `url:"milestones,omitempty" json:"milestones,omitempty"` + Assets *ReleaseAssetsOptions `url:"assets,omitempty" json:"assets,omitempty"` + ReleasedAt *time.Time `url:"released_at,omitempty" json:"released_at,omitempty"` +} + +// ReleaseAssetsOptions represents release assets in CreateRelease() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/releases/index.html#create-a-release +type ReleaseAssetsOptions struct { + Links []*ReleaseAssetLinkOptions `url:"links,omitempty" json:"links,omitempty"` +} + +// ReleaseAssetLinkOptions represents release asset link in CreateRelease() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/releases/index.html#create-a-release +type ReleaseAssetLinkOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + URL *string `url:"url,omitempty" json:"url,omitempty"` + FilePath *string `url:"filepath,omitempty" json:"filepath,omitempty"` + LinkType *LinkTypeValue `url:"link_type,omitempty" json:"link_type,omitempty"` } // CreateRelease creates a release. 
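With Milestones now a *[]string and the asset structs renamed to ReleaseAssetsOptions and ReleaseAssetLinkOptions with pointer fields, creating a release might look like this sketch (all values are placeholders):

// publishRelease creates a release for an existing tag.
func publishRelease(git *gitlab.Client, project string) error {
	_, _, err := git.Releases.CreateRelease(project, &gitlab.CreateReleaseOptions{
		Name:        gitlab.String("v1.2.0"),
		TagName:     gitlab.String("v1.2.0"),
		Description: gitlab.String("Bug fix release"),
		Milestones:  &[]string{"1.2"},
		Assets: &gitlab.ReleaseAssetsOptions{
			Links: []*gitlab.ReleaseAssetLinkOptions{
				{
					Name: gitlab.String("linux-amd64 binary"),
					URL:  gitlab.String("https://example.com/downloads/v1.2.0/app-linux-amd64"),
				},
			},
		},
	})
	return err
}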
@@ -156,7 +159,7 @@ func (s *ReleasesService) CreateRelease(pid interface{}, opts *CreateReleaseOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases", pathEscape(project)) + u := fmt.Sprintf("projects/%s/releases", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opts, options) if err != nil { @@ -179,7 +182,7 @@ func (s *ReleasesService) CreateRelease(pid interface{}, opts *CreateReleaseOpti type UpdateReleaseOptions struct { Name *string `url:"name" json:"name"` Description *string `url:"description" json:"description"` - Milestones []string `url:"milestones,omitempty" json:"milestones,omitempty"` + Milestones *[]string `url:"milestones,omitempty" json:"milestones,omitempty"` ReleasedAt *time.Time `url:"released_at,omitempty" json:"released_at,omitempty"` } @@ -192,7 +195,7 @@ func (s *ReleasesService) UpdateRelease(pid interface{}, tagName string, opts *U if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases/%s", pathEscape(project), pathEscape(tagName)) + u := fmt.Sprintf("projects/%s/releases/%s", PathEscape(project), PathEscape(tagName)) req, err := s.client.NewRequest(http.MethodPut, u, opts, options) if err != nil { @@ -217,7 +220,7 @@ func (s *ReleasesService) DeleteRelease(pid interface{}, tagName string, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/releases/%s", pathEscape(project), pathEscape(tagName)) + u := fmt.Sprintf("projects/%s/releases/%s", PathEscape(project), PathEscape(tagName)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/repositories.go b/vendor/github.com/xanzy/go-gitlab/repositories.go index b9c99cf7d..ea56735e3 100644 --- a/vendor/github.com/xanzy/go-gitlab/repositories.go +++ b/vendor/github.com/xanzy/go-gitlab/repositories.go @@ -67,7 +67,7 @@ func (s *RepositoriesService) ListTree(pid interface{}, opt *ListTreeOptions, op if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/tree", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/tree", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -93,7 +93,7 @@ func (s *RepositoriesService) Blob(pid interface{}, sha string, options ...Reque if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/blobs/%s", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/blobs/%s", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -118,7 +118,7 @@ func (s *RepositoriesService) RawBlobContent(pid interface{}, sha string, option if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/blobs/%s/raw", pathEscape(project), url.PathEscape(sha)) + u := fmt.Sprintf("projects/%s/repository/blobs/%s/raw", PathEscape(project), url.PathEscape(sha)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -152,7 +152,7 @@ func (s *RepositoriesService) Archive(pid interface{}, opt *ArchiveOptions, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/archive", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/archive", PathEscape(project)) // Set an optional format for the archive. 
if opt != nil && opt.Format != nil { @@ -183,7 +183,7 @@ func (s *RepositoriesService) StreamArchive(pid interface{}, w io.Writer, opt *A if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/repository/archive", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/archive", PathEscape(project)) // Set an optional format for the archive. if opt != nil && opt.Format != nil { @@ -233,7 +233,7 @@ func (s *RepositoriesService) Compare(pid interface{}, opt *CompareOptions, opti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/compare", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/compare", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -281,7 +281,7 @@ func (s *RepositoriesService) Contributors(pid interface{}, opt *ListContributor if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/contributors", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/contributors", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -302,7 +302,7 @@ func (s *RepositoriesService) Contributors(pid interface{}, opt *ListContributor // GitLab API docs: // https://docs.gitlab.com/ce/api/repositories.html#merge-base type MergeBaseOptions struct { - Ref []string `url:"refs[],omitempty" json:"refs,omitempty"` + Ref *[]string `url:"refs[],omitempty" json:"refs,omitempty"` } // MergeBase gets the common ancestor for 2 refs (commit SHAs, branch @@ -315,7 +315,7 @@ func (s *RepositoriesService) MergeBase(pid interface{}, opt *MergeBaseOptions, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/merge_base", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/merge_base", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/repository_files.go b/vendor/github.com/xanzy/go-gitlab/repository_files.go index f9e9ab0ea..64189f3e6 100644 --- a/vendor/github.com/xanzy/go-gitlab/repository_files.go +++ b/vendor/github.com/xanzy/go-gitlab/repository_files.go @@ -72,8 +72,8 @@ func (s *RepositoryFilesService) GetFile(pid interface{}, fileName string, opt * } u := fmt.Sprintf( "projects/%s/repository/files/%s", - pathEscape(project), - pathEscape(fileName), + PathEscape(project), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) @@ -110,8 +110,8 @@ func (s *RepositoryFilesService) GetFileMetaData(pid interface{}, fileName strin } u := fmt.Sprintf( "projects/%s/repository/files/%s", - pathEscape(project), - pathEscape(fileName), + PathEscape(project), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodHead, u, opt, options) @@ -187,8 +187,8 @@ func (s *RepositoryFilesService) GetFileBlame(pid interface{}, file string, opt } u := fmt.Sprintf( "projects/%s/repository/files/%s/blame", - pathEscape(project), - pathEscape(file), + PathEscape(project), + PathEscape(file), ) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) @@ -224,8 +224,8 @@ func (s *RepositoryFilesService) GetRawFile(pid interface{}, fileName string, op } u := fmt.Sprintf( "projects/%s/repository/files/%s/raw", - pathEscape(project), - pathEscape(fileName), + PathEscape(project), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) @@ -279,8 +279,8 @@ func (s *RepositoryFilesService) CreateFile(pid 
interface{}, fileName string, op } u := fmt.Sprintf( "projects/%s/repository/files/%s", - pathEscape(project), - pathEscape(fileName), + PathEscape(project), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) @@ -323,8 +323,8 @@ func (s *RepositoryFilesService) UpdateFile(pid interface{}, fileName string, op } u := fmt.Sprintf( "projects/%s/repository/files/%s", - pathEscape(project), - pathEscape(fileName), + PathEscape(project), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) @@ -365,8 +365,8 @@ func (s *RepositoryFilesService) DeleteFile(pid interface{}, fileName string, op } u := fmt.Sprintf( "projects/%s/repository/files/%s", - pathEscape(project), - pathEscape(fileName), + PathEscape(project), + PathEscape(fileName), ) req, err := s.client.NewRequest(http.MethodDelete, u, opt, options) diff --git a/vendor/github.com/xanzy/go-gitlab/repository_submodules.go b/vendor/github.com/xanzy/go-gitlab/repository_submodules.go new file mode 100644 index 000000000..70ac05e3e --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/repository_submodules.go @@ -0,0 +1,93 @@ +// +// Copyright 2021, Sander van Harmelen +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/http" + "time" +) + +// RepositorySubmodulesService handles communication with the repository +// submodules related methods of the GitLab API. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/repository_submodules.html +type RepositorySubmodulesService struct { + client *Client +} + +// SubmoduleCommit represents a GitLab submodule commit. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/repository_submodules.html +type SubmoduleCommit struct { + ID string `json:"id"` + ShortID string `json:"short_id"` + Title string `json:"title"` + AuthorName string `json:"author_name"` + AuthorEmail string `json:"author_email"` + CommitterName string `json:"committer_name"` + CommitterEmail string `json:"committer_email"` + CreatedAt *time.Time `json:"created_at"` + Message string `json:"message"` + ParentIDs []string `json:"parent_ids"` + CommittedDate *time.Time `json:"committed_date"` + AuthoredDate *time.Time `json:"authored_date"` + Status *BuildStateValue `json:"status"` +} + +func (r SubmoduleCommit) String() string { + return Stringify(r) +} + +// UpdateSubmoduleOptions represents the available UpdateSubmodule() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/repository_submodules.html#update-existing-submodule-reference-in-repository +type UpdateSubmoduleOptions struct { + Branch *string `url:"branch,omitempty" json:"branch,omitempty"` + CommitSHA *string `url:"commit_sha,omitempty" json:"commit_sha,omitempty"` + CommitMessage *string `url:"commit_message,omitempty" json:"commit_message,omitempty"` +} + +// UpdateSubmodule updates an existing submodule reference. 
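The new RepositorySubmodulesService above exposes a single UpdateSubmodule call. A hedged sketch of how it might be used; it assumes the service is reachable as client.RepositorySubmodules (that client field is not part of this hunk), reuses the imports and client setup from the earlier releases sketch, and all values are placeholders:

    // updateVendoredLib bumps a submodule reference to a new commit SHA.
    func updateVendoredLib(client *gitlab.Client) error {
        branch := "main"
        sha := "3ddec28ea23acc5caa5d8331a6ecb2a65fc03e88" // placeholder SHA
        msg := "Bump vendor/lib submodule"

        commit, _, err := client.RepositorySubmodules.UpdateSubmodule("group/project", "vendor/lib", &gitlab.UpdateSubmoduleOptions{
            Branch:        &branch,
            CommitSHA:     &sha,
            CommitMessage: &msg,
        })
        if err != nil {
            return err
        }
        fmt.Println("new submodule commit:", commit.ShortID)
        return nil
    }
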
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/repository_submodules.html#update-existing-submodule-reference-in-repository +func (s *RepositorySubmodulesService) UpdateSubmodule(pid interface{}, submodule string, opt *UpdateSubmoduleOptions, options ...RequestOptionFunc) (*SubmoduleCommit, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf( + "projects/%s/repository/submodules/%s", + PathEscape(project), + PathEscape(submodule), + ) + + req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + if err != nil { + return nil, nil, err + } + + sc := new(SubmoduleCommit) + resp, err := s.client.Do(req, sc) + if err != nil { + return nil, resp, err + } + + return sc, resp, err +} diff --git a/vendor/github.com/xanzy/go-gitlab/resource_label_events.go b/vendor/github.com/xanzy/go-gitlab/resource_label_events.go index 2bc93896d..37da156a7 100644 --- a/vendor/github.com/xanzy/go-gitlab/resource_label_events.go +++ b/vendor/github.com/xanzy/go-gitlab/resource_label_events.go @@ -76,7 +76,7 @@ func (s *ResourceLabelEventsService) ListIssueLabelEvents(pid interface{}, issue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/resource_label_events", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/resource_label_events", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -101,7 +101,7 @@ func (s *ResourceLabelEventsService) GetIssueLabelEvent(pid interface{}, issue i if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/resource_label_events/%d", pathEscape(project), issue, event) + u := fmt.Sprintf("projects/%s/issues/%d/resource_label_events/%d", PathEscape(project), issue, event) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -127,7 +127,7 @@ func (s *ResourceLabelEventsService) ListGroupEpicLabelEvents(gid interface{}, e if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/resource_label_events", pathEscape(group), epic) + u := fmt.Sprintf("groups/%s/epics/%d/resource_label_events", PathEscape(group), epic) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -152,7 +152,7 @@ func (s *ResourceLabelEventsService) GetGroupEpicLabelEvent(gid interface{}, epi if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/epics/%d/resource_label_events/%d", pathEscape(group), epic, event) + u := fmt.Sprintf("groups/%s/epics/%d/resource_label_events/%d", PathEscape(group), epic, event) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -178,7 +178,7 @@ func (s *ResourceLabelEventsService) ListMergeRequestsLabelEvents(pid interface{ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_label_events", pathEscape(project), request) + u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_label_events", PathEscape(project), request) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -203,7 +203,7 @@ func (s *ResourceLabelEventsService) GetMergeRequestLabelEvent(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_label_events/%d", pathEscape(project), request, event) + u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_label_events/%d", PathEscape(project), request, event) req, err := 
s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/resource_state_events.go b/vendor/github.com/xanzy/go-gitlab/resource_state_events.go index 38f6d8222..e73ef773e 100644 --- a/vendor/github.com/xanzy/go-gitlab/resource_state_events.go +++ b/vendor/github.com/xanzy/go-gitlab/resource_state_events.go @@ -61,7 +61,7 @@ func (s *ResourceStateEventsService) ListIssueStateEvents(pid interface{}, issue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/resource_state_events", pathEscape(project), issue) + u := fmt.Sprintf("projects/%s/issues/%d/resource_state_events", PathEscape(project), issue) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -86,7 +86,7 @@ func (s *ResourceStateEventsService) GetIssueStateEvent(pid interface{}, issue i if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/issues/%d/resource_state_events/%d", pathEscape(project), issue, event) + u := fmt.Sprintf("projects/%s/issues/%d/resource_state_events/%d", PathEscape(project), issue, event) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -112,7 +112,7 @@ func (s *ResourceStateEventsService) ListMergeStateEvents(pid interface{}, reque if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_state_events", pathEscape(project), request) + u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_state_events", PathEscape(project), request) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -137,7 +137,7 @@ func (s *ResourceStateEventsService) GetMergeRequestStateEvent(pid interface{}, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_state_events/%d", pathEscape(project), request, event) + u := fmt.Sprintf("projects/%s/merge_requests/%d/resource_state_events/%d", PathEscape(project), request, event) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/runners.go b/vendor/github.com/xanzy/go-gitlab/runners.go index 98405fb1c..60c483ab5 100644 --- a/vendor/github.com/xanzy/go-gitlab/runners.go +++ b/vendor/github.com/xanzy/go-gitlab/runners.go @@ -90,10 +90,10 @@ type RunnerDetails struct { // https://docs.gitlab.com/ce/api/runners.html#list-owned-runners type ListRunnersOptions struct { ListOptions - Scope *string `url:"scope,omitempty" json:"scope,omitempty"` - Type *string `url:"type,omitempty" json:"type,omitempty"` - Status *string `url:"status,omitempty" json:"status,omitempty"` - TagList []string `url:"tag_list,comma,omitempty" json:"tag_list,omitempty"` + Scope *string `url:"scope,omitempty" json:"scope,omitempty"` + Type *string `url:"type,omitempty" json:"type,omitempty"` + Status *string `url:"status,omitempty" json:"status,omitempty"` + TagList *[]string `url:"tag_list,comma,omitempty" json:"tag_list,omitempty"` } // ListRunners gets a list of runners accessible by the authenticated user. 
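ListRunnersOptions.TagList above (and the runner option structs that follow) switches from []string to *[]string, while the `tag_list,comma` tag keeps serialising it as a comma-separated query parameter. A hedged sketch of the pointer-to-slice form, reusing the client from the earlier releases sketch; scope and tag names are placeholders:

    // listDockerRunners shows the pointer-to-slice form of TagList after this change.
    func listDockerRunners(client *gitlab.Client) error {
        scope := "active"
        tags := []string{"docker", "linux"}

        runners, _, err := client.Runners.ListRunners(&gitlab.ListRunnersOptions{
            Scope:   &scope,
            TagList: &tags, // previously a plain []string
        })
        if err != nil {
            return err
        }
        fmt.Printf("found %d matching runners\n", len(runners))
        return nil
    }
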
@@ -165,13 +165,13 @@ func (s *RunnersService) GetRunnerDetails(rid interface{}, options ...RequestOpt // GitLab API docs: // https://docs.gitlab.com/ce/api/runners.html#update-runner-39-s-details type UpdateRunnerDetailsOptions struct { - Description *string `url:"description,omitempty" json:"description,omitempty"` - Active *bool `url:"active,omitempty" json:"active,omitempty"` - TagList []string `url:"tag_list[],omitempty" json:"tag_list,omitempty"` - RunUntagged *bool `url:"run_untagged,omitempty" json:"run_untagged,omitempty"` - Locked *bool `url:"locked,omitempty" json:"locked,omitempty"` - AccessLevel *string `url:"access_level,omitempty" json:"access_level,omitempty"` - MaximumTimeout *int `url:"maximum_timeout,omitempty" json:"maximum_timeout,omitempty"` + Description *string `url:"description,omitempty" json:"description,omitempty"` + Active *bool `url:"active,omitempty" json:"active,omitempty"` + TagList *[]string `url:"tag_list[],omitempty" json:"tag_list,omitempty"` + RunUntagged *bool `url:"run_untagged,omitempty" json:"run_untagged,omitempty"` + Locked *bool `url:"locked,omitempty" json:"locked,omitempty"` + AccessLevel *string `url:"access_level,omitempty" json:"access_level,omitempty"` + MaximumTimeout *int `url:"maximum_timeout,omitempty" json:"maximum_timeout,omitempty"` } // UpdateRunnerDetails updates details for a given runner. @@ -271,7 +271,7 @@ func (s *RunnersService) ListProjectRunners(pid interface{}, opt *ListProjectRun if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/runners", pathEscape(project)) + u := fmt.Sprintf("projects/%s/runners", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -305,7 +305,7 @@ func (s *RunnersService) EnableProjectRunner(pid interface{}, opt *EnableProject if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/runners", pathEscape(project)) + u := fmt.Sprintf("projects/%s/runners", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -330,7 +330,7 @@ func (s *RunnersService) DisableProjectRunner(pid interface{}, runner int, optio if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/runners/%d", pathEscape(project), runner) + u := fmt.Sprintf("projects/%s/runners/%d", PathEscape(project), runner) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -346,9 +346,9 @@ func (s *RunnersService) DisableProjectRunner(pid interface{}, runner int, optio // https://docs.gitlab.com/ee/api/runners.html#list-groups-runners type ListGroupsRunnersOptions struct { ListOptions - Type *string `url:"type,omitempty" json:"type,omitempty"` - Status *string `url:"status,omitempty" json:"status,omitempty"` - TagList []string `url:"tag_list,comma,omitempty" json:"tag_list,omitempty"` + Type *string `url:"type,omitempty" json:"type,omitempty"` + Status *string `url:"status,omitempty" json:"status,omitempty"` + TagList *[]string `url:"tag_list,comma,omitempty" json:"tag_list,omitempty"` } // ListGroupsRunners lists all runners (specific and shared) available in the @@ -362,7 +362,7 @@ func (s *RunnersService) ListGroupsRunners(gid interface{}, opt *ListGroupsRunne if err != nil { return nil, nil, err } - u := fmt.Sprintf("groups/%s/runners", pathEscape(group)) + u := fmt.Sprintf("groups/%s/runners", PathEscape(group)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -390,7 +390,7 @@ type RegisterNewRunnerOptions struct { Active *bool 
`url:"active,omitempty" json:"active,omitempty"` Locked *bool `url:"locked,omitempty" json:"locked,omitempty"` RunUntagged *bool `url:"run_untagged,omitempty" json:"run_untagged,omitempty"` - TagList []string `url:"tag_list[],omitempty" json:"tag_list,omitempty"` + TagList *[]string `url:"tag_list[],omitempty" json:"tag_list,omitempty"` MaximumTimeout *int `url:"maximum_timeout,omitempty" json:"maximum_timeout,omitempty"` } diff --git a/vendor/github.com/xanzy/go-gitlab/search.go b/vendor/github.com/xanzy/go-gitlab/search.go index 972a37d81..86ff83326 100644 --- a/vendor/github.com/xanzy/go-gitlab/search.go +++ b/vendor/github.com/xanzy/go-gitlab/search.go @@ -328,7 +328,7 @@ func (s *SearchService) searchByGroup(gid interface{}, scope, query string, resu if err != nil { return nil, err } - u := fmt.Sprintf("groups/%s/-/search", pathEscape(group)) + u := fmt.Sprintf("groups/%s/-/search", PathEscape(group)) opts := &searchOptions{SearchOptions: *opt, Scope: scope, Search: query} @@ -345,7 +345,7 @@ func (s *SearchService) searchByProject(pid interface{}, scope, query string, re if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/-/search", pathEscape(project)) + u := fmt.Sprintf("projects/%s/-/search", PathEscape(project)) opts := &searchOptions{SearchOptions: *opt, Scope: scope, Search: query} diff --git a/vendor/github.com/xanzy/go-gitlab/services.go b/vendor/github.com/xanzy/go-gitlab/services.go index 973a30cb8..2abcdc737 100644 --- a/vendor/github.com/xanzy/go-gitlab/services.go +++ b/vendor/github.com/xanzy/go-gitlab/services.go @@ -38,6 +38,7 @@ type ServicesService struct { type Service struct { ID int `json:"id"` Title string `json:"title"` + Slug string `json:"slug"` CreatedAt *time.Time `json:"created_at"` UpdatedAt *time.Time `json:"updated_at"` Active bool `json:"active"` @@ -64,7 +65,7 @@ func (s *ServicesService) ListServices(pid interface{}, options ...RequestOption if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -108,7 +109,7 @@ func (s *ServicesService) GetCustomIssueTrackerService(pid interface{}, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/custom-issue-tracker", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/custom-issue-tracker", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -147,7 +148,7 @@ func (s *ServicesService) SetCustomIssueTrackerService(pid interface{}, opt *Set if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/custom-issue-tracker", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/custom-issue-tracker", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -166,7 +167,7 @@ func (s *ServicesService) DeleteCustomIssueTrackerService(pid interface{}, optio if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/custom-issue-tracker", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/custom-issue-tracker", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -204,7 +205,7 @@ func (s *ServicesService) GetDroneCIService(pid interface{}, options ...RequestO if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/drone-ci", pathEscape(project)) 
+ u := fmt.Sprintf("projects/%s/services/drone-ci", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -240,7 +241,7 @@ func (s *ServicesService) SetDroneCIService(pid interface{}, opt *SetDroneCIServ if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/drone-ci", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/drone-ci", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -259,7 +260,7 @@ func (s *ServicesService) DeleteDroneCIService(pid interface{}, options ...Reque if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/drone-ci", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/drone-ci", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -295,7 +296,7 @@ func (s *ServicesService) GetExternalWikiService(pid interface{}, options ...Req if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/external-wiki", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/external-wiki", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -329,7 +330,7 @@ func (s *ServicesService) SetExternalWikiService(pid interface{}, opt *SetExtern if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/external-wiki", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/external-wiki", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -348,7 +349,7 @@ func (s *ServicesService) DeleteExternalWikiService(pid interface{}, options ... if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/external-wiki", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/external-wiki", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -385,7 +386,7 @@ func (s *ServicesService) GetGithubService(pid interface{}, options ...RequestOp if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/github", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/github", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -421,7 +422,7 @@ func (s *ServicesService) SetGithubService(pid interface{}, opt *SetGithubServic if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/github", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/github", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -440,7 +441,7 @@ func (s *ServicesService) DeleteGithubService(pid interface{}, options ...Reques if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/github", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/github", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -469,7 +470,7 @@ func (s *ServicesService) SetGitLabCIService(pid interface{}, opt *SetGitLabCISe if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/gitlab-ci", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/gitlab-ci", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -488,7 +489,7 @@ func (s *ServicesService) DeleteGitLabCIService(pid interface{}, options ...Requ if 
err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/gitlab-ci", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/gitlab-ci", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -517,7 +518,7 @@ func (s *ServicesService) SetHipChatService(pid interface{}, opt *SetHipChatServ if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/hipchat", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/hipchat", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -536,7 +537,7 @@ func (s *ServicesService) DeleteHipChatService(pid interface{}, options ...Reque if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/hipchat", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/hipchat", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -574,7 +575,7 @@ func (s *ServicesService) GetJenkinsCIService(pid interface{}, options ...Reques if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/jenkins", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/jenkins", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -614,7 +615,7 @@ func (s *ServicesService) SetJenkinsCIService(pid interface{}, opt *SetJenkinsCI if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/jenkins", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/jenkins", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -633,7 +634,7 @@ func (s *ServicesService) DeleteJenkinsCIService(pid interface{}, options ...Req if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/jenkins", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/jenkins", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -704,7 +705,7 @@ func (s *ServicesService) GetJiraService(pid interface{}, options ...RequestOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/jira", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/jira", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -747,7 +748,7 @@ func (s *ServicesService) SetJiraService(pid interface{}, opt *SetJiraServiceOpt if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/jira", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/jira", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -766,7 +767,7 @@ func (s *ServicesService) DeleteJiraService(pid interface{}, options ...RequestO if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/jira", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/jira", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -815,7 +816,7 @@ func (s *ServicesService) GetMattermostService(pid interface{}, options ...Reque if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/mattermost", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/mattermost", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -871,7 +872,7 @@ func (s 
*ServicesService) SetMattermostService(pid interface{}, opt *SetMattermo if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/mattermost", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/mattermost", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -890,7 +891,7 @@ func (s *ServicesService) DeleteMattermostService(pid interface{}, options ...Re if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/mattermost", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/mattermost", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -936,7 +937,7 @@ func (s *ServicesService) GetMicrosoftTeamsService(pid interface{}, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/microsoft-teams", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/microsoft-teams", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -981,7 +982,7 @@ func (s *ServicesService) SetMicrosoftTeamsService(pid interface{}, opt *SetMicr if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/microsoft-teams", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/microsoft-teams", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -999,7 +1000,7 @@ func (s *ServicesService) DeleteMicrosoftTeamsService(pid interface{}, options . if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/microsoft-teams", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/microsoft-teams", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1038,7 +1039,7 @@ func (s *ServicesService) GetPipelinesEmailService(pid interface{}, options ...R if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/pipelines-email", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/pipelines-email", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1077,7 +1078,7 @@ func (s *ServicesService) SetPipelinesEmailService(pid interface{}, opt *SetPipe if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/pipelines-email", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/pipelines-email", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1096,7 +1097,7 @@ func (s *ServicesService) DeletePipelinesEmailService(pid interface{}, options . 
if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/pipelines-email", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/pipelines-email", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1134,7 +1135,7 @@ func (s *ServicesService) GetPrometheusService(pid interface{}, options ...Reque if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/prometheus", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/prometheus", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1170,7 +1171,7 @@ func (s *ServicesService) SetPrometheusService(pid interface{}, opt *SetPromethe if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/prometheus", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/prometheus", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1189,7 +1190,7 @@ func (s *ServicesService) DeletePrometheusService(pid interface{}, options ...Re if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/prometheus", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/prometheus", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1240,7 +1241,7 @@ func (s *ServicesService) GetSlackService(pid interface{}, options ...RequestOpt if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/slack", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/slack", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1302,7 +1303,7 @@ func (s *ServicesService) SetSlackService(pid interface{}, opt *SetSlackServiceO if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/slack", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/slack", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1321,7 +1322,96 @@ func (s *ServicesService) DeleteSlackService(pid interface{}, options ...Request if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/slack", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/slack", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} + +// SlackSlashCommandsService represents Slack slash commands settings. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/integrations.html#slack-slash-commands +type SlackSlashCommandsService struct { + Service + Properties *SlackSlashCommandsProperties `json:"properties"` +} + +// SlackSlashCommandsProperties represents Slack slash commands specific properties. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/integrations.html#slack-slash-commands +type SlackSlashCommandsProperties struct { + Token string `json:"token"` +} + +// GetSlackSlashCommandsService gets Slack slash commands service settings for a project. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/integrations.html#get-slack-slash-command-integration-settings +func (s *ServicesService) GetSlackSlashCommandsService(pid interface{}, options ...RequestOptionFunc) (*SlackSlashCommandsService, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/services/slack-slash-commands", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + svc := new(SlackSlashCommandsService) + resp, err := s.client.Do(req, svc) + if err != nil { + return nil, resp, err + } + + return svc, resp, err +} + +// SetSlackSlashCommandsServiceOptions represents the available SetSlackSlashCommandsService() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#createedit-slack-slash-command-service +type SetSlackSlashCommandsServiceOptions struct { + Token *string `url:"token,omitempty" json:"token,omitempty"` +} + +// SetSlackSlashCommandsService sets Slack slash commands service for a project +// +// GitLab API docs: +// https://docs.gitlab.com/13.12/ee/api/services.html#createedit-slack-slash-command-service +func (s *ServicesService) SetSlackSlashCommandsService(pid interface{}, opt *SetSlackSlashCommandsServiceOptions, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/services/slack-slash-commands", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} + +// DeleteSlackSlashCommandsService deletes Slack slash commands service for project. 
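The Slack slash commands integration added above is driven by the Get/Set/Delete calls shown in this file. A hedged sketch that reads the current settings and updates the token, reusing the earlier client setup; the project ID and token are placeholders:

    // configureSlackSlashCommands reads the current settings and updates the token.
    func configureSlackSlashCommands(client *gitlab.Client, projectID int) error {
        svc, _, err := client.Services.GetSlackSlashCommandsService(projectID)
        if err != nil {
            return err
        }
        fmt.Println("slack slash commands active:", svc.Active)

        token := "example-slash-command-token" // placeholder
        _, err = client.Services.SetSlackSlashCommandsService(projectID, &gitlab.SetSlackSlashCommandsServiceOptions{
            Token: &token,
        })
        return err
    }
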
+// +// GitLab API docs: +// https://docs.gitlab.com/13.12/ee/api/services.html#delete-slack-slash-command-service +func (s *ServicesService) DeleteSlackSlashCommandsService(pid interface{}, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/services/slack-slash-commands", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -1360,7 +1450,7 @@ func (s *ServicesService) GetYouTrackService(pid interface{}, options ...Request if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/services/youtrack", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/youtrack", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -1397,7 +1487,7 @@ func (s *ServicesService) SetYouTrackService(pid interface{}, opt *SetYouTrackSe if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/youtrack", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/youtrack", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -1416,7 +1506,7 @@ func (s *ServicesService) DeleteYouTrackService(pid interface{}, options ...Requ if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/services/youtrack", pathEscape(project)) + u := fmt.Sprintf("projects/%s/services/youtrack", PathEscape(project)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/settings.go b/vendor/github.com/xanzy/go-gitlab/settings.go index 10d76e899..23039f54e 100644 --- a/vendor/github.com/xanzy/go-gitlab/settings.go +++ b/vendor/github.com/xanzy/go-gitlab/settings.go @@ -229,169 +229,169 @@ func (s *SettingsService) GetSettings(options ...RequestOptionFunc) (*Settings, // GitLab API docs: // https://docs.gitlab.com/ce/api/settings.html#change-application.settings type UpdateSettingsOptions struct { - AdminMode *bool `url:"admin_mode,omitempty" json:"admin_mode,omitempty"` - AdminNotificationEmail *string `url:"admin_notification_email,omitempty" json:"admin_notification_email,omitempty"` - AfterSignOutPath *string `url:"after_sign_out_path,omitempty" json:"after_sign_out_path,omitempty"` - AfterSignUpText *string `url:"after_sign_up_text,omitempty" json:"after_sign_up_text,omitempty"` - AkismetAPIKey *string `url:"akismet_api_key,omitempty" json:"akismet_api_key,omitempty"` - AkismetEnabled *bool `url:"akismet_enabled,omitempty" json:"akismet_enabled,omitempty"` - AllowGroupOwnersToManageLDAP *bool `url:"allow_group_owners_to_manage_ldap,omitempty" json:"allow_group_owners_to_manage_ldap,omitempty"` - AllowLocalRequestsFromHooksAndServices *bool `url:"allow_local_requests_from_hooks_and_services,omitempty" json:"allow_local_requests_from_hooks_and_services,omitempty"` - AllowLocalRequestsFromSystemHooks *bool `url:"allow_local_requests_from_system_hooks,omitempty" json:"allow_local_requests_from_system_hooks,omitempty"` - AllowLocalRequestsFromWebHooksAndServices *bool `url:"allow_local_requests_from_web_hooks_and_services,omitempty" json:"allow_local_requests_from_web_hooks_and_services,omitempty"` - ArchiveBuildsInHumanReadable *string `url:"archive_builds_in_human_readable,omitempty" json:"archive_builds_in_human_readable,omitempty"` - AssetProxyEnabled *bool `url:"asset_proxy_enabled,omitempty" json:"asset_proxy_enabled,omitempty"` - AssetProxySecretKey 
*string `url:"asset_proxy_secret_key,omitempty" json:"asset_proxy_secret_key,omitempty"` - AssetProxyURL *string `url:"asset_proxy_url,omitempty" json:"asset_proxy_url,omitempty"` - AssetProxyWhitelist []string `url:"asset_proxy_whitelist,omitempty" json:"asset_proxy_whitelist,omitempty"` - AuthorizedKeysEnabled *bool `url:"authorized_keys_enabled,omitempty" json:"authorized_keys_enabled,omitempty"` - AutoDevOpsDomain *string `url:"auto_devops_domain,omitempty" json:"auto_devops_domain,omitempty"` - AutoDevOpsEnabled *bool `url:"auto_devops_enabled,omitempty" json:"auto_devops_enabled,omitempty"` - CheckNamespacePlan *bool `url:"check_namespace_plan,omitempty" json:"check_namespace_plan,omitempty"` - CommitEmailHostname *string `url:"commit_email_hostname,omitempty" json:"commit_email_hostname,omitempty"` - ContainerRegistryTokenExpireDelay *int `url:"container_registry_token_expire_delay,omitempty" json:"container_registry_token_expire_delay,omitempty"` - DefaultArtifactsExpireIn *string `url:"default_artifacts_expire_in,omitempty" json:"default_artifacts_expire_in,omitempty"` - DefaultBranchProtection *int `url:"default_branch_protection,omitempty" json:"default_branch_protection,omitempty"` - DefaultGroupVisibility *VisibilityValue `url:"default_group_visibility,omitempty" json:"default_group_visibility,omitempty"` - DefaultProjectCreation *int `url:"default_project_creation,omitempty" json:"default_project_creation,omitempty"` - DefaultProjectsLimit *int `url:"default_projects_limit,omitempty" json:"default_projects_limit,omitempty"` - DefaultProjectVisibility *VisibilityValue `url:"default_project_visibility,omitempty" json:"default_project_visibility,omitempty"` - DefaultSnippetVisibility *VisibilityValue `url:"default_snippet_visibility,omitempty" json:"default_snippet_visibility,omitempty"` - DiffMaxPatchBytes *int `url:"diff_max_patch_bytes,omitempty" json:"diff_max_patch_bytes,omitempty"` - DisabledOauthSignInSources []string `url:"disabled_oauth_sign_in_sources,omitempty" json:"disabled_oauth_sign_in_sources,omitempty"` - DNSRebindingProtectionEnabled *bool `url:"dns_rebinding_protection_enabled,omitempty" json:"dns_rebinding_protection_enabled,omitempty"` - DomainBlacklist []string `url:"domain_blacklist,omitempty" json:"domain_blacklist,omitempty"` - DomainBlacklistEnabled *bool `url:"domain_blacklist_enabled,omitempty" json:"domain_blacklist_enabled,omitempty"` - DomainWhitelist []string `url:"domain_whitelist,omitempty" json:"domain_whitelist,omitempty"` - DSAKeyRestriction *int `url:"dsa_key_restriction,omitempty" json:"dsa_key_restriction,omitempty"` - ECDSAKeyRestriction *int `url:"ecdsa_key_restriction,omitempty" json:"ecdsa_key_restriction,omitempty"` - Ed25519KeyRestriction *int `url:"ed25519_key_restriction,omitempty" json:"ed25519_key_restriction,omitempty"` - ElasticsearchAWSAccessKey *string `url:"elasticsearch_aws_access_key,omitempty" json:"elasticsearch_aws_access_key,omitempty"` - ElasticsearchAWS *bool `url:"elasticsearch_aws,omitempty" json:"elasticsearch_aws,omitempty"` - ElasticsearchAWSRegion *string `url:"elasticsearch_aws_region,omitempty" json:"elasticsearch_aws_region,omitempty"` - ElasticsearchAWSSecretAccessKey *string `url:"elasticsearch_aws_secret_access_key,omitempty" json:"elasticsearch_aws_secret_access_key,omitempty"` - ElasticsearchIndexing *bool `url:"elasticsearch_indexing,omitempty" json:"elasticsearch_indexing,omitempty"` - ElasticsearchLimitIndexing *bool `url:"elasticsearch_limit_indexing,omitempty" 
json:"elasticsearch_limit_indexing,omitempty"` - ElasticsearchNamespaceIDs []int `url:"elasticsearch_namespace_ids,omitempty" json:"elasticsearch_namespace_ids,omitempty"` - ElasticsearchProjectIDs []int `url:"elasticsearch_project_ids,omitempty" json:"elasticsearch_project_ids,omitempty"` - ElasticsearchSearch *bool `url:"elasticsearch_search,omitempty" json:"elasticsearch_search,omitempty"` - ElasticsearchURL *string `url:"elasticsearch_url,omitempty" json:"elasticsearch_url,omitempty"` - EmailAdditionalText *string `url:"email_additional_text,omitempty" json:"email_additional_text,omitempty"` - EmailAuthorInBody *bool `url:"email_author_in_body,omitempty" json:"email_author_in_body,omitempty"` - EnabledGitAccessProtocol *string `url:"enabled_git_access_protocol,omitempty" json:"enabled_git_access_protocol,omitempty"` - EnforceTerms *bool `url:"enforce_terms,omitempty" json:"enforce_terms,omitempty"` - ExternalAuthClientCert *string `url:"external_auth_client_cert,omitempty" json:"external_auth_client_cert,omitempty"` - ExternalAuthClientKeyPass *string `url:"external_auth_client_key_pass,omitempty" json:"external_auth_client_key_pass,omitempty"` - ExternalAuthClientKey *string `url:"external_auth_client_key,omitempty" json:"external_auth_client_key,omitempty"` - ExternalAuthorizationServiceDefaultLabel *string `url:"external_authorization_service_default_label,omitempty" json:"external_authorization_service_default_label,omitempty"` - ExternalAuthorizationServiceEnabled *bool `url:"external_authorization_service_enabled,omitempty" json:"external_authorization_service_enabled,omitempty"` - ExternalAuthorizationServiceTimeout *float64 `url:"external_authorization_service_timeout,omitempty" json:"external_authorization_service_timeout,omitempty"` - ExternalAuthorizationServiceURL *string `url:"external_authorization_service_url,omitempty" json:"external_authorization_service_url,omitempty"` - FileTemplateProjectID *int `url:"file_template_project_id,omitempty" json:"file_template_project_id,omitempty"` - FirstDayOfWeek *int `url:"first_day_of_week,omitempty" json:"first_day_of_week,omitempty"` - GeoNodeAllowedIPs *string `url:"geo_node_allowed_ips,omitempty" json:"geo_node_allowed_ips,omitempty"` - GeoStatusTimeout *int `url:"geo_status_timeout,omitempty" json:"geo_status_timeout,omitempty"` - GitalyTimeoutDefault *int `url:"gitaly_timeout_default,omitempty" json:"gitaly_timeout_default,omitempty"` - GitalyTimeoutFast *int `url:"gitaly_timeout_fast,omitempty" json:"gitaly_timeout_fast,omitempty"` - GitalyTimeoutMedium *int `url:"gitaly_timeout_medium,omitempty" json:"gitaly_timeout_medium,omitempty"` - GrafanaEnabled *bool `url:"grafana_enabled,omitempty" json:"grafana_enabled,omitempty"` - GrafanaURL *string `url:"grafana_url,omitempty" json:"grafana_url,omitempty"` - GravatarEnabled *bool `url:"gravatar_enabled,omitempty" json:"gravatar_enabled,omitempty"` - HashedStorageEnabled *bool `url:"hashed_storage_enabled,omitempty" json:"hashed_storage_enabled,omitempty"` - HelpPageHideCommercialContent *bool `url:"help_page_hide_commercial_content,omitempty" json:"help_page_hide_commercial_content,omitempty"` - HelpPageSupportURL *string `url:"help_page_support_url,omitempty" json:"help_page_support_url,omitempty"` - HelpPageText *string `url:"help_page_text,omitempty" json:"help_page_text,omitempty"` - HelpText *string `url:"help_text,omitempty" json:"help_text,omitempty"` - HideThirdPartyOffers *bool `url:"hide_third_party_offers,omitempty" json:"hide_third_party_offers,omitempty"` - 
HomePageURL *string `url:"home_page_url,omitempty" json:"home_page_url,omitempty"` - HousekeepingBitmapsEnabled *bool `url:"housekeeping_bitmaps_enabled,omitempty" json:"housekeeping_bitmaps_enabled,omitempty"` - HousekeepingEnabled *bool `url:"housekeeping_enabled,omitempty" json:"housekeeping_enabled,omitempty"` - HousekeepingFullRepackPeriod *int `url:"housekeeping_full_repack_period,omitempty" json:"housekeeping_full_repack_period,omitempty"` - HousekeepingGcPeriod *int `url:"housekeeping_gc_period,omitempty" json:"housekeeping_gc_period,omitempty"` - HousekeepingIncrementalRepackPeriod *int `url:"housekeeping_incremental_repack_period,omitempty" json:"housekeeping_incremental_repack_period,omitempty"` - HTMLEmailsEnabled *bool `url:"html_emails_enabled,omitempty" json:"html_emails_enabled,omitempty"` - ImportSources []string `url:"import_sources,omitempty" json:"import_sources,omitempty"` - InstanceStatisticsVisibilityPrivate *bool `url:"instance_statistics_visibility_private,omitempty" json:"instance_statistics_visibility_private,omitempty"` - LocalMarkdownVersion *int `url:"local_markdown_version,omitempty" json:"local_markdown_version,omitempty"` - MaxArtifactsSize *int `url:"max_artifacts_size,omitempty" json:"max_artifacts_size,omitempty"` - MaxAttachmentSize *int `url:"max_attachment_size,omitempty" json:"max_attachment_size,omitempty"` - MaxPagesSize *int `url:"max_pages_size,omitempty" json:"max_pages_size,omitempty"` - MetricsEnabled *bool `url:"metrics_enabled,omitempty" json:"metrics_enabled,omitempty"` - MetricsHost *string `url:"metrics_host,omitempty" json:"metrics_host,omitempty"` - MetricsMethodCallThreshold *int `url:"metrics_method_call_threshold,omitempty" json:"metrics_method_call_threshold,omitempty"` - MetricsPacketSize *int `url:"metrics_packet_size,omitempty" json:"metrics_packet_size,omitempty"` - MetricsPoolSize *int `url:"metrics_pool_size,omitempty" json:"metrics_pool_size,omitempty"` - MetricsPort *int `url:"metrics_port,omitempty" json:"metrics_port,omitempty"` - MetricsSampleInterval *int `url:"metrics_sample_interval,omitempty" json:"metrics_sample_interval,omitempty"` - MetricsTimeout *int `url:"metrics_timeout,omitempty" json:"metrics_timeout,omitempty"` - MirrorAvailable *bool `url:"mirror_available,omitempty" json:"mirror_available,omitempty"` - MirrorCapacityThreshold *int `url:"mirror_capacity_threshold,omitempty" json:"mirror_capacity_threshold,omitempty"` - MirrorMaxCapacity *int `url:"mirror_max_capacity,omitempty" json:"mirror_max_capacity,omitempty"` - MirrorMaxDelay *int `url:"mirror_max_delay,omitempty" json:"mirror_max_delay,omitempty"` - OutboundLocalRequestsWhitelist []string `url:"outbound_local_requests_whitelist,omitempty" json:"outbound_local_requests_whitelist,omitempty"` - PagesDomainVerificationEnabled *bool `url:"pages_domain_verification_enabled,omitempty" json:"pages_domain_verification_enabled,omitempty"` - PasswordAuthenticationEnabledForGit *bool `url:"password_authentication_enabled_for_git,omitempty" json:"password_authentication_enabled_for_git,omitempty"` - PasswordAuthenticationEnabledForWeb *bool `url:"password_authentication_enabled_for_web,omitempty" json:"password_authentication_enabled_for_web,omitempty"` - PerformanceBarAllowedGroupID *string `url:"performance_bar_allowed_group_id,omitempty" json:"performance_bar_allowed_group_id,omitempty"` - PerformanceBarAllowedGroupPath *string `url:"performance_bar_allowed_group_path,omitempty" json:"performance_bar_allowed_group_path,omitempty"` - PerformanceBarEnabled *bool 
`url:"performance_bar_enabled,omitempty" json:"performance_bar_enabled,omitempty"` - PlantumlEnabled *bool `url:"plantuml_enabled,omitempty" json:"plantuml_enabled,omitempty"` - PlantumlURL *string `url:"plantuml_url,omitempty" json:"plantuml_url,omitempty"` - PollingIntervalMultiplier *float64 `url:"polling_interval_multiplier,omitempty" json:"polling_interval_multiplier,omitempty"` - ProjectExportEnabled *bool `url:"project_export_enabled,omitempty" json:"project_export_enabled,omitempty"` - PrometheusMetricsEnabled *bool `url:"prometheus_metrics_enabled,omitempty" json:"prometheus_metrics_enabled,omitempty"` - ProtectedCIVariables *bool `url:"protected_ci_variables,omitempty" json:"protected_ci_variables,omitempty"` - PseudonymizerEnabled *bool `url:"psedonymizer_enabled,omitempty" json:"psedonymizer_enabled,omitempty"` - PushEventHooksLimit *int `url:"push_event_hooks_limit,omitempty" json:"push_event_hooks_limit,omitempty"` - PushEventActivitiesLimit *int `url:"push_event_activities_limit,omitempty" json:"push_event_activities_limit,omitempty"` - RecaptchaEnabled *bool `url:"recaptcha_enabled,omitempty" json:"recaptcha_enabled,omitempty"` - RecaptchaPrivateKey *string `url:"recaptcha_private_key,omitempty" json:"recaptcha_private_key,omitempty"` - RecaptchaSiteKey *string `url:"recaptcha_site_key,omitempty" json:"recaptcha_site_key,omitempty"` - ReceiveMaxInputSize *int `url:"receive_max_input_size,omitempty" json:"receive_max_input_size,omitempty"` - RepositoryChecksEnabled *bool `url:"repository_checks_enabled,omitempty" json:"repository_checks_enabled,omitempty"` - RepositorySizeLimit *int `url:"repository_size_limit,omitempty" json:"repository_size_limit,omitempty"` - RepositoryStorages []string `url:"repository_storages,omitempty" json:"repository_storages,omitempty"` - RequireTwoFactorAuthentication *bool `url:"require_two_factor_authentication,omitempty" json:"require_two_factor_authentication,omitempty"` - RestrictedVisibilityLevels []VisibilityValue `url:"restricted_visibility_levels,omitempty" json:"restricted_visibility_levels,omitempty"` - RsaKeyRestriction *int `url:"rsa_key_restriction,omitempty" json:"rsa_key_restriction,omitempty"` - SendUserConfirmationEmail *bool `url:"send_user_confirmation_email,omitempty" json:"send_user_confirmation_email,omitempty"` - SessionExpireDelay *int `url:"session_expire_delay,omitempty" json:"session_expire_delay,omitempty"` - SharedRunnersEnabled *bool `url:"shared_runners_enabled,omitempty" json:"shared_runners_enabled,omitempty"` - SharedRunnersMinutes *int `url:"shared_runners_minutes,omitempty" json:"shared_runners_minutes,omitempty"` - SharedRunnersText *string `url:"shared_runners_text,omitempty" json:"shared_runners_text,omitempty"` - SignInText *string `url:"sign_in_text,omitempty" json:"sign_in_text,omitempty"` - SignupEnabled *bool `url:"signup_enabled,omitempty" json:"signup_enabled,omitempty"` - SlackAppEnabled *bool `url:"slack_app_enabled,omitempty" json:"slack_app_enabled,omitempty"` - SlackAppID *string `url:"slack_app_id,omitempty" json:"slack_app_id,omitempty"` - SlackAppSecret *string `url:"slack_app_secret,omitempty" json:"slack_app_secret,omitempty"` - SlackAppVerificationToken *string `url:"slack_app_verification_token,omitempty" json:"slack_app_verification_token,omitempty"` - SnowplowCollectorHostname *string `url:"snowplow_collector_hostname,omitempty" json:"snowplow_collector_hostname,omitempty"` - SnowplowCookieDomain *string `url:"snowplow_cookie_domain,omitempty" json:"snowplow_cookie_domain,omitempty"` - 
SnowplowEnabled *bool `url:"snowplow_enabled,omitempty" json:"snowplow_enabled,omitempty"` - SnowplowSiteID *string `url:"snowplow_site_id,omitempty" json:"snowplow_site_id,omitempty"` - TerminalMaxSessionTime *int `url:"terminal_max_session_time,omitempty" json:"terminal_max_session_time,omitempty"` - Terms *string `url:"terms,omitempty" json:"terms,omitempty"` - ThrottleAuthenticatedAPIEnabled *bool `url:"throttle_authenticated_api_enabled,omitempty" json:"throttle_authenticated_api_enabled,omitempty"` - ThrottleAuthenticatedAPIPeriodInSeconds *int `url:"throttle_authenticated_api_period_in_seconds,omitempty" json:"throttle_authenticated_api_period_in_seconds,omitempty"` - ThrottleAuthenticatedAPIRequestsPerPeriod *int `url:"throttle_authenticated_api_requests_per_period,omitempty" json:"throttle_authenticated_api_requests_per_period,omitempty"` - ThrottleAuthenticatedWebEnabled *bool `url:"throttle_authenticated_web_enabled,omitempty" json:"throttle_authenticated_web_enabled,omitempty"` - ThrottleAuthenticatedWebPeriodInSeconds *int `url:"throttle_authenticated_web_period_in_seconds,omitempty" json:"throttle_authenticated_web_period_in_seconds,omitempty"` - ThrottleAuthenticatedWebRequestsPerPeriod *int `url:"throttle_authenticated_web_requests_per_period,omitempty" json:"throttle_authenticated_web_requests_per_period,omitempty"` - ThrottleUnauthenticatedEnabled *bool `url:"throttle_unauthenticated_enabled,omitempty" json:"throttle_unauthenticated_enabled,omitempty"` - ThrottleUnauthenticatedPeriodInSeconds *int `url:"throttle_unauthenticated_period_in_seconds,omitempty" json:"throttle_unauthenticated_period_in_seconds,omitempty"` - ThrottleUnauthenticatedRequestsPerPeriod *int `url:"throttle_unauthenticated_requests_per_period,omitempty" json:"throttle_unauthenticated_requests_per_period,omitempty"` - TimeTrackingLimitToHours *bool `url:"time_tracking_limit_to_hours,omitempty" json:"time_tracking_limit_to_hours,omitempty"` - TwoFactorGracePeriod *int `url:"two_factor_grace_period,omitempty" json:"two_factor_grace_period,omitempty"` - UniqueIPsLimitEnabled *bool `url:"unique_ips_limit_enabled,omitempty" json:"unique_ips_limit_enabled,omitempty"` - UniqueIPsLimitPerUser *int `url:"unique_ips_limit_per_user,omitempty" json:"unique_ips_limit_per_user,omitempty"` - UniqueIPsLimitTimeWindow *int `url:"unique_ips_limit_time_window,omitempty" json:"unique_ips_limit_time_window,omitempty"` - UsagePingEnabled *bool `url:"usage_ping_enabled,omitempty" json:"usage_ping_enabled,omitempty"` - UserDefaultExternal *bool `url:"user_default_external,omitempty" json:"user_default_external,omitempty"` - UserDefaultInternalRegex *string `url:"user_default_internal_regex,omitempty" json:"user_default_internal_regex,omitempty"` - UserOauthApplications *bool `url:"user_oauth_applications,omitempty" json:"user_oauth_applications,omitempty"` - UserShowAddSSHKeyMessage *bool `url:"user_show_add_ssh_key_message,omitempty" json:"user_show_add_ssh_key_message,omitempty"` - VersionCheckEnabled *bool `url:"version_check_enabled,omitempty" json:"version_check_enabled,omitempty"` - WebIDEClientsidePreviewEnabled *bool `url:"web_ide_clientside_preview_enabled,omitempty" json:"web_ide_clientside_preview_enabled,omitempty"` + AdminMode *bool `url:"admin_mode,omitempty" json:"admin_mode,omitempty"` + AdminNotificationEmail *string `url:"admin_notification_email,omitempty" json:"admin_notification_email,omitempty"` + AfterSignOutPath *string `url:"after_sign_out_path,omitempty" json:"after_sign_out_path,omitempty"` + 
AfterSignUpText *string `url:"after_sign_up_text,omitempty" json:"after_sign_up_text,omitempty"` + AkismetAPIKey *string `url:"akismet_api_key,omitempty" json:"akismet_api_key,omitempty"` + AkismetEnabled *bool `url:"akismet_enabled,omitempty" json:"akismet_enabled,omitempty"` + AllowGroupOwnersToManageLDAP *bool `url:"allow_group_owners_to_manage_ldap,omitempty" json:"allow_group_owners_to_manage_ldap,omitempty"` + AllowLocalRequestsFromHooksAndServices *bool `url:"allow_local_requests_from_hooks_and_services,omitempty" json:"allow_local_requests_from_hooks_and_services,omitempty"` + AllowLocalRequestsFromSystemHooks *bool `url:"allow_local_requests_from_system_hooks,omitempty" json:"allow_local_requests_from_system_hooks,omitempty"` + AllowLocalRequestsFromWebHooksAndServices *bool `url:"allow_local_requests_from_web_hooks_and_services,omitempty" json:"allow_local_requests_from_web_hooks_and_services,omitempty"` + ArchiveBuildsInHumanReadable *string `url:"archive_builds_in_human_readable,omitempty" json:"archive_builds_in_human_readable,omitempty"` + AssetProxyEnabled *bool `url:"asset_proxy_enabled,omitempty" json:"asset_proxy_enabled,omitempty"` + AssetProxySecretKey *string `url:"asset_proxy_secret_key,omitempty" json:"asset_proxy_secret_key,omitempty"` + AssetProxyURL *string `url:"asset_proxy_url,omitempty" json:"asset_proxy_url,omitempty"` + AssetProxyWhitelist *[]string `url:"asset_proxy_whitelist,omitempty" json:"asset_proxy_whitelist,omitempty"` + AuthorizedKeysEnabled *bool `url:"authorized_keys_enabled,omitempty" json:"authorized_keys_enabled,omitempty"` + AutoDevOpsDomain *string `url:"auto_devops_domain,omitempty" json:"auto_devops_domain,omitempty"` + AutoDevOpsEnabled *bool `url:"auto_devops_enabled,omitempty" json:"auto_devops_enabled,omitempty"` + CheckNamespacePlan *bool `url:"check_namespace_plan,omitempty" json:"check_namespace_plan,omitempty"` + CommitEmailHostname *string `url:"commit_email_hostname,omitempty" json:"commit_email_hostname,omitempty"` + ContainerRegistryTokenExpireDelay *int `url:"container_registry_token_expire_delay,omitempty" json:"container_registry_token_expire_delay,omitempty"` + DefaultArtifactsExpireIn *string `url:"default_artifacts_expire_in,omitempty" json:"default_artifacts_expire_in,omitempty"` + DefaultBranchProtection *int `url:"default_branch_protection,omitempty" json:"default_branch_protection,omitempty"` + DefaultGroupVisibility *VisibilityValue `url:"default_group_visibility,omitempty" json:"default_group_visibility,omitempty"` + DefaultProjectCreation *int `url:"default_project_creation,omitempty" json:"default_project_creation,omitempty"` + DefaultProjectsLimit *int `url:"default_projects_limit,omitempty" json:"default_projects_limit,omitempty"` + DefaultProjectVisibility *VisibilityValue `url:"default_project_visibility,omitempty" json:"default_project_visibility,omitempty"` + DefaultSnippetVisibility *VisibilityValue `url:"default_snippet_visibility,omitempty" json:"default_snippet_visibility,omitempty"` + DiffMaxPatchBytes *int `url:"diff_max_patch_bytes,omitempty" json:"diff_max_patch_bytes,omitempty"` + DisabledOauthSignInSources *[]string `url:"disabled_oauth_sign_in_sources,omitempty" json:"disabled_oauth_sign_in_sources,omitempty"` + DNSRebindingProtectionEnabled *bool `url:"dns_rebinding_protection_enabled,omitempty" json:"dns_rebinding_protection_enabled,omitempty"` + DomainBlacklist *[]string `url:"domain_blacklist,omitempty" json:"domain_blacklist,omitempty"` + DomainBlacklistEnabled *bool 
`url:"domain_blacklist_enabled,omitempty" json:"domain_blacklist_enabled,omitempty"` + DomainWhitelist *[]string `url:"domain_whitelist,omitempty" json:"domain_whitelist,omitempty"` + DSAKeyRestriction *int `url:"dsa_key_restriction,omitempty" json:"dsa_key_restriction,omitempty"` + ECDSAKeyRestriction *int `url:"ecdsa_key_restriction,omitempty" json:"ecdsa_key_restriction,omitempty"` + Ed25519KeyRestriction *int `url:"ed25519_key_restriction,omitempty" json:"ed25519_key_restriction,omitempty"` + ElasticsearchAWSAccessKey *string `url:"elasticsearch_aws_access_key,omitempty" json:"elasticsearch_aws_access_key,omitempty"` + ElasticsearchAWS *bool `url:"elasticsearch_aws,omitempty" json:"elasticsearch_aws,omitempty"` + ElasticsearchAWSRegion *string `url:"elasticsearch_aws_region,omitempty" json:"elasticsearch_aws_region,omitempty"` + ElasticsearchAWSSecretAccessKey *string `url:"elasticsearch_aws_secret_access_key,omitempty" json:"elasticsearch_aws_secret_access_key,omitempty"` + ElasticsearchIndexing *bool `url:"elasticsearch_indexing,omitempty" json:"elasticsearch_indexing,omitempty"` + ElasticsearchLimitIndexing *bool `url:"elasticsearch_limit_indexing,omitempty" json:"elasticsearch_limit_indexing,omitempty"` + ElasticsearchNamespaceIDs *[]int `url:"elasticsearch_namespace_ids,omitempty" json:"elasticsearch_namespace_ids,omitempty"` + ElasticsearchProjectIDs *[]int `url:"elasticsearch_project_ids,omitempty" json:"elasticsearch_project_ids,omitempty"` + ElasticsearchSearch *bool `url:"elasticsearch_search,omitempty" json:"elasticsearch_search,omitempty"` + ElasticsearchURL *string `url:"elasticsearch_url,omitempty" json:"elasticsearch_url,omitempty"` + EmailAdditionalText *string `url:"email_additional_text,omitempty" json:"email_additional_text,omitempty"` + EmailAuthorInBody *bool `url:"email_author_in_body,omitempty" json:"email_author_in_body,omitempty"` + EnabledGitAccessProtocol *string `url:"enabled_git_access_protocol,omitempty" json:"enabled_git_access_protocol,omitempty"` + EnforceTerms *bool `url:"enforce_terms,omitempty" json:"enforce_terms,omitempty"` + ExternalAuthClientCert *string `url:"external_auth_client_cert,omitempty" json:"external_auth_client_cert,omitempty"` + ExternalAuthClientKeyPass *string `url:"external_auth_client_key_pass,omitempty" json:"external_auth_client_key_pass,omitempty"` + ExternalAuthClientKey *string `url:"external_auth_client_key,omitempty" json:"external_auth_client_key,omitempty"` + ExternalAuthorizationServiceDefaultLabel *string `url:"external_authorization_service_default_label,omitempty" json:"external_authorization_service_default_label,omitempty"` + ExternalAuthorizationServiceEnabled *bool `url:"external_authorization_service_enabled,omitempty" json:"external_authorization_service_enabled,omitempty"` + ExternalAuthorizationServiceTimeout *float64 `url:"external_authorization_service_timeout,omitempty" json:"external_authorization_service_timeout,omitempty"` + ExternalAuthorizationServiceURL *string `url:"external_authorization_service_url,omitempty" json:"external_authorization_service_url,omitempty"` + FileTemplateProjectID *int `url:"file_template_project_id,omitempty" json:"file_template_project_id,omitempty"` + FirstDayOfWeek *int `url:"first_day_of_week,omitempty" json:"first_day_of_week,omitempty"` + GeoNodeAllowedIPs *string `url:"geo_node_allowed_ips,omitempty" json:"geo_node_allowed_ips,omitempty"` + GeoStatusTimeout *int `url:"geo_status_timeout,omitempty" json:"geo_status_timeout,omitempty"` + GitalyTimeoutDefault *int 
`url:"gitaly_timeout_default,omitempty" json:"gitaly_timeout_default,omitempty"` + GitalyTimeoutFast *int `url:"gitaly_timeout_fast,omitempty" json:"gitaly_timeout_fast,omitempty"` + GitalyTimeoutMedium *int `url:"gitaly_timeout_medium,omitempty" json:"gitaly_timeout_medium,omitempty"` + GrafanaEnabled *bool `url:"grafana_enabled,omitempty" json:"grafana_enabled,omitempty"` + GrafanaURL *string `url:"grafana_url,omitempty" json:"grafana_url,omitempty"` + GravatarEnabled *bool `url:"gravatar_enabled,omitempty" json:"gravatar_enabled,omitempty"` + HashedStorageEnabled *bool `url:"hashed_storage_enabled,omitempty" json:"hashed_storage_enabled,omitempty"` + HelpPageHideCommercialContent *bool `url:"help_page_hide_commercial_content,omitempty" json:"help_page_hide_commercial_content,omitempty"` + HelpPageSupportURL *string `url:"help_page_support_url,omitempty" json:"help_page_support_url,omitempty"` + HelpPageText *string `url:"help_page_text,omitempty" json:"help_page_text,omitempty"` + HelpText *string `url:"help_text,omitempty" json:"help_text,omitempty"` + HideThirdPartyOffers *bool `url:"hide_third_party_offers,omitempty" json:"hide_third_party_offers,omitempty"` + HomePageURL *string `url:"home_page_url,omitempty" json:"home_page_url,omitempty"` + HousekeepingBitmapsEnabled *bool `url:"housekeeping_bitmaps_enabled,omitempty" json:"housekeeping_bitmaps_enabled,omitempty"` + HousekeepingEnabled *bool `url:"housekeeping_enabled,omitempty" json:"housekeeping_enabled,omitempty"` + HousekeepingFullRepackPeriod *int `url:"housekeeping_full_repack_period,omitempty" json:"housekeeping_full_repack_period,omitempty"` + HousekeepingGcPeriod *int `url:"housekeeping_gc_period,omitempty" json:"housekeeping_gc_period,omitempty"` + HousekeepingIncrementalRepackPeriod *int `url:"housekeeping_incremental_repack_period,omitempty" json:"housekeeping_incremental_repack_period,omitempty"` + HTMLEmailsEnabled *bool `url:"html_emails_enabled,omitempty" json:"html_emails_enabled,omitempty"` + ImportSources *[]string `url:"import_sources,omitempty" json:"import_sources,omitempty"` + InstanceStatisticsVisibilityPrivate *bool `url:"instance_statistics_visibility_private,omitempty" json:"instance_statistics_visibility_private,omitempty"` + LocalMarkdownVersion *int `url:"local_markdown_version,omitempty" json:"local_markdown_version,omitempty"` + MaxArtifactsSize *int `url:"max_artifacts_size,omitempty" json:"max_artifacts_size,omitempty"` + MaxAttachmentSize *int `url:"max_attachment_size,omitempty" json:"max_attachment_size,omitempty"` + MaxPagesSize *int `url:"max_pages_size,omitempty" json:"max_pages_size,omitempty"` + MetricsEnabled *bool `url:"metrics_enabled,omitempty" json:"metrics_enabled,omitempty"` + MetricsHost *string `url:"metrics_host,omitempty" json:"metrics_host,omitempty"` + MetricsMethodCallThreshold *int `url:"metrics_method_call_threshold,omitempty" json:"metrics_method_call_threshold,omitempty"` + MetricsPacketSize *int `url:"metrics_packet_size,omitempty" json:"metrics_packet_size,omitempty"` + MetricsPoolSize *int `url:"metrics_pool_size,omitempty" json:"metrics_pool_size,omitempty"` + MetricsPort *int `url:"metrics_port,omitempty" json:"metrics_port,omitempty"` + MetricsSampleInterval *int `url:"metrics_sample_interval,omitempty" json:"metrics_sample_interval,omitempty"` + MetricsTimeout *int `url:"metrics_timeout,omitempty" json:"metrics_timeout,omitempty"` + MirrorAvailable *bool `url:"mirror_available,omitempty" json:"mirror_available,omitempty"` + MirrorCapacityThreshold *int 
`url:"mirror_capacity_threshold,omitempty" json:"mirror_capacity_threshold,omitempty"` + MirrorMaxCapacity *int `url:"mirror_max_capacity,omitempty" json:"mirror_max_capacity,omitempty"` + MirrorMaxDelay *int `url:"mirror_max_delay,omitempty" json:"mirror_max_delay,omitempty"` + OutboundLocalRequestsWhitelist *[]string `url:"outbound_local_requests_whitelist,omitempty" json:"outbound_local_requests_whitelist,omitempty"` + PagesDomainVerificationEnabled *bool `url:"pages_domain_verification_enabled,omitempty" json:"pages_domain_verification_enabled,omitempty"` + PasswordAuthenticationEnabledForGit *bool `url:"password_authentication_enabled_for_git,omitempty" json:"password_authentication_enabled_for_git,omitempty"` + PasswordAuthenticationEnabledForWeb *bool `url:"password_authentication_enabled_for_web,omitempty" json:"password_authentication_enabled_for_web,omitempty"` + PerformanceBarAllowedGroupID *string `url:"performance_bar_allowed_group_id,omitempty" json:"performance_bar_allowed_group_id,omitempty"` + PerformanceBarAllowedGroupPath *string `url:"performance_bar_allowed_group_path,omitempty" json:"performance_bar_allowed_group_path,omitempty"` + PerformanceBarEnabled *bool `url:"performance_bar_enabled,omitempty" json:"performance_bar_enabled,omitempty"` + PlantumlEnabled *bool `url:"plantuml_enabled,omitempty" json:"plantuml_enabled,omitempty"` + PlantumlURL *string `url:"plantuml_url,omitempty" json:"plantuml_url,omitempty"` + PollingIntervalMultiplier *float64 `url:"polling_interval_multiplier,omitempty" json:"polling_interval_multiplier,omitempty"` + ProjectExportEnabled *bool `url:"project_export_enabled,omitempty" json:"project_export_enabled,omitempty"` + PrometheusMetricsEnabled *bool `url:"prometheus_metrics_enabled,omitempty" json:"prometheus_metrics_enabled,omitempty"` + ProtectedCIVariables *bool `url:"protected_ci_variables,omitempty" json:"protected_ci_variables,omitempty"` + PseudonymizerEnabled *bool `url:"psedonymizer_enabled,omitempty" json:"psedonymizer_enabled,omitempty"` + PushEventHooksLimit *int `url:"push_event_hooks_limit,omitempty" json:"push_event_hooks_limit,omitempty"` + PushEventActivitiesLimit *int `url:"push_event_activities_limit,omitempty" json:"push_event_activities_limit,omitempty"` + RecaptchaEnabled *bool `url:"recaptcha_enabled,omitempty" json:"recaptcha_enabled,omitempty"` + RecaptchaPrivateKey *string `url:"recaptcha_private_key,omitempty" json:"recaptcha_private_key,omitempty"` + RecaptchaSiteKey *string `url:"recaptcha_site_key,omitempty" json:"recaptcha_site_key,omitempty"` + ReceiveMaxInputSize *int `url:"receive_max_input_size,omitempty" json:"receive_max_input_size,omitempty"` + RepositoryChecksEnabled *bool `url:"repository_checks_enabled,omitempty" json:"repository_checks_enabled,omitempty"` + RepositorySizeLimit *int `url:"repository_size_limit,omitempty" json:"repository_size_limit,omitempty"` + RepositoryStorages *[]string `url:"repository_storages,omitempty" json:"repository_storages,omitempty"` + RequireTwoFactorAuthentication *bool `url:"require_two_factor_authentication,omitempty" json:"require_two_factor_authentication,omitempty"` + RestrictedVisibilityLevels *[]VisibilityValue `url:"restricted_visibility_levels,omitempty" json:"restricted_visibility_levels,omitempty"` + RsaKeyRestriction *int `url:"rsa_key_restriction,omitempty" json:"rsa_key_restriction,omitempty"` + SendUserConfirmationEmail *bool `url:"send_user_confirmation_email,omitempty" json:"send_user_confirmation_email,omitempty"` + SessionExpireDelay *int 
`url:"session_expire_delay,omitempty" json:"session_expire_delay,omitempty"` + SharedRunnersEnabled *bool `url:"shared_runners_enabled,omitempty" json:"shared_runners_enabled,omitempty"` + SharedRunnersMinutes *int `url:"shared_runners_minutes,omitempty" json:"shared_runners_minutes,omitempty"` + SharedRunnersText *string `url:"shared_runners_text,omitempty" json:"shared_runners_text,omitempty"` + SignInText *string `url:"sign_in_text,omitempty" json:"sign_in_text,omitempty"` + SignupEnabled *bool `url:"signup_enabled,omitempty" json:"signup_enabled,omitempty"` + SlackAppEnabled *bool `url:"slack_app_enabled,omitempty" json:"slack_app_enabled,omitempty"` + SlackAppID *string `url:"slack_app_id,omitempty" json:"slack_app_id,omitempty"` + SlackAppSecret *string `url:"slack_app_secret,omitempty" json:"slack_app_secret,omitempty"` + SlackAppVerificationToken *string `url:"slack_app_verification_token,omitempty" json:"slack_app_verification_token,omitempty"` + SnowplowCollectorHostname *string `url:"snowplow_collector_hostname,omitempty" json:"snowplow_collector_hostname,omitempty"` + SnowplowCookieDomain *string `url:"snowplow_cookie_domain,omitempty" json:"snowplow_cookie_domain,omitempty"` + SnowplowEnabled *bool `url:"snowplow_enabled,omitempty" json:"snowplow_enabled,omitempty"` + SnowplowSiteID *string `url:"snowplow_site_id,omitempty" json:"snowplow_site_id,omitempty"` + TerminalMaxSessionTime *int `url:"terminal_max_session_time,omitempty" json:"terminal_max_session_time,omitempty"` + Terms *string `url:"terms,omitempty" json:"terms,omitempty"` + ThrottleAuthenticatedAPIEnabled *bool `url:"throttle_authenticated_api_enabled,omitempty" json:"throttle_authenticated_api_enabled,omitempty"` + ThrottleAuthenticatedAPIPeriodInSeconds *int `url:"throttle_authenticated_api_period_in_seconds,omitempty" json:"throttle_authenticated_api_period_in_seconds,omitempty"` + ThrottleAuthenticatedAPIRequestsPerPeriod *int `url:"throttle_authenticated_api_requests_per_period,omitempty" json:"throttle_authenticated_api_requests_per_period,omitempty"` + ThrottleAuthenticatedWebEnabled *bool `url:"throttle_authenticated_web_enabled,omitempty" json:"throttle_authenticated_web_enabled,omitempty"` + ThrottleAuthenticatedWebPeriodInSeconds *int `url:"throttle_authenticated_web_period_in_seconds,omitempty" json:"throttle_authenticated_web_period_in_seconds,omitempty"` + ThrottleAuthenticatedWebRequestsPerPeriod *int `url:"throttle_authenticated_web_requests_per_period,omitempty" json:"throttle_authenticated_web_requests_per_period,omitempty"` + ThrottleUnauthenticatedEnabled *bool `url:"throttle_unauthenticated_enabled,omitempty" json:"throttle_unauthenticated_enabled,omitempty"` + ThrottleUnauthenticatedPeriodInSeconds *int `url:"throttle_unauthenticated_period_in_seconds,omitempty" json:"throttle_unauthenticated_period_in_seconds,omitempty"` + ThrottleUnauthenticatedRequestsPerPeriod *int `url:"throttle_unauthenticated_requests_per_period,omitempty" json:"throttle_unauthenticated_requests_per_period,omitempty"` + TimeTrackingLimitToHours *bool `url:"time_tracking_limit_to_hours,omitempty" json:"time_tracking_limit_to_hours,omitempty"` + TwoFactorGracePeriod *int `url:"two_factor_grace_period,omitempty" json:"two_factor_grace_period,omitempty"` + UniqueIPsLimitEnabled *bool `url:"unique_ips_limit_enabled,omitempty" json:"unique_ips_limit_enabled,omitempty"` + UniqueIPsLimitPerUser *int `url:"unique_ips_limit_per_user,omitempty" json:"unique_ips_limit_per_user,omitempty"` + UniqueIPsLimitTimeWindow *int 
`url:"unique_ips_limit_time_window,omitempty" json:"unique_ips_limit_time_window,omitempty"` + UsagePingEnabled *bool `url:"usage_ping_enabled,omitempty" json:"usage_ping_enabled,omitempty"` + UserDefaultExternal *bool `url:"user_default_external,omitempty" json:"user_default_external,omitempty"` + UserDefaultInternalRegex *string `url:"user_default_internal_regex,omitempty" json:"user_default_internal_regex,omitempty"` + UserOauthApplications *bool `url:"user_oauth_applications,omitempty" json:"user_oauth_applications,omitempty"` + UserShowAddSSHKeyMessage *bool `url:"user_show_add_ssh_key_message,omitempty" json:"user_show_add_ssh_key_message,omitempty"` + VersionCheckEnabled *bool `url:"version_check_enabled,omitempty" json:"version_check_enabled,omitempty"` + WebIDEClientsidePreviewEnabled *bool `url:"web_ide_clientside_preview_enabled,omitempty" json:"web_ide_clientside_preview_enabled,omitempty"` } // UpdateSettings updates the application settings. diff --git a/vendor/github.com/xanzy/go-gitlab/snippets.go b/vendor/github.com/xanzy/go-gitlab/snippets.go index 26468b7fd..26b8d4c1d 100644 --- a/vendor/github.com/xanzy/go-gitlab/snippets.go +++ b/vendor/github.com/xanzy/go-gitlab/snippets.go @@ -51,6 +51,10 @@ type Snippet struct { CreatedAt *time.Time `json:"created_at"` WebURL string `json:"web_url"` RawURL string `json:"raw_url"` + Files []struct { + Path string `json:"path"` + RawURL string `json:"raw_url"` + } `json:"files"` } func (s Snippet) String() string { @@ -101,6 +105,15 @@ func (s *SnippetsService) GetSnippet(snippet int, options ...RequestOptionFunc) return ps, resp, err } +// SnippetFile represents the object that is used to create snippets +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/snippets.html#create-new-snippet +type SnippetFile struct { + FilePath *string `url:"file_path,omitempty" json:"file_path,omitempty"` + Content *string `url:"content,omitempty" json:"content,omitempty"` +} + // CreateSnippetOptions represents the available CreateSnippet() options. // // GitLab API docs: @@ -111,6 +124,7 @@ type CreateSnippetOptions struct { Description *string `url:"description,omitempty" json:"description,omitempty"` Content *string `url:"content,omitempty" json:"content,omitempty"` Visibility *VisibilityValue `url:"visibility,omitempty" json:"visibility,omitempty"` + Files *[]*SnippetFile `url:"files,omitempty" json:"files,omitempty"` } // CreateSnippet creates a new snippet. The user must have permission @@ -216,7 +230,7 @@ type ExploreSnippetsOptions ListOptions // GitLab API docs: // https://docs.gitlab.com/ce/api/snippets.html#explore-all-public-snippets func (s *SnippetsService) ExploreSnippets(opt *ExploreSnippetsOptions, options ...RequestOptionFunc) ([]*Snippet, *Response, error) { - req, err := s.client.NewRequest(http.MethodGet, "snippets/public", nil, options) + req, err := s.client.NewRequest(http.MethodGet, "snippets/public", opt, options) if err != nil { return nil, nil, err } diff --git a/vendor/github.com/xanzy/go-gitlab/tags.go b/vendor/github.com/xanzy/go-gitlab/tags.go index e0eb3b1aa..7f2851d22 100644 --- a/vendor/github.com/xanzy/go-gitlab/tags.go +++ b/vendor/github.com/xanzy/go-gitlab/tags.go @@ -73,7 +73,7 @@ func (s *TagsService) ListTags(pid interface{}, opt *ListTagsOptions, options .. 
if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/tags", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/tags", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -99,7 +99,7 @@ func (s *TagsService) GetTag(pid interface{}, tag string, options ...RequestOpti if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/tags/%s", pathEscape(project), url.PathEscape(tag)) + u := fmt.Sprintf("projects/%s/repository/tags/%s", PathEscape(project), url.PathEscape(tag)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -136,7 +136,7 @@ func (s *TagsService) CreateTag(pid interface{}, opt *CreateTagOptions, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/tags", pathEscape(project)) + u := fmt.Sprintf("projects/%s/repository/tags", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -161,7 +161,7 @@ func (s *TagsService) DeleteTag(pid interface{}, tag string, options ...RequestO if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/repository/tags/%s", pathEscape(project), url.PathEscape(tag)) + u := fmt.Sprintf("projects/%s/repository/tags/%s", PathEscape(project), url.PathEscape(tag)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { @@ -193,7 +193,7 @@ func (s *TagsService) CreateReleaseNote(pid interface{}, tag string, opt *Create if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/tags/%s/release", pathEscape(project), url.PathEscape(tag)) + u := fmt.Sprintf("projects/%s/repository/tags/%s/release", PathEscape(project), url.PathEscape(tag)) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -228,7 +228,7 @@ func (s *TagsService) UpdateReleaseNote(pid interface{}, tag string, opt *Update if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/repository/tags/%s/release", pathEscape(project), url.PathEscape(tag)) + u := fmt.Sprintf("projects/%s/repository/tags/%s/release", PathEscape(project), url.PathEscape(tag)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/time_stats.go b/vendor/github.com/xanzy/go-gitlab/time_stats.go index dfbc1c897..74a4c3831 100644 --- a/vendor/github.com/xanzy/go-gitlab/time_stats.go +++ b/vendor/github.com/xanzy/go-gitlab/time_stats.go @@ -59,7 +59,7 @@ func (s *timeStatsService) setTimeEstimate(pid interface{}, entity string, issue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/%s/%d/time_estimate", pathEscape(project), entity, issue) + u := fmt.Sprintf("projects/%s/%s/%d/time_estimate", PathEscape(project), entity, issue) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -83,7 +83,7 @@ func (s *timeStatsService) resetTimeEstimate(pid interface{}, entity string, iss if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/%s/%d/reset_time_estimate", pathEscape(project), entity, issue) + u := fmt.Sprintf("projects/%s/%s/%d/reset_time_estimate", PathEscape(project), entity, issue) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -114,7 +114,7 @@ func (s *timeStatsService) addSpentTime(pid interface{}, entity string, issue in if err != nil { return nil, nil, err } - u := 
fmt.Sprintf("projects/%s/%s/%d/add_spent_time", pathEscape(project), entity, issue) + u := fmt.Sprintf("projects/%s/%s/%d/add_spent_time", PathEscape(project), entity, issue) req, err := s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -138,7 +138,7 @@ func (s *timeStatsService) resetSpentTime(pid interface{}, entity string, issue if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/%s/%d/reset_spent_time", pathEscape(project), entity, issue) + u := fmt.Sprintf("projects/%s/%s/%d/reset_spent_time", PathEscape(project), entity, issue) req, err := s.client.NewRequest(http.MethodPost, u, nil, options) if err != nil { @@ -162,7 +162,7 @@ func (s *timeStatsService) getTimeSpent(pid interface{}, entity string, issue in if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/%s/%d/time_stats", pathEscape(project), entity, issue) + u := fmt.Sprintf("projects/%s/%s/%d/time_stats", PathEscape(project), entity, issue) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/topics.go b/vendor/github.com/xanzy/go-gitlab/topics.go new file mode 100644 index 000000000..04f4473a0 --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/topics.go @@ -0,0 +1,152 @@ +// +// Copyright 2021, Sander van Harmelen +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/http" +) + +// TopicsService handles communication with the topics related methods +// of the GitLab API. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/topics.html +type TopicsService struct { + client *Client +} + +// Topic represents a GitLab project topic. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/topics.html +type Topic struct { + ID int `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + TotalProjectsCount uint64 `json:"total_projects_count"` + AvatarURL string `json:"avatar_url"` +} + +func (t Topic) String() string { + return Stringify(t) +} + +// ListTopicsOptions represents the available ListTopics() options. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/topics.html#list-topics +type ListTopicsOptions struct { + ListOptions + Search *string `url:"search,omitempty" json:"search,omitempty"` +} + +// ListTopics returns a list of project topics in the GitLab instance ordered +// by number of associated projects. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/topics.html#list-topics +func (s *TopicsService) ListTopics(opt *ListTopicsOptions, options ...RequestOptionFunc) ([]*Topic, *Response, error) { + req, err := s.client.NewRequest(http.MethodGet, "topics", opt, options) + if err != nil { + return nil, nil, err + } + + var t []*Topic + resp, err := s.client.Do(req, &t) + if err != nil { + return nil, resp, err + } + + return t, resp, err +} + +// GetTopic gets a project topic by ID. 
+// +// GitLab API docs: https://docs.gitlab.com/ee/api/topics.html#get-a-topic +func (s *TopicsService) GetTopic(topic int, options ...RequestOptionFunc) (*Topic, *Response, error) { + u := fmt.Sprintf("topics/%d", topic) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + t := new(Topic) + resp, err := s.client.Do(req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, err +} + +// CreateTopicOptions represents the available CreateTopic() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/topics.html#create-a-project-topic +type CreateTopicOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + Description *string `url:"description,omitempty" json:"description,omitempty"` + // Avatar *string `url:"avatar,omitempty" json:"avatar,omitempty"` +} + +// CreateTopic creates a new project topic. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/topics.html#create-a-project-topic +func (s *TopicsService) CreateTopic(opt *CreateTopicOptions, options ...RequestOptionFunc) (*Topic, *Response, error) { + req, err := s.client.NewRequest(http.MethodPost, "topics", opt, options) + if err != nil { + return nil, nil, err + } + + t := new(Topic) + resp, err := s.client.Do(req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, err +} + +// UpdateTopicOptions represents the available UpdateTopic() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/topics.html#update-a-project-topic +type UpdateTopicOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + Description *string `url:"description,omitempty" json:"description,omitempty"` + // Avatar *string `url:"avatar,omitempty" json:"avatar,omitempty"` +} + +// UpdateTopic updates a project topic. Only available to administrators. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/topics.html#update-a-project-topic +func (s *TopicsService) UpdateTopic(topic int, opt *UpdateTopicOptions, options ...RequestOptionFunc) (*Topic, *Response, error) { + u := fmt.Sprintf("topics/%d", topic) + + req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + if err != nil { + return nil, nil, err + } + + t := new(Topic) + resp, err := s.client.Do(req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, err +} diff --git a/vendor/github.com/xanzy/go-gitlab/types.go b/vendor/github.com/xanzy/go-gitlab/types.go index 679cb9ef1..b851b984c 100644 --- a/vendor/github.com/xanzy/go-gitlab/types.go +++ b/vendor/github.com/xanzy/go-gitlab/types.go @@ -21,6 +21,7 @@ import ( "errors" "fmt" "net/url" + "strconv" "time" ) @@ -78,6 +79,127 @@ func AccessLevel(v AccessLevelValue) *AccessLevelValue { return p } +// UserIDValue represents a user ID value within GitLab. +type UserIDValue string + +// List of available user ID values. +const ( + UserIDAny UserIDValue = "Any" + UserIDNone UserIDValue = "None" +) + +// ApproverIDsValue represents an approver ID value within GitLab. +type ApproverIDsValue struct { + value interface{} +} + +// ApproverIDs is a helper routine that creates a new ApproverIDsValue. 
+func ApproverIDs(v interface{}) *ApproverIDsValue { + switch v.(type) { + case UserIDValue, []int: + return &ApproverIDsValue{value: v} + default: + panic("Unsupported value passed as approver ID") + } +} + +// EncodeValues implements the query.Encoder interface +func (a *ApproverIDsValue) EncodeValues(key string, v *url.Values) error { + switch value := a.value.(type) { + case UserIDValue: + v.Set(key, string(value)) + case []int: + v.Del(key) + v.Del(key + "[]") + for _, id := range value { + v.Add(key+"[]", strconv.Itoa(id)) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaler interface +func (a *ApproverIDsValue) MarshalJSON() ([]byte, error) { + return json.Marshal(a.value) +} + +// UnmarshalJSON implements the json.Unmarshaler interface +func (a *ApproverIDsValue) UnmarshalJSON(bytes []byte) error { + return json.Unmarshal(bytes, a.value) +} + +// AssigneeIDValue represents an assignee ID value within GitLab. +type AssigneeIDValue struct { + value interface{} +} + +// AssigneeID is a helper routine that creates a new AssigneeIDValue. +func AssigneeID(v interface{}) *AssigneeIDValue { + switch v.(type) { + case UserIDValue, int: + return &AssigneeIDValue{value: v} + default: + panic("Unsupported value passed as assignee ID") + } +} + +// EncodeValues implements the query.Encoder interface +func (a *AssigneeIDValue) EncodeValues(key string, v *url.Values) error { + switch value := a.value.(type) { + case UserIDValue: + v.Set(key, string(value)) + case int: + v.Set(key, strconv.Itoa(value)) + } + return nil +} + +// MarshalJSON implements the json.Marshaler interface +func (a *AssigneeIDValue) MarshalJSON() ([]byte, error) { + return json.Marshal(a.value) +} + +// UnmarshalJSON implements the json.Unmarshaler interface +func (a *AssigneeIDValue) UnmarshalJSON(bytes []byte) error { + return json.Unmarshal(bytes, a.value) +} + +// ReviewerIDValue represents a reviewer ID value within GitLab. +type ReviewerIDValue struct { + value interface{} +} + +// ReviewerID is a helper routine that creates a new ReviewerIDValue. +func ReviewerID(v interface{}) *ReviewerIDValue { + switch v.(type) { + case UserIDValue, int: + return &ReviewerIDValue{value: v} + default: + panic("Unsupported value passed as reviewer ID") + } +} + +// EncodeValues implements the query.Encoder interface +func (a *ReviewerIDValue) EncodeValues(key string, v *url.Values) error { + switch value := a.value.(type) { + case UserIDValue: + v.Set(key, string(value)) + case int: + v.Set(key, strconv.Itoa(value)) + } + return nil +} + +// MarshalJSON implements the json.Marshaler interface +func (a *ReviewerIDValue) MarshalJSON() ([]byte, error) { + return json.Marshal(a.value) +} + +// UnmarshalJSON implements the json.Unmarshaler interface +func (a *ReviewerIDValue) UnmarshalJSON(bytes []byte) error { + return json.Unmarshal(bytes, a.value) +} + // AvailabilityValue represents an availability value within GitLab. type AvailabilityValue string @@ -556,6 +678,15 @@ const ( TodoTargetMergeRequest TodoTargetType = "MergeRequest" ) +// UploadType represents the available upload types. +type UploadType string + +// The available upload types. +const ( + UploadAvatar UploadType = "avatar" + UploadFile UploadType = "file" +) + // VariableTypeValue represents a variable type within GitLab. 
// // GitLab API docs: https://docs.gitlab.com/ce/api/ @@ -628,9 +759,8 @@ func Bool(v bool) *bool { return p } -// Int is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it, but unlike Int32 -// its argument value is an int. +// Int is a helper routine that allocates a new int value +// to store v and returns a pointer to it. func Int(v int) *int { p := new(int) *p = v diff --git a/vendor/github.com/xanzy/go-gitlab/users.go b/vendor/github.com/xanzy/go-gitlab/users.go index d179a09b9..c16b07e76 100644 --- a/vendor/github.com/xanzy/go-gitlab/users.go +++ b/vendor/github.com/xanzy/go-gitlab/users.go @@ -19,6 +19,7 @@ package gitlab import ( "errors" "fmt" + "net" "net/http" "time" ) @@ -85,7 +86,9 @@ type User struct { CanCreateProject bool `json:"can_create_project"` ProjectsLimit int `json:"projects_limit"` CurrentSignInAt *time.Time `json:"current_sign_in_at"` + CurrentSignInIP *net.IP `json:"current_sign_in_ip"` LastSignInAt *time.Time `json:"last_sign_in_at"` + LastSignInIP *net.IP `json:"last_sign_in_ip"` ConfirmedAt *time.Time `json:"confirmed_at"` TwoFactorEnabled bool `json:"two_factor_enabled"` Note string `json:"note"` @@ -393,6 +396,7 @@ type SSHKey struct { Title string `json:"title"` Key string `json:"key"` CreatedAt *time.Time `json:"created_at"` + ExpiresAt *time.Time `json:"expires_at"` } // ListSSHKeys gets a list of currently authenticated user's SSH keys. @@ -419,13 +423,16 @@ func (s *UsersService) ListSSHKeys(options ...RequestOptionFunc) ([]*SSHKey, *Re // https://docs.gitlab.com/ce/api/users.html#list-ssh-keys-for-user type ListSSHKeysForUserOptions ListOptions -// ListSSHKeysForUser gets a list of a specified user's SSH keys. Available -// only for admin +// ListSSHKeysForUser gets a list of a specified user's SSH keys. // // GitLab API docs: // https://docs.gitlab.com/ce/api/users.html#list-ssh-keys-for-user -func (s *UsersService) ListSSHKeysForUser(user int, opt *ListSSHKeysForUserOptions, options ...RequestOptionFunc) ([]*SSHKey, *Response, error) { - u := fmt.Sprintf("users/%d/keys", user) +func (s *UsersService) ListSSHKeysForUser(uid interface{}, opt *ListSSHKeysForUserOptions, options ...RequestOptionFunc) ([]*SSHKey, *Response, error) { + user, err := parseID(uid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("users/%s/keys", user) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -542,9 +549,172 @@ func (s *UsersService) DeleteSSHKeyForUser(user, key int, options ...RequestOpti return s.client.Do(req, nil) } +// GPGKey represents a GPG key. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#list-all-gpg-keys +type GPGKey struct { + ID int `json:"id"` + Key string `json:"key"` + CreatedAt *time.Time `json:"created_at"` +} + +// ListGPGKeys gets a list of currently authenticated user’s GPG keys. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#list-all-gpg-keys +func (s *UsersService) ListGPGKeys(options ...RequestOptionFunc) ([]*GPGKey, *Response, error) { + req, err := s.client.NewRequest(http.MethodGet, "user/gpg_keys", nil, options) + if err != nil { + return nil, nil, err + } + + var ks []*GPGKey + resp, err := s.client.Do(req, &ks) + if err != nil { + return nil, resp, err + } + + return ks, resp, err +} + +// GetGPGKey gets a specific GPG key of currently authenticated user. 
+// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#get-a-specific-gpg-key +func (s *UsersService) GetGPGKey(key int, options ...RequestOptionFunc) (*GPGKey, *Response, error) { + u := fmt.Sprintf("users/gpg_keys/%d", key) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + k := new(GPGKey) + resp, err := s.client.Do(req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, err +} + +// AddGPGKeyOptions represents the available AddGPGKey() options. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#add-a-gpg-key +type AddGPGKeyOptions struct { + Key *string `url:"key,omitempty" json:"key,omitempty"` +} + +// AddGPGKey creates a new GPG key owned by the currently authenticated user. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#add-a-gpg-key +func (s *UsersService) AddGPGKey(opt *AddGPGKeyOptions, options ...RequestOptionFunc) (*GPGKey, *Response, error) { + req, err := s.client.NewRequest(http.MethodPost, "user/gpg_keys", opt, options) + if err != nil { + return nil, nil, err + } + + k := new(GPGKey) + resp, err := s.client.Do(req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, err +} + +// DeleteGPGKey deletes a GPG key owned by currently authenticated user. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#delete-a-gpg-key +func (s *UsersService) DeleteGPGKey(key int, options ...RequestOptionFunc) (*Response, error) { + u := fmt.Sprintf("users/gpg_keys/%d", key) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} + +// ListGPGKeysForUser gets a list of a specified user’s GPG keys. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/users.html#list-all-gpg-keys-for-given-user +func (s *UsersService) ListGPGKeysForUser(user int, options ...RequestOptionFunc) ([]*GPGKey, *Response, error) { + u := fmt.Sprintf("users/%d/gpg_keys", user) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + var ks []*GPGKey + resp, err := s.client.Do(req, &ks) + if err != nil { + return nil, resp, err + } + + return ks, resp, err +} + +// GetGPGKeyForUser gets a specific GPG key for a given user. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#get-a-specific-gpg-key-for-a-given-user +func (s *UsersService) GetGPGKeyForUser(user, key int, options ...RequestOptionFunc) (*GPGKey, *Response, error) { + u := fmt.Sprintf("users/%d/gpg_keys/%d", user, key) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + k := new(GPGKey) + resp, err := s.client.Do(req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, err +} + +// AddGPGKeyForUser creates new GPG key owned by the specified user. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/users.html#add-a-gpg-key-for-a-given-user +func (s *UsersService) AddGPGKeyForUser(user int, options ...RequestOptionFunc) (*GPGKey, *Response, error) { + u := fmt.Sprintf("users/%d/gpg_keys", user) + + req, err := s.client.NewRequest(http.MethodPost, u, nil, options) + if err != nil { + return nil, nil, err + } + + k := new(GPGKey) + resp, err := s.client.Do(req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, err +} + +// DeleteGPGKeyForUser deletes a GPG key owned by a specified user. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/users.html#delete-a-gpg-key-for-a-given-user +func (s *UsersService) DeleteGPGKeyForUser(user, key int, options ...RequestOptionFunc) (*Response, error) { + u := fmt.Sprintf("users/%d/gpg_keys/%d", user, key) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} + // Email represents an Email. // -// GitLab API docs: https://doc.gitlab.com/ce/api/users.html#list-emails +// GitLab API docs: https://docs.gitlab.com/ce/api/users.html#list-emails type Email struct { ID int `json:"id"` Email string `json:"email"` @@ -1001,9 +1171,9 @@ type PersonalAccessToken struct { // GitLab API docs: // https://docs.gitlab.com/ee/api/users.html#create-a-personal-access-token type CreatePersonalAccessTokenOptions struct { - Name *string `url:"name,omitempty" json:"name,omitempty"` - ExpiresAt *ISOTime `url:"expires_at,omitempty" json:"expires_at,omitempty"` - Scopes []string `url:"scopes,omitempty" json:"scopes,omitempty"` + Name *string `url:"name,omitempty" json:"name,omitempty"` + ExpiresAt *ISOTime `url:"expires_at,omitempty" json:"expires_at,omitempty"` + Scopes *[]string `url:"scopes,omitempty" json:"scopes,omitempty"` } // CreatePersonalAccessToken creates a personal access token. diff --git a/vendor/github.com/xanzy/go-gitlab/validate.go b/vendor/github.com/xanzy/go-gitlab/validate.go index d922d7c5c..75b0c7503 100644 --- a/vendor/github.com/xanzy/go-gitlab/validate.go +++ b/vendor/github.com/xanzy/go-gitlab/validate.go @@ -91,7 +91,7 @@ func (s *ValidateService) ProjectNamespaceLint(pid interface{}, opt *ProjectName if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/ci/lint", pathEscape(project)) + u := fmt.Sprintf("projects/%s/ci/lint", PathEscape(project)) req, err := s.client.NewRequest(http.MethodPost, u, &opt, options) if err != nil { @@ -124,7 +124,7 @@ func (s *ValidateService) ProjectLint(pid interface{}, opt *ProjectLintOptions, if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/ci/lint", pathEscape(project)) + u := fmt.Sprintf("projects/%s/ci/lint", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, &opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/wikis.go b/vendor/github.com/xanzy/go-gitlab/wikis.go index 7437cc3c7..113995640 100644 --- a/vendor/github.com/xanzy/go-gitlab/wikis.go +++ b/vendor/github.com/xanzy/go-gitlab/wikis.go @@ -61,7 +61,7 @@ func (s *WikisService) ListWikis(pid interface{}, opt *ListWikisOptions, options if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/wikis", pathEscape(project)) + u := fmt.Sprintf("projects/%s/wikis", PathEscape(project)) req, err := s.client.NewRequest(http.MethodGet, u, opt, options) if err != nil { @@ -86,7 +86,7 @@ func (s *WikisService) GetWikiPage(pid interface{}, slug string, options ...Requ if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/wikis/%s", pathEscape(project), url.PathEscape(slug)) + u := fmt.Sprintf("projects/%s/wikis/%s", PathEscape(project), url.PathEscape(slug)) req, err := s.client.NewRequest(http.MethodGet, u, nil, options) if err != nil { @@ -122,7 +122,7 @@ func (s *WikisService) CreateWikiPage(pid interface{}, opt *CreateWikiPageOption if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/wikis", pathEscape(project)) + u := fmt.Sprintf("projects/%s/wikis", PathEscape(project)) req, err := 
s.client.NewRequest(http.MethodPost, u, opt, options) if err != nil { @@ -158,7 +158,7 @@ func (s *WikisService) EditWikiPage(pid interface{}, slug string, opt *EditWikiP if err != nil { return nil, nil, err } - u := fmt.Sprintf("projects/%s/wikis/%s", pathEscape(project), url.PathEscape(slug)) + u := fmt.Sprintf("projects/%s/wikis/%s", PathEscape(project), url.PathEscape(slug)) req, err := s.client.NewRequest(http.MethodPut, u, opt, options) if err != nil { @@ -183,7 +183,7 @@ func (s *WikisService) DeleteWikiPage(pid interface{}, slug string, options ...R if err != nil { return nil, err } - u := fmt.Sprintf("projects/%s/wikis/%s", pathEscape(project), url.PathEscape(slug)) + u := fmt.Sprintf("projects/%s/wikis/%s", PathEscape(project), url.PathEscape(slug)) req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) if err != nil { diff --git a/vendor/github.com/yagipy/maintidx/.gitignore b/vendor/github.com/yagipy/maintidx/.gitignore new file mode 100644 index 000000000..a676215fa --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/.gitignore @@ -0,0 +1,2 @@ +.idea +bin diff --git a/vendor/github.com/yagipy/maintidx/LICENSE b/vendor/github.com/yagipy/maintidx/LICENSE new file mode 100644 index 000000000..b94c2ede8 --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Hiroyuki Yagihashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/yagipy/maintidx/Makefile b/vendor/github.com/yagipy/maintidx/Makefile new file mode 100644 index 000000000..14b8fc979 --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/Makefile @@ -0,0 +1,2 @@ +build: + go build -o bin/maintidx ./cmd/maintidx diff --git a/vendor/github.com/yagipy/maintidx/README.md b/vendor/github.com/yagipy/maintidx/README.md new file mode 100644 index 000000000..8d5e26df0 --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/README.md @@ -0,0 +1,45 @@ +# maintidx +`maintidx` measures the maintainability index of each function. +https://docs.microsoft.com/en-us/visualstudio/code-quality/code-metrics-maintainability-index-range-and-meaning + +## Installation +### Go version < 1.16 +```shell +go get -u github.com/yagipy/maintidx/cmd/maintidx +``` + +### Go version 1.16+ +```shell +go install github.com/yagipy/maintidx/cmd/maintidx +``` + +## Usage +### standalone +```shell +maintidx ./... +``` + +### with go run +No installation required +```shell +go run github.com/yagipy/maintidx/cmd/maintidx ./... 
+``` + +### with go vet +```shell +go vet -vettool=`which maintidx` ./... +``` + +## Flag +```shell +Flags: + -under int + show functions with maintainability index < N only. (default 20) +``` + +## TODO +- [ ] Setup execute env on container +- [ ] Impl cyc.Cyc.Calc() +- [ ] Move maintidx.Visitor.PrintHalstVol to halstval package +- [ ] Consider the necessity of halstvol.incrIfAllTrue +- [ ] Test under pkg file diff --git a/vendor/github.com/yagipy/maintidx/go.mod b/vendor/github.com/yagipy/maintidx/go.mod new file mode 100644 index 000000000..6f216f80c --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/go.mod @@ -0,0 +1,11 @@ +module github.com/yagipy/maintidx + +go 1.17 + +require golang.org/x/tools v0.1.8 + +require ( + golang.org/x/mod v0.5.1 // indirect + golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect +) diff --git a/vendor/github.com/yagipy/maintidx/go.sum b/vendor/github.com/yagipy/maintidx/go.sum new file mode 100644 index 000000000..b0bad9f67 --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/go.sum @@ -0,0 +1,28 @@ +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.8 h1:P1HhGGuLW4aAclzjtmJdf0mJOjVUZUzOTqkAkWL+l6w= +golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/xerrors 
v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/yagipy/maintidx/maintidx.go b/vendor/github.com/yagipy/maintidx/maintidx.go new file mode 100644 index 000000000..31ad9ca0c --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/maintidx.go @@ -0,0 +1,63 @@ +package maintidx + +import ( + "go/ast" + "go/token" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const doc = "maintidx measures the maintainability index of each function." + +var Analyzer = &analysis.Analyzer{ + Name: "maintidx", + Doc: doc, + Run: run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, +} + +var under int + +func init() { + Analyzer.Flags.IntVar(&under, "under", 20, "show functions with maintainability index < N only.") +} + +func run(pass *analysis.Pass) (interface{}, error) { + i := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + i.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.FuncDecl: + v := analyze(n) + + v.Coef.Cyc.Calc() + v.Coef.HalstVol.Calc() + v.calc(loc(pass.Fset, n)) + if v.MaintIdx < under { + pass.Reportf(n.Pos(), "Function name: %v, Cyclomatic Complexity: %v, Halstead Volume: %0.2f, Maintainability Index: %v", n.Name, v.Coef.Cyc.Val, v.Coef.HalstVol.Val, v.MaintIdx) + } + } + }) + + return nil, nil +} + +func analyze(n ast.Node) Visitor { + v := NewVisitor() + ast.Walk(v, n) + return *v +} + +func loc(fs *token.FileSet, n *ast.FuncDecl) int { + f := fs.File(n.Pos()) + startLine := f.Line(n.Pos()) + endLine := f.Line(n.End()) + return endLine - startLine + 1 +} diff --git a/vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go b/vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go new file mode 100644 index 000000000..9ea009106 --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go @@ -0,0 +1,36 @@ +package cyc + +import ( + "go/ast" + "go/token" +) + +type Cyc struct { + Val int + Coef Coef +} + +type Coef struct{} + +func (c *Cyc) Analyze(n ast.Node) { + switch n := n.(type) { + case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt: + c.Val++ + case *ast.CaseClause: + if n.List != nil { + c.Val++ + } + case *ast.CommClause: + if n.Comm != nil { + c.Val++ + } + case *ast.BinaryExpr: + if n.Op == token.LAND || n.Op == token.LOR { + c.Val++ + } + } +} + +// TODO: Implement +func (c *Cyc) Calc() { +} diff --git a/vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go b/vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go new file mode 100644 index 000000000..f0212759b --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go @@ -0,0 +1,71 @@ +package halstvol + +import ( + "go/ast" + "math" +) + +type HalstVol struct { + Val float64 + Coef Coef +} + +type Coef struct { + Opt map[string]int + Opd map[string]int +} + +func (v *HalstVol) Analyze(n ast.Node) { + switch n := n.(type) { + case *ast.FuncDecl, *ast.GenDecl: + v.handleDecl(n) + case *ast.ParenExpr, *ast.IndexExpr, *ast.SliceExpr, *ast.TypeAssertExpr, *ast.CallExpr, *ast.StarExpr, *ast.UnaryExpr, *ast.BinaryExpr, *ast.KeyValueExpr: + 
v.handleExpr(n) + case *ast.BasicLit, *ast.CompositeLit: + v.handleLit(n) + case *ast.Ident: + v.handleIdent(n) + case *ast.Ellipsis: + incrIfAllTrue(v.Coef.Opt, "...", []bool{n.Ellipsis.IsValid()}) + case *ast.FuncType: + incrIfAllTrue(v.Coef.Opt, "func", []bool{n.Func.IsValid()}) + v.Coef.Opt["()"]++ + case *ast.ChanType: + incrIfAllTrue(v.Coef.Opt, "chan", []bool{n.Begin.IsValid()}) + incrIfAllTrue(v.Coef.Opt, "<-", []bool{n.Arrow.IsValid()}) + case *ast.SendStmt, *ast.IncDecStmt, *ast.AssignStmt, *ast.GoStmt, *ast.DeferStmt, *ast.ReturnStmt, *ast.BranchStmt, *ast.BlockStmt, *ast.IfStmt, *ast.SwitchStmt, *ast.SelectStmt, *ast.ForStmt, *ast.RangeStmt: + v.handleStmt(n) + case *ast.CaseClause: + v.handleCaseClause(n) + } +} + +func (v *HalstVol) Calc() { + distOpt := len(v.Coef.Opt) + distOpd := len(v.Coef.Opd) + + var sumOpt, sumOpd int + + for _, val := range v.Coef.Opt { + sumOpt += val + } + + for _, val := range v.Coef.Opd { + sumOpd += val + } + + vocab := distOpt + distOpd + length := sumOpt + sumOpd + + v.Val = float64(length) * math.Log2(float64(vocab)) +} + +// TODO: Consider the necessity +func incrIfAllTrue(coef map[string]int, sym string, cond []bool) { + for _, ok := range cond { + if !ok { + return + } + } + coef[sym]++ +} diff --git a/vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go b/vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go new file mode 100644 index 000000000..9f5e33500 --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go @@ -0,0 +1,151 @@ +package halstvol + +import "go/ast" + +func (v *HalstVol) handleDecl(decl ast.Node) { + switch n := decl.(type) { + case *ast.FuncDecl: + if n.Recv == nil { + // In the case of receiver functions, the function name is incremented in *ast.Ident + v.Coef.Opt[n.Name.Name]++ + } else { + v.Coef.Opt["()"]++ + } + case *ast.GenDecl: + if n.Lparen.IsValid() && n.Rparen.IsValid() { + v.Coef.Opt["()"]++ + } + + if n.Tok.IsOperator() { + v.Coef.Opt[n.Tok.String()]++ + } else { + v.Coef.Opd[n.Tok.String()]++ + } + } +} + +func (v *HalstVol) handleIdent(ident *ast.Ident) { + if ident.Obj == nil { + v.Coef.Opt[ident.Name]++ + } else { + if ident.Obj.Kind.String() != "func" { + v.Coef.Opd[ident.Name]++ + } + } +} + +func (v *HalstVol) handleLit(lit ast.Node) { + switch n := lit.(type) { + case *ast.BasicLit: + if n.Kind.IsLiteral() { + v.Coef.Opd[n.Value]++ + } else { + v.Coef.Opt[n.Value]++ + } + case *ast.CompositeLit: + incrIfAllTrue(v.Coef.Opt, "{}", []bool{n.Lbrace.IsValid(), n.Rbrace.IsValid()}) + } +} + +func (v *HalstVol) handleExpr(expr ast.Node) { + switch n := expr.(type) { + case *ast.ParenExpr: + incrIfAllTrue(v.Coef.Opt, "()", []bool{n.Lparen.IsValid(), n.Rparen.IsValid()}) + case *ast.IndexExpr: + incrIfAllTrue(v.Coef.Opt, "{}", []bool{n.Lbrack.IsValid(), n.Rbrack.IsValid()}) + case *ast.SliceExpr: + incrIfAllTrue(v.Coef.Opt, "[]", []bool{n.Lbrack.IsValid(), n.Rbrack.IsValid()}) + case *ast.TypeAssertExpr: + incrIfAllTrue(v.Coef.Opt, "()", []bool{n.Lparen.IsValid(), n.Rparen.IsValid()}) + case *ast.CallExpr: + incrIfAllTrue(v.Coef.Opt, "()", []bool{n.Lparen.IsValid(), n.Rparen.IsValid()}) + incrIfAllTrue(v.Coef.Opt, "...", []bool{n.Ellipsis != 0}) + case *ast.StarExpr: + incrIfAllTrue(v.Coef.Opt, "*", []bool{n.Star.IsValid()}) + case *ast.UnaryExpr: + if n.Op.IsOperator() { + v.Coef.Opt[n.Op.String()]++ + } else { + v.Coef.Opd[n.Op.String()]++ + } + case *ast.BinaryExpr: + v.Coef.Opt[n.Op.String()]++ + case *ast.KeyValueExpr: + incrIfAllTrue(v.Coef.Opt, ":", []bool{n.Colon.IsValid()}) + 
} +} + +func (v *HalstVol) handleStmt(stmt ast.Node) { + switch n := stmt.(type) { + case *ast.SendStmt: + incrIfAllTrue(v.Coef.Opt, "<-", []bool{n.Arrow.IsValid()}) + case *ast.IncDecStmt: + incrIfAllTrue(v.Coef.Opt, n.Tok.String(), []bool{n.Tok.IsOperator()}) + case *ast.AssignStmt: + if n.Tok.IsOperator() { + v.Coef.Opt[n.Tok.String()]++ + } + case *ast.GoStmt: + if n.Go.IsValid() { + v.Coef.Opt["go"]++ + } + case *ast.DeferStmt: + if n.Defer.IsValid() { + v.Coef.Opt["defer"]++ + } + case *ast.ReturnStmt: + if n.Return.IsValid() { + v.Coef.Opt["return"]++ + } + case *ast.BranchStmt: + if n.Tok.IsOperator() { + v.Coef.Opt[n.Tok.String()]++ + } else { + v.Coef.Opd[n.Tok.String()]++ + } + case *ast.BlockStmt: + if n.Lbrace.IsValid() && n.Rbrace.IsValid() { + v.Coef.Opt["{}"]++ + } + case *ast.IfStmt: + if n.If.IsValid() { + v.Coef.Opt["if"]++ + } + if n.Else != nil { + v.Coef.Opt["else"]++ + } + case *ast.SwitchStmt: + if n.Switch.IsValid() { + v.Coef.Opt["switch"]++ + } + case *ast.SelectStmt: + if n.Select.IsValid() { + v.Coef.Opt["select"]++ + } + case *ast.ForStmt: + if n.For.IsValid() { + v.Coef.Opt["for"]++ + } + case *ast.RangeStmt: + if n.For.IsValid() { + v.Coef.Opt["for"]++ + } + if n.Key != nil { + if n.Tok.IsOperator() { + v.Coef.Opt[n.Tok.String()]++ + } else { + v.Coef.Opd[n.Tok.String()]++ + } + } + v.Coef.Opt["range"]++ + } +} + +func (v *HalstVol) handleCaseClause(cc *ast.CaseClause) { + if cc.List == nil { + v.Coef.Opt["default"]++ + } + if cc.Colon.IsValid() { + v.Coef.Opt[":"]++ + } +} diff --git a/vendor/github.com/yagipy/maintidx/visitor.go b/vendor/github.com/yagipy/maintidx/visitor.go new file mode 100644 index 000000000..e6f74c50d --- /dev/null +++ b/vendor/github.com/yagipy/maintidx/visitor.go @@ -0,0 +1,77 @@ +package maintidx + +import ( + "github.com/yagipy/maintidx/pkg/cyc" + "github.com/yagipy/maintidx/pkg/halstvol" + "go/ast" + "math" + "sort" +) + +type Visitor struct { + MaintIdx int + Coef Coef +} + +var _ ast.Visitor = &Visitor{} + +type Coef struct { + Cyc cyc.Cyc + HalstVol halstvol.HalstVol +} + +func NewVisitor() *Visitor { + return &Visitor{ + MaintIdx: 0, + Coef: Coef{ + Cyc: cyc.Cyc{ + Val: 1, + Coef: cyc.Coef{}, + }, + HalstVol: halstvol.HalstVol{ + Val: 0.0, + Coef: halstvol.Coef{ + Opt: map[string]int{}, + Opd: map[string]int{}, + }, + }, + }, + } +} + +func (v *Visitor) Visit(n ast.Node) ast.Visitor { + v.Coef.Cyc.Analyze(n) + v.Coef.HalstVol.Analyze(n) + return v +} + +// Calc https://docs.microsoft.com/ja-jp/archive/blogs/codeanalysis/maintainability-index-range-and-meaning +func (v *Visitor) calc(loc int) { + origVal := 171.0 - 5.2*math.Log(v.Coef.HalstVol.Val) - 0.23*float64(v.Coef.Cyc.Val) - 16.2*math.Log(float64(loc)) + normVal := int(math.Max(0.0, origVal*100.0/171.0)) + v.MaintIdx = normVal +} + +// TODO: Move halstvol package +func (v *Visitor) printHalstVol() { + sortedOpt := make([]string, len(v.Coef.HalstVol.Coef.Opt)) + sortedOpd := make([]string, len(v.Coef.HalstVol.Coef.Opd)) + optIndex := 0 + opdIndex := 0 + for key := range v.Coef.HalstVol.Coef.Opt { + sortedOpt[optIndex] = key + optIndex++ + } + for key := range v.Coef.HalstVol.Coef.Opd { + sortedOpd[opdIndex] = key + opdIndex++ + } + sort.Strings(sortedOpt) + sort.Strings(sortedOpd) + for _, val := range sortedOpt { + println("operators", val, v.Coef.HalstVol.Coef.Opt[val]) + } + for _, val := range sortedOpd { + println("operands", val, v.Coef.HalstVol.Coef.Opd[val]) + } +} diff --git a/vendor/github.com/yeya24/promlinter/README.md 
b/vendor/github.com/yeya24/promlinter/README.md index c7e664103..4fbffaa6b 100644 --- a/vendor/github.com/yeya24/promlinter/README.md +++ b/vendor/github.com/yeya24/promlinter/README.md @@ -2,10 +2,18 @@ A linter for checking Prometheus metrics name via promlint. -![example](assets/example.png) +![usage](assets/promlinter.gif) ## Installation +### Go Get + +go get github.com/yeya24/promlinter/cmd/promlinter + +### Download from release + +Please go to https://github.com/yeya24/promlinter/releases. + ### Build from source #### Requirements @@ -20,16 +28,10 @@ make build Then you can find the `promlinter` binary file in the `./bin` directory. -### Download from release - -TBD - ## Usage ``` bash -promlinter -h - -usage: promlinter [] [...] +usage: promlinter [] [ ...] Prometheus metrics linter for Go code. @@ -56,14 +58,18 @@ It is also supported to disable the lint functions using repeated flag --disable [UnitAbbreviations]: UnitAbbreviations detects abbreviated units in the metric name. Flags: - -h, --help Show context-sensitive help (also try --help-long and --help-man). - --version Show application version. - -s, --strict Strict mode. If true, linter will output more issues including parsing failures. - -d, --disable=DISABLE ... Disable lint functions (repeated).Supported options: Help, Counter, MetricUnits, HistogramSummaryReserved, MetricTypeInName, - ReservedChars, CamelCase, UnitAbbreviations + -h, --help Show context-sensitive help (also try --help-long and --help-man). + --version Show application version. -Args: - [] Files to lint. +Commands: + help [...] + Show help. + + list [] [...] + List metrics name. + + lint [] [...] + Lint metrics via promlint. ``` diff --git a/vendor/github.com/yeya24/promlinter/promlinter.go b/vendor/github.com/yeya24/promlinter/promlinter.go index 898336a6f..2ed4d60a8 100644 --- a/vendor/github.com/yeya24/promlinter/promlinter.go +++ b/vendor/github.com/yeya24/promlinter/promlinter.go @@ -250,6 +250,10 @@ func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor { return v } + if len(call.Args) == 0 { + return v + } + return v.parseOpts(call.Args[0], metricType) } diff --git a/vendor/gitlab.com/bosi/decorder/.gitignore b/vendor/gitlab.com/bosi/decorder/.gitignore new file mode 100644 index 000000000..7b533f819 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/.gitignore @@ -0,0 +1,7 @@ +/.idea +/.env +/decorder +/deforder +/LICENSES-3RD-PARTY +/ytt +/yq \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml new file mode 100644 index 000000000..fe6b85288 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml @@ -0,0 +1,15 @@ +#@data/values +--- + +app: + name: decorder + +code_quality: + enable_tests: true + enable_static_code_analyses: true + enable_license_check: true + +deployment: + enable_rc_handling: false + use_gitlab_container_registry: false + enable_image_build_and_deploy: false diff --git a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml new file mode 100644 index 000000000..810b6cb43 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml @@ -0,0 +1,61 @@ +############################### +# This file is auto-generated # +############################### + +variables: + APP_NAME: decorder + +stages: + - test + - build + - release + +test: + stage: test + image: golang:1.17.5@sha256:c72fa9afc50b3303e8044cf28fb358b48032a548e1825819420fd40155a131cb + before_script: 
+ - set -eu + - if [[ -f .env.pipeline ]];then cp .env.pipeline .env;fi + - mkdir -p ~/.ssh + - touch ~/.ssh/known_hosts + - ssh-keyscan gitlab.com > ~/.ssh/known_hosts + retry: 2 + script: + - '### run tests ###' + - make test + - make test-cover + +lint:source-code: + stage: test + image: golangci/golangci-lint:v1.43.0-alpine@sha256:24d773a07983d19963b6e07c7d5bce1b7835f8fb37486d34081e6a40302c17c5 + script: + - '### run linter ###' + - golangci-lint run ./... + +license-check: + stage: test + image: golang:1.17.5@sha256:c72fa9afc50b3303e8044cf28fb358b48032a548e1825819420fd40155a131cb + before_script: + - set -eu + - if [[ -f .env.pipeline ]];then cp .env.pipeline .env;fi + - mkdir -p ~/.ssh + - touch ~/.ssh/known_hosts + - ssh-keyscan gitlab.com > ~/.ssh/known_hosts + script: + - '### run license-check ###' + - make check-licenses + artifacts: + paths: + - LICENSES-3RD-PARTY + expire_in: 7 days + +pages: + stage: release + image: golang:1.17.5@sha256:c72fa9afc50b3303e8044cf28fb358b48032a548e1825819420fd40155a131cb + only: + - tags + script: + - make gitlab-pages + artifacts: + paths: + - public/ diff --git a/vendor/gitlab.com/bosi/decorder/LICENSE.md b/vendor/gitlab.com/bosi/decorder/LICENSE.md new file mode 100644 index 000000000..d46c30e18 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/LICENSE.md @@ -0,0 +1,16 @@ +MIT License + +Copyright (c) 2021 Florian Bosdorff + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/Makefile b/vendor/gitlab.com/bosi/decorder/Makefile new file mode 100644 index 000000000..8d4c05690 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/Makefile @@ -0,0 +1,7 @@ +include project-templates/base.mk + +project-templates/base.mk: + @cp -ar ~/.dotfiles/projects/golang ./project-templates + +.env: + touch .env \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/README.md b/vendor/gitlab.com/bosi/decorder/README.md new file mode 100644 index 000000000..e72954937 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/README.md @@ -0,0 +1,40 @@ +# Decorder + +A declaration order linter for golang. In case of this tool declarations are `type`, `const`, `var` and `func`. + +## Rules + +This linter applies multiple rules where each can be disabled via cli parameter. 
+ +| rule | description | cli-options | +|--------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------| +| declaration order | Enforces the order of global declarations (e.g. all global constants are always defined before variables). You can also define a subset of declarations if you don't want to enforce the order of all of them. | * disable check: `-disable-dec-order-check`
* custom order: `-dec-order var,const,func,type` | +| declaration number | Enforces that the statements const, var and type are only used once per file. You have to use parenthesis to declare e.g multiple global types inside a file. | disable check: `-disable-dec-num-check` | +| init func first | Enforces the init func to be the first function in file. | disable check: `-disable-init-func-first-check` | + +You may find the implementation of the rules inside `analyzer.go`. + +## Installation + +```shell +go install gitlab.com/bosi/decorder/cmd/decorder +``` + +## Usage + +```shell +# with default options +decorder ./... + +# custom declaration order +decorder -dec-order var,const,func,type ./... + +# disable declaration order check +decorder -disable-dec-order-check ./... + +# disable check for multiple declarations statements +decorder -disable-dec-num-check ./... + +# disable check that init func is always first function +decorder -disable-init-func-first-check ./... +``` \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/analyzer.go b/vendor/gitlab.com/bosi/decorder/analyzer.go new file mode 100644 index 000000000..91a784759 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/analyzer.go @@ -0,0 +1,196 @@ +package decorder + +import ( + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +type ( + decNumChecker struct { + tokenMap map[string]token.Token + tokenCounts map[token.Token]int + decOrder []string + funcPoss []funcPos + } + + funcPos struct { + start token.Pos + end token.Pos + } +) + +const ( + Name = "decorder" + + FlagDo = "dec-order" + FlagDdnc = "disable-dec-num-check" + FlagDdoc = "disable-dec-order-check" + FlagDiffc = "disable-init-func-first-check" +) + +var ( + Analyzer = &analysis.Analyzer{ + Name: Name, + Doc: "check declaration order and count of types, constants, variables and functions", + Run: run, + } + + decOrder string + disableDecNumCheck bool + disableDecOrderCheck bool + disableInitFuncFirstCheck bool + + tokens = []token.Token{token.TYPE, token.CONST, token.VAR, token.FUNC} +) + +//nolint:lll +func init() { + Analyzer.Flags.StringVar(&decOrder, FlagDo, "type,const,var,func", "define the required order of types, constants, variables and functions declarations inside a file") + Analyzer.Flags.BoolVar(&disableDecNumCheck, FlagDdnc, false, "option to disable check for number of e.g. 
var declarations inside file") + Analyzer.Flags.BoolVar(&disableDecOrderCheck, FlagDdoc, false, "option to disable check for order of declarations inside file") + Analyzer.Flags.BoolVar(&disableInitFuncFirstCheck, FlagDiffc, false, "option to disable check that init function is always first function in file") +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + ast.Inspect(f, runDeclNumAndDecOrderCheck(pass)) + + if !disableInitFuncFirstCheck { + ast.Inspect(f, runInitFuncFirstCheck(pass)) + } + } + + return nil, nil +} + +func runInitFuncFirstCheck(pass *analysis.Pass) func(ast.Node) bool { + nonInitFound := false + + return func(n ast.Node) bool { + dec, ok := n.(*ast.FuncDecl) + if !ok { + return true + } + + if dec.Name.Name == "init" { + if nonInitFound { + pass.Reportf(dec.Pos(), "init func must be the first function in file") + } + } else { + nonInitFound = true + } + + return true + } +} + +func runDeclNumAndDecOrderCheck(pass *analysis.Pass) func(ast.Node) bool { + dnc := newDecNumChecker() + + return func(n ast.Node) bool { + fd, ok := n.(*ast.FuncDecl) + if ok { + return dnc.handleFuncDec(fd, pass) + } + + gd, ok := n.(*ast.GenDecl) + if !ok { + return true + } + + if dnc.isInsideFunction(gd) { + return true + } + + if !disableDecNumCheck { + dnc.handleDecNumCheck(gd, pass) + } + + if !disableDecOrderCheck { + dnc.handleDecOrderCheck(gd, pass) + } + + return true + } +} + +func newDecNumChecker() decNumChecker { + dnc := decNumChecker{ + tokenMap: map[string]token.Token{}, + tokenCounts: map[token.Token]int{}, + decOrder: []string{}, + funcPoss: []funcPos{}, + } + + for _, t := range tokens { + dnc.tokenCounts[t] = 0 + dnc.tokenMap[t.String()] = t + } + + for _, do := range strings.Split(decOrder, ",") { + dnc.decOrder = append(dnc.decOrder, strings.TrimSpace(do)) + } + + return dnc +} + +func (dnc decNumChecker) isToLate(t token.Token) (string, bool) { + for i, do := range dnc.decOrder { + if do == t.String() { + for j := i + 1; j < len(dnc.decOrder); j++ { + if dnc.tokenCounts[dnc.tokenMap[dnc.decOrder[j]]] > 0 { + return dnc.decOrder[j], false + } + } + return "", true + } + } + + return "", true +} + +func (dnc *decNumChecker) handleDecNumCheck(gd *ast.GenDecl, pass *analysis.Pass) { + for _, t := range tokens { + if gd.Tok == t { + dnc.tokenCounts[t]++ + + if dnc.tokenCounts[t] > 1 { + pass.Reportf(gd.Pos(), "multiple \"%s\" declarations are not allowed; use parentheses instead", t.String()) + } + } + } +} + +func (dnc decNumChecker) handleDecOrderCheck(gd *ast.GenDecl, pass *analysis.Pass) { + l, c := dnc.isToLate(gd.Tok) + if !c { + pass.Reportf(gd.Pos(), "%s must not be placed after %s", gd.Tok.String(), l) + } +} + +func (dnc decNumChecker) isInsideFunction(dn *ast.GenDecl) bool { + for _, poss := range dnc.funcPoss { + if poss.start < dn.Pos() && poss.end > dn.Pos() { + return true + } + } + return false +} + +func (dnc *decNumChecker) handleFuncDec(fd *ast.FuncDecl, pass *analysis.Pass) bool { + dnc.funcPoss = append(dnc.funcPoss, funcPos{start: fd.Pos(), end: fd.End()}) + + dnc.tokenCounts[token.FUNC]++ + + if !disableDecOrderCheck { + l, c := dnc.isToLate(token.FUNC) + if !c { + pass.Reportf(fd.Pos(), "%s must not be placed after %s", token.FUNC.String(), l) + } + } + + return true +} diff --git a/vendor/gitlab.com/bosi/decorder/go.mod b/vendor/gitlab.com/bosi/decorder/go.mod new file mode 100644 index 000000000..8f12fe285 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/go.mod @@ -0,0 +1,11 @@ +module 
gitlab.com/bosi/decorder + +go 1.17 + +require golang.org/x/tools v0.1.8 + +require ( + golang.org/x/mod v0.5.1 // indirect + golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect +) diff --git a/vendor/gitlab.com/bosi/decorder/go.sum b/vendor/gitlab.com/bosi/decorder/go.sum new file mode 100644 index 000000000..a9ad6f676 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/go.sum @@ -0,0 +1,30 @@ +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.8 h1:P1HhGGuLW4aAclzjtmJdf0mJOjVUZUzOTqkAkWL+l6w= +golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git 
a/vendor/gitlab.com/bosi/decorder/renovate.json b/vendor/gitlab.com/bosi/decorder/renovate.json new file mode 100644 index 000000000..95cf1af02 --- /dev/null +++ b/vendor/gitlab.com/bosi/decorder/renovate.json @@ -0,0 +1,26 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:base", + "group:allNonMajor", + ":automergePatch", + ":automergeMinor", + ":automergeLinters", + ":automergeTesters", + ":automergeTypes" + ], + "enabled": true, + "dependencyDashboard": false, + "separateMajorMinor": true, + "separateMultipleMajor": false, + "prHourlyLimit": 2, + "prConcurrentLimit": 10, + "labels": [ + "depUpdate" + ], + "updateLockFiles": true, + "docker": { + "pinDigests": true + } +} + diff --git a/vendor/golang.org/x/crypto/acme/acme.go b/vendor/golang.org/x/crypto/acme/acme.go index 73b19ef35..f2d23f697 100644 --- a/vendor/golang.org/x/crypto/acme/acme.go +++ b/vendor/golang.org/x/crypto/acme/acme.go @@ -3,17 +3,20 @@ // license that can be found in the LICENSE file. // Package acme provides an implementation of the -// Automatic Certificate Management Environment (ACME) spec. -// The initial implementation was based on ACME draft-02 and -// is now being extended to comply with RFC 8555. -// See https://tools.ietf.org/html/draft-ietf-acme-acme-02 -// and https://tools.ietf.org/html/rfc8555 for details. +// Automatic Certificate Management Environment (ACME) spec, +// most famously used by Let's Encrypt. +// +// The initial implementation of this package was based on an early version +// of the spec. The current implementation supports only the modern +// RFC 8555 but some of the old API surface remains for compatibility. +// While code using the old API will still compile, it will return an error. +// Note the deprecation comments to update your code. +// +// See https://tools.ietf.org/html/rfc8555 for the spec. // // Most common scenarios will want to use autocert subdirectory instead, // which provides automatic access to certificates from Let's Encrypt // and any other ACME-based CA. -// -// This package is a work in progress and makes no API stability promises. package acme import ( @@ -33,8 +36,6 @@ import ( "encoding/pem" "errors" "fmt" - "io" - "io/ioutil" "math/big" "net/http" "strings" @@ -72,6 +73,7 @@ const ( ) // Client is an ACME client. +// // The only required field is Key. An example of creating a client with a new key // is as follows: // @@ -125,7 +127,9 @@ type Client struct { cacheMu sync.Mutex dir *Directory // cached result of Client's Discover method - kid keyID // cached Account.URI obtained from registerRFC or getAccountRFC + // KID is the key identifier provided by the CA. If not provided it will be + // retrieved from the CA by making a call to the registration endpoint. + KID KeyID noncesMu sync.Mutex nonces map[string]struct{} // nonces collected from previous responses @@ -140,23 +144,22 @@ type Client struct { // // When in pre-RFC mode or when c.getRegRFC responds with an error, accountKID // returns noKeyID. 
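// Editorial sketch, not part of the upstream patch: with keyID exported as
// acme.KeyID and cached on the now-public Client.KID field, a caller that
// already knows its account URL can pre-populate KID and skip the extra
// round-trip to the registration endpoint that accountKID would otherwise
// make. accountKey and the account URL below are hypothetical placeholders.
//
//	client := &acme.Client{
//		Key:          accountKey, // an existing crypto.Signer for the account
//		DirectoryURL: acme.LetsEncryptURL,
//		KID:          acme.KeyID("https://example-ca.invalid/acme/acct/12345"), // hypothetical
//	}
//	// Requests made with this client sign the JWS "kid" header from Client.KID
//	// instead of first calling getRegRFC to discover it.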
-func (c *Client) accountKID(ctx context.Context) keyID { +func (c *Client) accountKID(ctx context.Context) KeyID { c.cacheMu.Lock() defer c.cacheMu.Unlock() - if !c.dir.rfcCompliant() { - return noKeyID - } - if c.kid != noKeyID { - return c.kid + if c.KID != noKeyID { + return c.KID } a, err := c.getRegRFC(ctx) if err != nil { return noKeyID } - c.kid = keyID(a.URI) - return c.kid + c.KID = KeyID(a.URI) + return c.KID } +var errPreRFC = errors.New("acme: server does not support the RFC 8555 version of ACME") + // Discover performs ACME server discovery using c.DirectoryURL. // // It caches successful result. So, subsequent calls will not result in @@ -177,53 +180,36 @@ func (c *Client) Discover(ctx context.Context) (Directory, error) { c.addNonce(res.Header) var v struct { - Reg string `json:"new-reg"` - RegRFC string `json:"newAccount"` - Authz string `json:"new-authz"` - AuthzRFC string `json:"newAuthz"` - OrderRFC string `json:"newOrder"` - Cert string `json:"new-cert"` - Revoke string `json:"revoke-cert"` - RevokeRFC string `json:"revokeCert"` - NonceRFC string `json:"newNonce"` - KeyChangeRFC string `json:"keyChange"` - Meta struct { - Terms string `json:"terms-of-service"` - TermsRFC string `json:"termsOfService"` - WebsiteRFC string `json:"website"` - CAA []string `json:"caa-identities"` - CAARFC []string `json:"caaIdentities"` - ExternalAcctRFC bool `json:"externalAccountRequired"` + Reg string `json:"newAccount"` + Authz string `json:"newAuthz"` + Order string `json:"newOrder"` + Revoke string `json:"revokeCert"` + Nonce string `json:"newNonce"` + KeyChange string `json:"keyChange"` + Meta struct { + Terms string `json:"termsOfService"` + Website string `json:"website"` + CAA []string `json:"caaIdentities"` + ExternalAcct bool `json:"externalAccountRequired"` } } if err := json.NewDecoder(res.Body).Decode(&v); err != nil { return Directory{}, err } - if v.OrderRFC == "" { - // Non-RFC compliant ACME CA. - c.dir = &Directory{ - RegURL: v.Reg, - AuthzURL: v.Authz, - CertURL: v.Cert, - RevokeURL: v.Revoke, - Terms: v.Meta.Terms, - Website: v.Meta.WebsiteRFC, - CAA: v.Meta.CAA, - } - return *c.dir, nil + if v.Order == "" { + return Directory{}, errPreRFC } - // RFC compliant ACME CA. c.dir = &Directory{ - RegURL: v.RegRFC, - AuthzURL: v.AuthzRFC, - OrderURL: v.OrderRFC, - RevokeURL: v.RevokeRFC, - NonceURL: v.NonceRFC, - KeyChangeURL: v.KeyChangeRFC, - Terms: v.Meta.TermsRFC, - Website: v.Meta.WebsiteRFC, - CAA: v.Meta.CAARFC, - ExternalAccountRequired: v.Meta.ExternalAcctRFC, + RegURL: v.Reg, + AuthzURL: v.Authz, + OrderURL: v.Order, + RevokeURL: v.Revoke, + NonceURL: v.Nonce, + KeyChangeURL: v.KeyChange, + Terms: v.Meta.Terms, + Website: v.Meta.Website, + CAA: v.Meta.CAA, + ExternalAccountRequired: v.Meta.ExternalAcct, } return *c.dir, nil } @@ -235,55 +221,11 @@ func (c *Client) directoryURL() string { return LetsEncryptURL } -// CreateCert requests a new certificate using the Certificate Signing Request csr encoded in DER format. -// It is incompatible with RFC 8555. Callers should use CreateOrderCert when interfacing -// with an RFC-compliant CA. +// CreateCert was part of the old version of ACME. It is incompatible with RFC 8555. // -// The exp argument indicates the desired certificate validity duration. CA may issue a certificate -// with a different duration. -// If the bundle argument is true, the returned value will also contain the CA (issuer) certificate chain. 
-// -// In the case where CA server does not provide the issued certificate in the response, -// CreateCert will poll certURL using c.FetchCert, which will result in additional round-trips. -// In such a scenario, the caller can cancel the polling with ctx. -// -// CreateCert returns an error if the CA's response or chain was unreasonably large. -// Callers are encouraged to parse the returned value to ensure the certificate is valid and has the expected features. +// Deprecated: this was for the pre-RFC 8555 version of ACME. Callers should use CreateOrderCert. func (c *Client) CreateCert(ctx context.Context, csr []byte, exp time.Duration, bundle bool) (der [][]byte, certURL string, err error) { - if _, err := c.Discover(ctx); err != nil { - return nil, "", err - } - - req := struct { - Resource string `json:"resource"` - CSR string `json:"csr"` - NotBefore string `json:"notBefore,omitempty"` - NotAfter string `json:"notAfter,omitempty"` - }{ - Resource: "new-cert", - CSR: base64.RawURLEncoding.EncodeToString(csr), - } - now := timeNow() - req.NotBefore = now.Format(time.RFC3339) - if exp > 0 { - req.NotAfter = now.Add(exp).Format(time.RFC3339) - } - - res, err := c.post(ctx, nil, c.dir.CertURL, req, wantStatus(http.StatusCreated)) - if err != nil { - return nil, "", err - } - defer res.Body.Close() - - curl := res.Header.Get("Location") // cert permanent URL - if res.ContentLength == 0 { - // no cert in the body; poll until we get it - cert, err := c.FetchCert(ctx, curl, bundle) - return cert, curl, err - } - // slurp issued cert and CA chain, if requested - cert, err := c.responseCert(ctx, res, bundle) - return cert, curl, err + return nil, "", errPreRFC } // FetchCert retrieves already issued certificate from the given url, in DER format. @@ -297,20 +239,10 @@ func (c *Client) CreateCert(ctx context.Context, csr []byte, exp time.Duration, // Callers are encouraged to parse the returned value to ensure the certificate is valid // and has expected features. func (c *Client) FetchCert(ctx context.Context, url string, bundle bool) ([][]byte, error) { - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - if dir.rfcCompliant() { - return c.fetchCertRFC(ctx, url, bundle) - } - - // Legacy non-authenticated GET request. - res, err := c.get(ctx, url, wantStatus(http.StatusOK)) - if err != nil { - return nil, err - } - return c.responseCert(ctx, res, bundle) + return c.fetchCertRFC(ctx, url, bundle) } // RevokeCert revokes a previously issued certificate cert, provided in DER format. @@ -320,30 +252,10 @@ func (c *Client) FetchCert(ctx context.Context, url string, bundle bool) ([][]by // For instance, the key pair of the certificate may be authorized. // If the key is nil, c.Key is used instead. func (c *Client) RevokeCert(ctx context.Context, key crypto.Signer, cert []byte, reason CRLReasonCode) error { - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return err } - if dir.rfcCompliant() { - return c.revokeCertRFC(ctx, key, cert, reason) - } - - // Legacy CA. 
- body := &struct { - Resource string `json:"resource"` - Cert string `json:"certificate"` - Reason int `json:"reason"` - }{ - Resource: "revoke-cert", - Cert: base64.RawURLEncoding.EncodeToString(cert), - Reason: int(reason), - } - res, err := c.post(ctx, key, dir.RevokeURL, body, wantStatus(http.StatusOK)) - if err != nil { - return err - } - defer res.Body.Close() - return nil + return c.revokeCertRFC(ctx, key, cert, reason) } // AcceptTOS always returns true to indicate the acceptance of a CA's Terms of Service @@ -366,75 +278,33 @@ func (c *Client) Register(ctx context.Context, acct *Account, prompt func(tosURL if c.Key == nil { return nil, errors.New("acme: client.Key must be set to Register") } - - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - if dir.rfcCompliant() { - return c.registerRFC(ctx, acct, prompt) - } - - // Legacy ACME draft registration flow. - a, err := c.doReg(ctx, dir.RegURL, "new-reg", acct) - if err != nil { - return nil, err - } - var accept bool - if a.CurrentTerms != "" && a.CurrentTerms != a.AgreedTerms { - accept = prompt(a.CurrentTerms) - } - if accept { - a.AgreedTerms = a.CurrentTerms - a, err = c.UpdateReg(ctx, a) - } - return a, err + return c.registerRFC(ctx, acct, prompt) } // GetReg retrieves an existing account associated with c.Key. // -// The url argument is an Account URI used with pre-RFC 8555 CAs. -// It is ignored when interfacing with an RFC-compliant CA. +// The url argument is a legacy artifact of the pre-RFC 8555 API +// and is ignored. func (c *Client) GetReg(ctx context.Context, url string) (*Account, error) { - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - if dir.rfcCompliant() { - return c.getRegRFC(ctx) - } - - // Legacy CA. - a, err := c.doReg(ctx, url, "reg", nil) - if err != nil { - return nil, err - } - a.URI = url - return a, nil + return c.getRegRFC(ctx) } // UpdateReg updates an existing registration. // It returns an updated account copy. The provided account is not modified. // -// When interfacing with RFC-compliant CAs, a.URI is ignored and the account URL -// associated with c.Key is used instead. +// The account's URI is ignored and the account URL associated with +// c.Key is used instead. func (c *Client) UpdateReg(ctx context.Context, acct *Account) (*Account, error) { - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - if dir.rfcCompliant() { - return c.updateRegRFC(ctx, acct) - } - - // Legacy CA. - uri := acct.URI - a, err := c.doReg(ctx, uri, "reg", acct) - if err != nil { - return nil, err - } - a.URI = uri - return a, nil + return c.updateRegRFC(ctx, acct) } // Authorize performs the initial step in the pre-authorization flow, @@ -503,17 +373,11 @@ func (c *Client) authorize(ctx context.Context, typ, val string) (*Authorization // If a caller needs to poll an authorization until its status is final, // see the WaitAuthorization method. 
func (c *Client) GetAuthorization(ctx context.Context, url string) (*Authorization, error) { - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - var res *http.Response - if dir.rfcCompliant() { - res, err = c.postAsGet(ctx, url, wantStatus(http.StatusOK)) - } else { - res, err = c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) - } + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK)) if err != nil { return nil, err } @@ -535,7 +399,6 @@ func (c *Client) GetAuthorization(ctx context.Context, url string) (*Authorizati // // It does not revoke existing certificates. func (c *Client) RevokeAuthorization(ctx context.Context, url string) error { - // Required for c.accountKID() when in RFC mode. if _, err := c.Discover(ctx); err != nil { return err } @@ -565,18 +428,11 @@ func (c *Client) RevokeAuthorization(ctx context.Context, url string) error { // In all other cases WaitAuthorization returns an error. // If the Status is StatusInvalid, the returned error is of type *AuthorizationError. func (c *Client) WaitAuthorization(ctx context.Context, url string) (*Authorization, error) { - // Required for c.accountKID() when in RFC mode. - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - getfn := c.postAsGet - if !dir.rfcCompliant() { - getfn = c.get - } - for { - res, err := getfn(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) if err != nil { return nil, err } @@ -619,17 +475,11 @@ func (c *Client) WaitAuthorization(ctx context.Context, url string) (*Authorizat // // A client typically polls a challenge status using this method. func (c *Client) GetChallenge(ctx context.Context, url string) (*Challenge, error) { - // Required for c.accountKID() when in RFC mode. - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - getfn := c.postAsGet - if !dir.rfcCompliant() { - getfn = c.get - } - res, err := getfn(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) if err != nil { return nil, err } @@ -647,29 +497,11 @@ func (c *Client) GetChallenge(ctx context.Context, url string) (*Challenge, erro // // The server will then perform the validation asynchronously. func (c *Client) Accept(ctx context.Context, chal *Challenge) (*Challenge, error) { - // Required for c.accountKID() when in RFC mode. - dir, err := c.Discover(ctx) - if err != nil { + if _, err := c.Discover(ctx); err != nil { return nil, err } - var req interface{} = json.RawMessage("{}") // RFC-compliant CA - if !dir.rfcCompliant() { - auth, err := keyAuth(c.Key.Public(), chal.Token) - if err != nil { - return nil, err - } - req = struct { - Resource string `json:"resource"` - Type string `json:"type"` - Auth string `json:"keyAuthorization"` - }{ - Resource: "challenge", - Type: chal.Type, - Auth: auth, - } - } - res, err := c.post(ctx, nil, chal.URI, req, wantStatus( + res, err := c.post(ctx, nil, chal.URI, json.RawMessage("{}"), wantStatus( http.StatusOK, // according to the spec http.StatusAccepted, // Let's Encrypt: see https://goo.gl/WsJ7VT (acme-divergences.md) )) @@ -720,7 +552,7 @@ func (c *Client) HTTP01ChallengePath(token string) string { // TLSSNI01ChallengeCert creates a certificate for TLS-SNI-01 challenge response. 
// -// Deprecated: This challenge type is unused in both draft-02 and RFC versions of ACME spec. +// Deprecated: This challenge type is unused in both draft-02 and RFC versions of the ACME spec. func (c *Client) TLSSNI01ChallengeCert(token string, opt ...CertOption) (cert tls.Certificate, name string, err error) { ka, err := keyAuth(c.Key.Public(), token) if err != nil { @@ -738,7 +570,7 @@ func (c *Client) TLSSNI01ChallengeCert(token string, opt ...CertOption) (cert tl // TLSSNI02ChallengeCert creates a certificate for TLS-SNI-02 challenge response. // -// Deprecated: This challenge type is unused in both draft-02 and RFC versions of ACME spec. +// Deprecated: This challenge type is unused in both draft-02 and RFC versions of the ACME spec. func (c *Client) TLSSNI02ChallengeCert(token string, opt ...CertOption) (cert tls.Certificate, name string, err error) { b := sha256.Sum256([]byte(token)) h := hex.EncodeToString(b[:]) @@ -805,63 +637,6 @@ func (c *Client) TLSALPN01ChallengeCert(token, domain string, opt ...CertOption) return tlsChallengeCert([]string{domain}, newOpt) } -// doReg sends all types of registration requests the old way (pre-RFC world). -// The type of request is identified by typ argument, which is a "resource" -// in the ACME spec terms. -// -// A non-nil acct argument indicates whether the intention is to mutate data -// of the Account. Only Contact and Agreement of its fields are used -// in such cases. -func (c *Client) doReg(ctx context.Context, url string, typ string, acct *Account) (*Account, error) { - req := struct { - Resource string `json:"resource"` - Contact []string `json:"contact,omitempty"` - Agreement string `json:"agreement,omitempty"` - }{ - Resource: typ, - } - if acct != nil { - req.Contact = acct.Contact - req.Agreement = acct.AgreedTerms - } - res, err := c.post(ctx, nil, url, req, wantStatus( - http.StatusOK, // updates and deletes - http.StatusCreated, // new account creation - http.StatusAccepted, // Let's Encrypt divergent implementation - )) - if err != nil { - return nil, err - } - defer res.Body.Close() - - var v struct { - Contact []string - Agreement string - Authorizations string - Certificates string - } - if err := json.NewDecoder(res.Body).Decode(&v); err != nil { - return nil, fmt.Errorf("acme: invalid response: %v", err) - } - var tos string - if v := linkHeader(res.Header, "terms-of-service"); len(v) > 0 { - tos = v[0] - } - var authz string - if v := linkHeader(res.Header, "next"); len(v) > 0 { - authz = v[0] - } - return &Account{ - URI: res.Header.Get("Location"), - Contact: v.Contact, - AgreedTerms: v.Agreement, - CurrentTerms: tos, - Authz: authz, - Authorizations: v.Authorizations, - Certificates: v.Certificates, - }, nil -} - // popNonce returns a nonce value previously stored with c.addNonce // or fetches a fresh one from c.dir.NonceURL. // If NonceURL is empty, it first tries c.directoryURL() and, failing that, @@ -936,78 +711,6 @@ func nonceFromHeader(h http.Header) string { return h.Get("Replay-Nonce") } -func (c *Client) responseCert(ctx context.Context, res *http.Response, bundle bool) ([][]byte, error) { - b, err := ioutil.ReadAll(io.LimitReader(res.Body, maxCertSize+1)) - if err != nil { - return nil, fmt.Errorf("acme: response stream: %v", err) - } - if len(b) > maxCertSize { - return nil, errors.New("acme: certificate is too big") - } - cert := [][]byte{b} - if !bundle { - return cert, nil - } - - // Append CA chain cert(s). 
- // At least one is required according to the spec: - // https://tools.ietf.org/html/draft-ietf-acme-acme-03#section-6.3.1 - up := linkHeader(res.Header, "up") - if len(up) == 0 { - return nil, errors.New("acme: rel=up link not found") - } - if len(up) > maxChainLen { - return nil, errors.New("acme: rel=up link is too large") - } - for _, url := range up { - cc, err := c.chainCert(ctx, url, 0) - if err != nil { - return nil, err - } - cert = append(cert, cc...) - } - return cert, nil -} - -// chainCert fetches CA certificate chain recursively by following "up" links. -// Each recursive call increments the depth by 1, resulting in an error -// if the recursion level reaches maxChainLen. -// -// First chainCert call starts with depth of 0. -func (c *Client) chainCert(ctx context.Context, url string, depth int) ([][]byte, error) { - if depth >= maxChainLen { - return nil, errors.New("acme: certificate chain is too deep") - } - - res, err := c.get(ctx, url, wantStatus(http.StatusOK)) - if err != nil { - return nil, err - } - defer res.Body.Close() - b, err := ioutil.ReadAll(io.LimitReader(res.Body, maxCertSize+1)) - if err != nil { - return nil, err - } - if len(b) > maxCertSize { - return nil, errors.New("acme: certificate is too big") - } - chain := [][]byte{b} - - uplink := linkHeader(res.Header, "up") - if len(uplink) > maxChainLen { - return nil, errors.New("acme: certificate chain is too large") - } - for _, up := range uplink { - cc, err := c.chainCert(ctx, up, depth+1) - if err != nil { - return nil, err - } - chain = append(chain, cc...) - } - - return chain, nil -} - // linkHeader returns URI-Reference values of all Link headers // with relation-type rel. // See https://tools.ietf.org/html/rfc5988#section-5 for details. @@ -1098,5 +801,5 @@ func encodePEM(typ string, b []byte) []byte { return pem.EncodeToMemory(pb) } -// timeNow is useful for testing for fixed current time. +// timeNow is time.Now, except in tests which can mess with it. var timeNow = time.Now diff --git a/vendor/golang.org/x/crypto/acme/autocert/autocert.go b/vendor/golang.org/x/crypto/acme/autocert/autocert.go index 37923f439..1858184e8 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/autocert.go +++ b/vendor/golang.org/x/crypto/acme/autocert/autocert.go @@ -47,6 +47,8 @@ var createCertRetryAfter = time.Minute // pseudoRand is safe for concurrent use. var pseudoRand *lockedMathRand +var errPreRFC = errors.New("autocert: ACME server doesn't support RFC 8555") + func init() { src := mathrand.NewSource(time.Now().UnixNano()) pseudoRand = &lockedMathRand{rnd: mathrand.New(src)} @@ -456,7 +458,7 @@ func (m *Manager) cert(ctx context.Context, ck certKey) (*tls.Certificate, error leaf: cert.Leaf, } m.state[ck] = s - go m.renew(ck, s.key, s.leaf.NotAfter) + go m.startRenew(ck, s.key, s.leaf.NotAfter) return cert, nil } @@ -582,8 +584,9 @@ func (m *Manager) createCert(ctx context.Context, ck certKey) (*tls.Certificate, if err != nil { // Remove the failed state after some time, // making the manager call createCert again on the following TLS hello. + didRemove := testDidRemoveState // The lifetime of this timer is untracked, so copy mutable local state to avoid races. 
time.AfterFunc(createCertRetryAfter, func() { - defer testDidRemoveState(ck) + defer didRemove(ck) m.stateMu.Lock() defer m.stateMu.Unlock() // Verify the state hasn't changed and it's still invalid @@ -601,7 +604,7 @@ func (m *Manager) createCert(ctx context.Context, ck certKey) (*tls.Certificate, } state.cert = der state.leaf = leaf - go m.renew(ck, state.key, state.leaf.NotAfter) + go m.startRenew(ck, state.key, state.leaf.NotAfter) return state.tlscert() } @@ -658,31 +661,19 @@ func (m *Manager) authorizedCert(ctx context.Context, key crypto.Signer, ck cert if err != nil { return nil, nil, err } - - var chain [][]byte - switch { - // Pre-RFC legacy CA. - case dir.OrderURL == "": - if err := m.verify(ctx, client, ck.domain); err != nil { - return nil, nil, err - } - der, _, err := client.CreateCert(ctx, csr, 0, true) - if err != nil { - return nil, nil, err - } - chain = der - // RFC 8555 compliant CA. - default: - o, err := m.verifyRFC(ctx, client, ck.domain) - if err != nil { - return nil, nil, err - } - der, _, err := client.CreateOrderCert(ctx, o.FinalizeURL, csr, true) - if err != nil { - return nil, nil, err - } - chain = der + if dir.OrderURL == "" { + return nil, nil, errPreRFC } + + o, err := m.verifyRFC(ctx, client, ck.domain) + if err != nil { + return nil, nil, err + } + chain, _, err := client.CreateOrderCert(ctx, o.FinalizeURL, csr, true) + if err != nil { + return nil, nil, err + } + leaf, err = validCert(ck, chain, key, m.now()) if err != nil { return nil, nil, err @@ -690,69 +681,6 @@ func (m *Manager) authorizedCert(ctx context.Context, key crypto.Signer, ck cert return chain, leaf, nil } -// verify runs the identifier (domain) pre-authorization flow for legacy CAs -// using each applicable ACME challenge type. -func (m *Manager) verify(ctx context.Context, client *acme.Client, domain string) error { - // Remove all hanging authorizations to reduce rate limit quotas - // after we're done. - var authzURLs []string - defer func() { - go m.deactivatePendingAuthz(authzURLs) - }() - - // errs accumulates challenge failure errors, printed if all fail - errs := make(map[*acme.Challenge]error) - challengeTypes := m.supportedChallengeTypes() - var nextTyp int // challengeType index of the next challenge type to try - for { - // Start domain authorization and get the challenge. - authz, err := client.Authorize(ctx, domain) - if err != nil { - return err - } - authzURLs = append(authzURLs, authz.URI) - // No point in accepting challenges if the authorization status - // is in a final state. - switch authz.Status { - case acme.StatusValid: - return nil // already authorized - case acme.StatusInvalid: - return fmt.Errorf("acme/autocert: invalid authorization %q", authz.URI) - } - - // Pick the next preferred challenge. - var chal *acme.Challenge - for chal == nil && nextTyp < len(challengeTypes) { - chal = pickChallenge(challengeTypes[nextTyp], authz.Challenges) - nextTyp++ - } - if chal == nil { - errorMsg := fmt.Sprintf("acme/autocert: unable to authorize %q", domain) - for chal, err := range errs { - errorMsg += fmt.Sprintf("; challenge %q failed with error: %v", chal.Type, err) - } - return errors.New(errorMsg) - } - cleanup, err := m.fulfill(ctx, client, chal, domain) - if err != nil { - errs[chal] = err - continue - } - defer cleanup() - if _, err := client.Accept(ctx, chal); err != nil { - errs[chal] = err - continue - } - - // A challenge is fulfilled and accepted: wait for the CA to validate. 
- if _, err := client.WaitAuthorization(ctx, authz.URI); err != nil { - errs[chal] = err - continue - } - return nil - } -} - // verifyRFC runs the identifier (domain) order-based authorization flow for RFC compliant CAs // using each applicable ACME challenge type. func (m *Manager) verifyRFC(ctx context.Context, client *acme.Client, domain string) (*acme.Order, error) { @@ -966,7 +894,7 @@ func httpTokenCacheKey(tokenPath string) string { return path.Base(tokenPath) + "+http-01" } -// renew starts a cert renewal timer loop, one per domain. +// startRenew starts a cert renewal timer loop, one per domain. // // The loop is scheduled in two cases: // - a cert was fetched from cache for the first time (wasn't in m.state) @@ -974,7 +902,7 @@ func httpTokenCacheKey(tokenPath string) string { // // The key argument is a certificate private key. // The exp argument is the cert expiration time (NotAfter). -func (m *Manager) renew(ck certKey, key crypto.Signer, exp time.Time) { +func (m *Manager) startRenew(ck certKey, key crypto.Signer, exp time.Time) { m.renewalMu.Lock() defer m.renewalMu.Unlock() if m.renewal[ck] != nil { diff --git a/vendor/golang.org/x/crypto/acme/autocert/renewal.go b/vendor/golang.org/x/crypto/acme/autocert/renewal.go index 665f870dc..0df7da78a 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/renewal.go +++ b/vendor/golang.org/x/crypto/acme/autocert/renewal.go @@ -21,8 +21,9 @@ type domainRenewal struct { ck certKey key crypto.Signer - timerMu sync.Mutex - timer *time.Timer + timerMu sync.Mutex + timer *time.Timer + timerClose chan struct{} // if non-nil, renew closes this channel (and nils out the timer fields) instead of running } // start starts a cert renewal timer at the time @@ -38,16 +39,28 @@ func (dr *domainRenewal) start(exp time.Time) { dr.timer = time.AfterFunc(dr.next(exp), dr.renew) } -// stop stops the cert renewal timer. -// If the timer is already stopped, calling stop is a noop. +// stop stops the cert renewal timer and waits for any in-flight calls to renew +// to complete. If the timer is already stopped, calling stop is a noop. func (dr *domainRenewal) stop() { dr.timerMu.Lock() defer dr.timerMu.Unlock() - if dr.timer == nil { - return + for { + if dr.timer == nil { + return + } + if dr.timer.Stop() { + dr.timer = nil + return + } else { + // dr.timer fired, and we acquired dr.timerMu before the renew callback did. + // (We know this because otherwise the renew callback would have reset dr.timer!) + timerClose := make(chan struct{}) + dr.timerClose = timerClose + dr.timerMu.Unlock() + <-timerClose + dr.timerMu.Lock() + } } - dr.timer.Stop() - dr.timer = nil } // renew is called periodically by a timer. 
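// Editorial sketch, not part of the upstream patch: the new stop above has to
// handle the window where the timer has already fired but the renew callback
// has not yet taken timerMu. time.Timer.Stop reports false in that case, so
// stop hands the callback a channel (timerClose) and waits for it to be
// closed rather than racing the callback. The same handshake in isolation,
// with hypothetical names (assumes "sync" and "time" are imported):
//
//	type worker struct {
//		mu    sync.Mutex
//		timer *time.Timer
//		done  chan struct{} // non-nil: a fired callback should close it and exit
//	}
//
//	func (w *worker) stop() {
//		w.mu.Lock()
//		defer w.mu.Unlock()
//		for w.timer != nil {
//			if w.timer.Stop() {
//				w.timer = nil // callback never ran
//				return
//			}
//			done := make(chan struct{})
//			w.done = done
//			w.mu.Unlock()
//			<-done // wait for the in-flight callback to finish
//			w.mu.Lock()
//		}
//	}
//
//	func (w *worker) tick() { // timer callback
//		w.mu.Lock()
//		defer w.mu.Unlock()
//		if w.done != nil { // stop is waiting on us
//			close(w.done)
//			w.timer, w.done = nil, nil
//			return
//		}
//		// ... periodic work, then re-arm:
//		w.timer = time.AfterFunc(time.Minute, w.tick)
//	}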
@@ -55,7 +68,9 @@ func (dr *domainRenewal) stop() { func (dr *domainRenewal) renew() { dr.timerMu.Lock() defer dr.timerMu.Unlock() - if dr.timer == nil { + if dr.timerClose != nil { + close(dr.timerClose) + dr.timer, dr.timerClose = nil, nil return } @@ -67,8 +82,8 @@ func (dr *domainRenewal) renew() { next = renewJitter / 2 next += time.Duration(pseudoRand.int63n(int64(next))) } - dr.timer = time.AfterFunc(next, dr.renew) testDidRenewLoop(next, err) + dr.timer = time.AfterFunc(next, dr.renew) } // updateState locks and replaces the relevant Manager.state item with the given diff --git a/vendor/golang.org/x/crypto/acme/jws.go b/vendor/golang.org/x/crypto/acme/jws.go index 8c3ecceca..403e5b0c2 100644 --- a/vendor/golang.org/x/crypto/acme/jws.go +++ b/vendor/golang.org/x/crypto/acme/jws.go @@ -20,12 +20,12 @@ import ( "math/big" ) -// keyID is the account identity provided by a CA during registration. -type keyID string +// KeyID is the account key identity provided by a CA during registration. +type KeyID string // noKeyID indicates that jwsEncodeJSON should compute and use JWK instead of a KID. // See jwsEncodeJSON for details. -const noKeyID = keyID("") +const noKeyID = KeyID("") // noPayload indicates jwsEncodeJSON will encode zero-length octet string // in a JWS request. This is called POST-as-GET in RFC 8555 and is used to make @@ -43,14 +43,17 @@ type jsonWebSignature struct { // jwsEncodeJSON signs claimset using provided key and a nonce. // The result is serialized in JSON format containing either kid or jwk -// fields based on the provided keyID value. +// fields based on the provided KeyID value. // // If kid is non-empty, its quoted value is inserted in the protected head // as "kid" field value. Otherwise, JWK is computed using jwkEncode and inserted // as "jwk" field value. The "jwk" and "kid" fields are mutually exclusive. // // See https://tools.ietf.org/html/rfc7515#section-7. -func jwsEncodeJSON(claimset interface{}, key crypto.Signer, kid keyID, nonce, url string) ([]byte, error) { +func jwsEncodeJSON(claimset interface{}, key crypto.Signer, kid KeyID, nonce, url string) ([]byte, error) { + if key == nil { + return nil, errors.New("nil key") + } alg, sha := jwsHasher(key.Public()) if alg == "" || !sha.Available() { return nil, ErrUnsupportedKey diff --git a/vendor/golang.org/x/crypto/acme/rfc8555.go b/vendor/golang.org/x/crypto/acme/rfc8555.go index f9d3011ff..928a5aa03 100644 --- a/vendor/golang.org/x/crypto/acme/rfc8555.go +++ b/vendor/golang.org/x/crypto/acme/rfc8555.go @@ -78,7 +78,7 @@ func (c *Client) registerRFC(ctx context.Context, acct *Account, prompt func(tos } // Cache Account URL even if we return an error to the caller. // It is by all means a valid and usable "kid" value for future requests. - c.kid = keyID(a.URI) + c.KID = KeyID(a.URI) if res.StatusCode == http.StatusOK { return nil, ErrAccountAlreadyExists } diff --git a/vendor/golang.org/x/crypto/acme/types.go b/vendor/golang.org/x/crypto/acme/types.go index eaae45290..67b825201 100644 --- a/vendor/golang.org/x/crypto/acme/types.go +++ b/vendor/golang.org/x/crypto/acme/types.go @@ -305,14 +305,6 @@ type Directory struct { ExternalAccountRequired bool } -// rfcCompliant reports whether the ACME server implements RFC 8555. -// Note that some servers may have incomplete RFC implementation -// even if the returned value is true. -// If rfcCompliant reports false, the server most likely implements draft-02. 
-func (d *Directory) rfcCompliant() bool { - return d.OrderURL != "" -} - // Order represents a client's request for a certificate. // It tracks the request flow progress through to issuance. type Order struct { diff --git a/vendor/golang.org/x/crypto/sha3/sha3.go b/vendor/golang.org/x/crypto/sha3/sha3.go index ba269a073..fa182beb4 100644 --- a/vendor/golang.org/x/crypto/sha3/sha3.go +++ b/vendor/golang.org/x/crypto/sha3/sha3.go @@ -86,7 +86,7 @@ func (d *state) permute() { d.buf = d.storage.asBytes()[:0] keccakF1600(&d.a) case spongeSqueezing: - // If we're squeezing, we need to apply the permutatin before + // If we're squeezing, we need to apply the permutation before // copying more output. keccakF1600(&d.a) d.buf = d.storage.asBytes()[:d.rate] diff --git a/vendor/golang.org/x/mod/modfile/rule.go b/vendor/golang.org/x/mod/modfile/rule.go index 78f83fa71..ca03e70ea 100644 --- a/vendor/golang.org/x/mod/modfile/rule.go +++ b/vendor/golang.org/x/mod/modfile/rule.go @@ -956,170 +956,217 @@ func (f *File) SetRequire(req []*Require) { // SetRequireSeparateIndirect updates the requirements of f to contain the given // requirements. Comment contents (except for 'indirect' markings) are retained -// from the first existing requirement for each module path, and block structure -// is maintained as long as the indirect markings match. +// from the first existing requirement for each module path. Like SetRequire, +// SetRequireSeparateIndirect adds requirements for new paths in req, +// updates the version and "// indirect" comment on existing requirements, +// and deletes requirements on paths not in req. Existing duplicate requirements +// are deleted. // -// Any requirements on paths not already present in the file are added. Direct -// requirements are added to the last block containing *any* other direct -// requirement. Indirect requirements are added to the last block containing -// *only* other indirect requirements. If no suitable block exists, a new one is -// added, with the last block containing a direct dependency (if any) -// immediately before the first block containing only indirect dependencies. +// As its name suggests, SetRequireSeparateIndirect puts direct and indirect +// requirements into two separate blocks, one containing only direct +// requirements, and the other containing only indirect requirements. +// SetRequireSeparateIndirect may move requirements between these two blocks +// when their indirect markings change. However, SetRequireSeparateIndirect +// won't move requirements from other blocks, especially blocks with comments. // -// The Syntax field is ignored for requirements in the given blocks. +// If the file initially has one uncommented block of requirements, +// SetRequireSeparateIndirect will split it into a direct-only and indirect-only +// block. This aids in the transition to separate blocks. func (f *File) SetRequireSeparateIndirect(req []*Require) { - type modKey struct { - path string - indirect bool - } - need := make(map[modKey]string) - for _, r := range req { - need[modKey{r.Mod.Path, r.Indirect}] = r.Mod.Version + // hasComments returns whether a line or block has comments + // other than "indirect". 
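// Editorial illustration, not part of the upstream patch: per the new doc
// comment above, a go.mod whose requirements sit in one uncommented mixed
// block is split into a direct-only block followed by an indirect-only
// block. The module paths and versions are made up for the example.
//
//	// before SetRequireSeparateIndirect
//	require (
//		example.com/direct v1.0.0
//		example.com/indirect v1.2.3 // indirect
//	)
//
//	// after SetRequireSeparateIndirect
//	require (
//		example.com/direct v1.0.0
//	)
//
//	require (
//		example.com/indirect v1.2.3 // indirect
//	)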
+ hasComments := func(c Comments) bool { + return len(c.Before) > 0 || len(c.After) > 0 || len(c.Suffix) > 1 || + (len(c.Suffix) == 1 && + strings.TrimSpace(strings.TrimPrefix(c.Suffix[0].Token, string(slashSlash))) != "indirect") } - comments := make(map[string]Comments) - for _, r := range f.Require { - v, ok := need[modKey{r.Mod.Path, r.Indirect}] - if !ok { - if _, ok := need[modKey{r.Mod.Path, !r.Indirect}]; ok { - if _, dup := comments[r.Mod.Path]; !dup { - comments[r.Mod.Path] = r.Syntax.Comments - } + // moveReq adds r to block. If r was in another block, moveReq deletes + // it from that block and transfers its comments. + moveReq := func(r *Require, block *LineBlock) { + var line *Line + if r.Syntax == nil { + line = &Line{Token: []string{AutoQuote(r.Mod.Path), r.Mod.Version}} + r.Syntax = line + if r.Indirect { + r.setIndirect(true) } - r.markRemoved() - continue + } else { + line = new(Line) + *line = *r.Syntax + if !line.InBlock && len(line.Token) > 0 && line.Token[0] == "require" { + line.Token = line.Token[1:] + } + r.Syntax.Token = nil // Cleanup will delete the old line. + r.Syntax = line } - r.setVersion(v) - delete(need, modKey{r.Mod.Path, r.Indirect}) + line.InBlock = true + block.Line = append(block.Line, line) } + // Examine existing require lines and blocks. var ( - lastDirectOrMixedBlock Expr - firstIndirectOnlyBlock Expr - lastIndirectOnlyBlock Expr + // We may insert new requirements into the last uncommented + // direct-only and indirect-only blocks. We may also move requirements + // to the opposite block if their indirect markings change. + lastDirectIndex = -1 + lastIndirectIndex = -1 + + // If there are no direct-only or indirect-only blocks, a new block may + // be inserted after the last require line or block. + lastRequireIndex = -1 + + // If there's only one require line or block, and it's uncommented, + // we'll move its requirements to the direct-only or indirect-only blocks. + requireLineOrBlockCount = 0 + + // Track the block each requirement belongs to (if any) so we can + // move them later. 
+ lineToBlock = make(map[*Line]*LineBlock) ) - for _, stmt := range f.Syntax.Stmt { + for i, stmt := range f.Syntax.Stmt { switch stmt := stmt.(type) { case *Line: if len(stmt.Token) == 0 || stmt.Token[0] != "require" { continue } - if isIndirect(stmt) { - lastIndirectOnlyBlock = stmt - } else { - lastDirectOrMixedBlock = stmt + lastRequireIndex = i + requireLineOrBlockCount++ + if !hasComments(stmt.Comments) { + if isIndirect(stmt) { + lastIndirectIndex = i + } else { + lastDirectIndex = i + } } + case *LineBlock: if len(stmt.Token) == 0 || stmt.Token[0] != "require" { continue } - indirectOnly := true + lastRequireIndex = i + requireLineOrBlockCount++ + allDirect := len(stmt.Line) > 0 && !hasComments(stmt.Comments) + allIndirect := len(stmt.Line) > 0 && !hasComments(stmt.Comments) for _, line := range stmt.Line { - if len(line.Token) == 0 { - continue - } - if !isIndirect(line) { - indirectOnly = false - break - } - } - if indirectOnly { - lastIndirectOnlyBlock = stmt - if firstIndirectOnlyBlock == nil { - firstIndirectOnlyBlock = stmt - } - } else { - lastDirectOrMixedBlock = stmt - } - } - } - - isOrContainsStmt := func(stmt Expr, target Expr) bool { - if stmt == target { - return true - } - if stmt, ok := stmt.(*LineBlock); ok { - if target, ok := target.(*Line); ok { - for _, line := range stmt.Line { - if line == target { - return true - } - } - } - } - return false - } - - addRequire := func(path, vers string, indirect bool, comments Comments) { - var line *Line - if indirect { - if lastIndirectOnlyBlock != nil { - line = f.Syntax.addLine(lastIndirectOnlyBlock, "require", path, vers) - } else { - // Add a new require block after the last direct-only or mixed "require" - // block (if any). - // - // (f.Syntax.addLine would add the line to an existing "require" block if - // present, but here the existing "require" blocks are all direct-only, so - // we know we need to add a new block instead.) - line = &Line{Token: []string{"require", path, vers}} - lastIndirectOnlyBlock = line - firstIndirectOnlyBlock = line // only block implies first block - if lastDirectOrMixedBlock == nil { - f.Syntax.Stmt = append(f.Syntax.Stmt, line) + lineToBlock[line] = stmt + if hasComments(line.Comments) { + allDirect = false + allIndirect = false + } else if isIndirect(line) { + allDirect = false } else { - for i, stmt := range f.Syntax.Stmt { - if isOrContainsStmt(stmt, lastDirectOrMixedBlock) { - f.Syntax.Stmt = append(f.Syntax.Stmt, nil) // increase size - copy(f.Syntax.Stmt[i+2:], f.Syntax.Stmt[i+1:]) // shuffle elements up - f.Syntax.Stmt[i+1] = line - break - } - } + allIndirect = false } } + if allDirect { + lastDirectIndex = i + } + if allIndirect { + lastIndirectIndex = i + } + } + } + + oneFlatUncommentedBlock := requireLineOrBlockCount == 1 && + !hasComments(*f.Syntax.Stmt[lastRequireIndex].Comment()) + + // Create direct and indirect blocks if needed. Convert lines into blocks + // if needed. If we end up with an empty block or a one-line block, + // Cleanup will delete it or convert it to a line later. 
+ insertBlock := func(i int) *LineBlock { + block := &LineBlock{Token: []string{"require"}} + f.Syntax.Stmt = append(f.Syntax.Stmt, nil) + copy(f.Syntax.Stmt[i+1:], f.Syntax.Stmt[i:]) + f.Syntax.Stmt[i] = block + return block + } + + ensureBlock := func(i int) *LineBlock { + switch stmt := f.Syntax.Stmt[i].(type) { + case *LineBlock: + return stmt + case *Line: + block := &LineBlock{ + Token: []string{"require"}, + Line: []*Line{stmt}, + } + stmt.Token = stmt.Token[1:] // remove "require" + stmt.InBlock = true + f.Syntax.Stmt[i] = block + return block + default: + panic(fmt.Sprintf("unexpected statement: %v", stmt)) + } + } + + var lastDirectBlock *LineBlock + if lastDirectIndex < 0 { + if lastIndirectIndex >= 0 { + lastDirectIndex = lastIndirectIndex + lastIndirectIndex++ + } else if lastRequireIndex >= 0 { + lastDirectIndex = lastRequireIndex + 1 } else { - if lastDirectOrMixedBlock != nil { - line = f.Syntax.addLine(lastDirectOrMixedBlock, "require", path, vers) + lastDirectIndex = len(f.Syntax.Stmt) + } + lastDirectBlock = insertBlock(lastDirectIndex) + } else { + lastDirectBlock = ensureBlock(lastDirectIndex) + } + + var lastIndirectBlock *LineBlock + if lastIndirectIndex < 0 { + lastIndirectIndex = lastDirectIndex + 1 + lastIndirectBlock = insertBlock(lastIndirectIndex) + } else { + lastIndirectBlock = ensureBlock(lastIndirectIndex) + } + + // Delete requirements we don't want anymore. + // Update versions and indirect comments on requirements we want to keep. + // If a requirement is in last{Direct,Indirect}Block with the wrong + // indirect marking after this, or if the requirement is in an single + // uncommented mixed block (oneFlatUncommentedBlock), move it to the + // correct block. + // + // Some blocks may be empty after this. Cleanup will remove them. + need := make(map[string]*Require) + for _, r := range req { + need[r.Mod.Path] = r + } + have := make(map[string]*Require) + for _, r := range f.Require { + path := r.Mod.Path + if need[path] == nil || have[path] != nil { + // Requirement not needed, or duplicate requirement. Delete. + r.markRemoved() + continue + } + have[r.Mod.Path] = r + r.setVersion(need[path].Mod.Version) + r.setIndirect(need[path].Indirect) + if need[path].Indirect && + (oneFlatUncommentedBlock || lineToBlock[r.Syntax] == lastDirectBlock) { + moveReq(r, lastIndirectBlock) + } else if !need[path].Indirect && + (oneFlatUncommentedBlock || lineToBlock[r.Syntax] == lastIndirectBlock) { + moveReq(r, lastDirectBlock) + } + } + + // Add new requirements. + for path, r := range need { + if have[path] == nil { + if r.Indirect { + moveReq(r, lastIndirectBlock) } else { - // Add a new require block before the first indirect block (if any). - // - // That way if the file initially contains only indirect lines, - // the direct lines still appear before it: we preserve existing - // structure, but only to the extent that that structure already - // reflects the direct/indirect split. 
- line = &Line{Token: []string{"require", path, vers}} - lastDirectOrMixedBlock = line - if firstIndirectOnlyBlock == nil { - f.Syntax.Stmt = append(f.Syntax.Stmt, line) - } else { - for i, stmt := range f.Syntax.Stmt { - if isOrContainsStmt(stmt, firstIndirectOnlyBlock) { - f.Syntax.Stmt = append(f.Syntax.Stmt, nil) // increase size - copy(f.Syntax.Stmt[i+1:], f.Syntax.Stmt[i:]) // shuffle elements up - f.Syntax.Stmt[i] = line - break - } - } - } + moveReq(r, lastDirectBlock) } + f.Require = append(f.Require, r) } - - line.Comments.Before = commentsAdd(line.Comments.Before, comments.Before) - line.Comments.Suffix = commentsAdd(line.Comments.Suffix, comments.Suffix) - - r := &Require{ - Mod: module.Version{Path: path, Version: vers}, - Indirect: indirect, - Syntax: line, - } - r.setIndirect(indirect) - f.Require = append(f.Require, r) } - for k, vers := range need { - addRequire(k.path, vers, k.indirect, comments[k.path]) - } f.SortBlocks() } diff --git a/vendor/golang.org/x/oauth2/go.mod b/vendor/golang.org/x/oauth2/go.mod index 2b13f0b34..468b62679 100644 --- a/vendor/golang.org/x/oauth2/go.mod +++ b/vendor/golang.org/x/oauth2/go.mod @@ -4,6 +4,6 @@ go 1.11 require ( cloud.google.com/go v0.65.0 - golang.org/x/net v0.0.0-20200822124328-c89045814202 + golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd google.golang.org/appengine v1.6.6 ) diff --git a/vendor/golang.org/x/oauth2/go.sum b/vendor/golang.org/x/oauth2/go.sum index eab5833c4..bdceef99e 100644 --- a/vendor/golang.org/x/oauth2/go.sum +++ b/vendor/golang.org/x/oauth2/go.sum @@ -177,8 +177,9 @@ golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202 h1:VvcQYSHwXgi7W+TpUR6A9g6Up98WAHf3f/ulnJ62IyA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -217,11 +218,15 @@ golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/vendor/golang.org/x/sys/unix/ioctl_linux.go b/vendor/golang.org/x/sys/unix/ioctl_linux.go index 1dadead21..884430b81 100644 --- a/vendor/golang.org/x/sys/unix/ioctl_linux.go +++ b/vendor/golang.org/x/sys/unix/ioctl_linux.go @@ -194,3 +194,26 @@ func ioctlIfreqData(fd int, req uint, value *ifreqData) error { // identical so pass *IfreqData directly. return ioctlPtr(fd, req, unsafe.Pointer(value)) } + +// IoctlKCMClone attaches a new file descriptor to a multiplexor by cloning an +// existing KCM socket, returning a structure containing the file descriptor of +// the new socket. +func IoctlKCMClone(fd int) (*KCMClone, error) { + var info KCMClone + if err := ioctlPtr(fd, SIOCKCMCLONE, unsafe.Pointer(&info)); err != nil { + return nil, err + } + + return &info, nil +} + +// IoctlKCMAttach attaches a TCP socket and associated BPF program file +// descriptor to a multiplexor. +func IoctlKCMAttach(fd int, info KCMAttach) error { + return ioctlPtr(fd, SIOCKCMATTACH, unsafe.Pointer(&info)) +} + +// IoctlKCMUnattach unattaches a TCP socket file descriptor from a multiplexor. +func IoctlKCMUnattach(fd int, info KCMUnattach) error { + return ioctlPtr(fd, SIOCKCMUNATTACH, unsafe.Pointer(&info)) +} diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index a47b035f9..a03708748 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -205,6 +205,7 @@ struct ltchars { #include #include #include +#include #include #include #include @@ -231,6 +232,7 @@ struct ltchars { #include #include #include +#include #include #include #include @@ -503,6 +505,7 @@ ccflags="$@" $2 ~ /^O?XTABS$/ || $2 ~ /^TC[IO](ON|OFF)$/ || $2 ~ /^IN_/ || + $2 ~ /^KCM/ || $2 ~ /^LANDLOCK_/ || $2 ~ /^LOCK_(SH|EX|NB|UN)$/ || $2 ~ /^LO_(KEY|NAME)_SIZE$/ || @@ -597,6 +600,7 @@ ccflags="$@" $2 ~ /^DEVLINK_/ || $2 ~ /^ETHTOOL_/ || $2 ~ /^LWTUNNEL_IP/ || + $2 ~ /^ITIMER_/ || $2 !~ "WMESGLEN" && $2 ~ /^W[A-Z0-9]+$/ || $2 ~/^PPPIOC/ || diff --git a/vendor/golang.org/x/sys/unix/syscall_linux.go b/vendor/golang.org/x/sys/unix/syscall_linux.go index f432b0684..5f28f8fde 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux.go @@ -14,6 +14,7 @@ package unix import ( "encoding/binary" "syscall" + "time" "unsafe" ) @@ -249,6 +250,13 @@ func Getwd() (wd string, err error) { if n < 1 || n > len(buf) || buf[n-1] != 0 { return "", EINVAL } + // In some cases, Linux can return a path that starts with the + // "(unreachable)" prefix, which can potentially be a valid relative + // path. To work around that, return ENOENT if path is not absolute. 
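A small sketch (not part of this patch) of how the Getwd change described above surfaces to callers; the only assumption is a caller that wants to distinguish an unreachable working directory from other failures.

    package main

    import (
        "log"

        "golang.org/x/sys/unix"
    )

    func main() {
        wd, err := unix.Getwd()
        switch {
        case err == unix.ENOENT:
            // With the change above, a kernel result such as "(unreachable)/x"
            // is no longer returned as if it were a valid relative path.
            log.Println("working directory is not reachable from this mount namespace")
        case err != nil:
            log.Fatal(err)
        default:
            log.Println("cwd:", wd)
        }
    }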
+ if buf[0] != '/' { + return "", ENOENT + } + return string(buf[0 : n-1]), nil } @@ -2314,11 +2322,56 @@ type RemoteIovec struct { //sys shmdt(addr uintptr) (err error) //sys shmget(key int, size int, flag int) (id int, err error) +//sys getitimer(which int, currValue *Itimerval) (err error) +//sys setitimer(which int, newValue *Itimerval, oldValue *Itimerval) (err error) + +// MakeItimerval creates an Itimerval from interval and value durations. +func MakeItimerval(interval, value time.Duration) Itimerval { + return Itimerval{ + Interval: NsecToTimeval(interval.Nanoseconds()), + Value: NsecToTimeval(value.Nanoseconds()), + } +} + +// A value which may be passed to the which parameter for Getitimer and +// Setitimer. +type ItimerWhich int + +// Possible which values for Getitimer and Setitimer. +const ( + ItimerReal ItimerWhich = ITIMER_REAL + ItimerVirtual ItimerWhich = ITIMER_VIRTUAL + ItimerProf ItimerWhich = ITIMER_PROF +) + +// Getitimer wraps getitimer(2) to return the current value of the timer +// specified by which. +func Getitimer(which ItimerWhich) (Itimerval, error) { + var it Itimerval + if err := getitimer(int(which), &it); err != nil { + return Itimerval{}, err + } + + return it, nil +} + +// Setitimer wraps setitimer(2) to arm or disarm the timer specified by which. +// It returns the previous value of the timer. +// +// If the Itimerval argument is the zero value, the timer will be disarmed. +func Setitimer(which ItimerWhich, it Itimerval) (Itimerval, error) { + var prev Itimerval + if err := setitimer(int(which), &it, &prev); err != nil { + return Itimerval{}, err + } + + return prev, nil +} + /* * Unimplemented */ // AfsSyscall -// Alarm // ArchPrctl // Brk // ClockNanosleep @@ -2334,7 +2387,6 @@ type RemoteIovec struct { // GetMempolicy // GetRobustList // GetThreadArea -// Getitimer // Getpmsg // IoCancel // IoDestroy diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_386.go b/vendor/golang.org/x/sys/unix/syscall_linux_386.go index 5f757e8aa..d44b8ad53 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_386.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_386.go @@ -173,14 +173,6 @@ const ( _SENDMMSG = 20 ) -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - fd, e := socketcall(_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), 0, 0, 0) - if e != 0 { - err = e - } - return -} - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { fd, e := socketcall(_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) if e != 0 { diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_alarm.go b/vendor/golang.org/x/sys/unix/syscall_linux_alarm.go new file mode 100644 index 000000000..08086ac6a --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_linux_alarm.go @@ -0,0 +1,14 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build linux && (386 || amd64 || mips || mipsle || mips64 || mipsle || ppc64 || ppc64le || ppc || s390x || sparc64) +// +build linux +// +build 386 amd64 mips mipsle mips64 mipsle ppc64 ppc64le ppc s390x sparc64 + +package unix + +// SYS_ALARM is not defined on arm or riscv, but is available for other GOARCH +// values. 
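A hedged usage sketch for the interval-timer wrappers introduced above (Linux-only). The signal handling is an assumption of the example rather than something the wrappers require, but a real program must deal with SIGALRM one way or another before arming ITIMER_REAL.

    package main

    import (
        "fmt"
        "os"
        "os/signal"
        "syscall"
        "time"

        "golang.org/x/sys/unix"
    )

    func main() {
        // Handle SIGALRM so the default action does not terminate the process.
        ch := make(chan os.Signal, 1)
        signal.Notify(ch, syscall.SIGALRM)

        // First expiry after 500ms, then every second; the interval is the
        // first argument of MakeItimerval, the initial value the second.
        it := unix.MakeItimerval(time.Second, 500*time.Millisecond)
        if _, err := unix.Setitimer(unix.ItimerReal, it); err != nil {
            panic(err)
        }

        <-ch // wait for the first SIGALRM

        // Read the remaining time without modifying the timer, then disarm it
        // by passing the zero Itimerval.
        cur, err := unix.Getitimer(unix.ItimerReal)
        if err != nil {
            panic(err)
        }
        fmt.Printf("remaining until next expiry: %+v\n", cur.Value)
        if _, err := unix.Setitimer(unix.ItimerReal, unix.Itimerval{}); err != nil {
            panic(err)
        }
    }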
+ +//sys Alarm(seconds uint) (remaining uint, err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go b/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go index 4299125aa..bd21d93bf 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go @@ -62,7 +62,6 @@ func Stat(path string, stat *Stat_t) (err error) { //sys SyncFileRange(fd int, off int64, n int64, flags int) (err error) //sys Truncate(path string, length int64) (err error) //sys Ustat(dev int, ubuf *Ustat_t) (err error) -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_arm.go b/vendor/golang.org/x/sys/unix/syscall_linux_arm.go index 79edeb9cb..343c91f6b 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_arm.go @@ -27,7 +27,6 @@ func Seek(fd int, offset int64, whence int) (newoffset int64, err error) { return newoffset, nil } -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go b/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go index 862890de2..8c5628684 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go @@ -66,7 +66,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { return ENOSYS } -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go b/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go index 8932e34ad..f0b138002 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go @@ -48,7 +48,6 @@ func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err //sys SyncFileRange(fd int, off int64, n int64, flags int) (err error) //sys Truncate(path string, length int64) (err error) //sys Ustat(dev int, ubuf *Ustat_t) (err error) -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go b/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go index 7821c25d9..e6163c30f 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go @@ -41,7 +41,6 @@ func Syscall9(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, //sys SyncFileRange(fd int, off int64, n int64, flags int) (err error) //sys 
Truncate(path string, length int64) (err error) = SYS_TRUNCATE64 //sys Ustat(dev int, ubuf *Ustat_t) (err error) -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_ppc.go b/vendor/golang.org/x/sys/unix/syscall_linux_ppc.go index c5053a0f0..4740e80a8 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_ppc.go @@ -43,7 +43,6 @@ import ( //sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64 //sys Truncate(path string, length int64) (err error) = SYS_TRUNCATE64 //sys Ustat(dev int, ubuf *Ustat_t) (err error) -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go b/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go index 25786c421..78bc9166e 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go @@ -45,7 +45,6 @@ package unix //sys Statfs(path string, buf *Statfs_t) (err error) //sys Truncate(path string, length int64) (err error) //sys Ustat(dev int, ubuf *Ustat_t) (err error) -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go b/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go index 6f9f71041..3d6c4eb06 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go @@ -65,7 +65,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { return ENOSYS } -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go b/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go index 6aa59cb27..89ce84a41 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go @@ -145,15 +145,6 @@ const ( netSendMMsg = 20 ) -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (int, error) { - args := [3]uintptr{uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))} - fd, _, err := Syscall(SYS_SOCKETCALL, netAccept, uintptr(unsafe.Pointer(&args)), 0) - if err != 0 { - return 0, err - } - return int(fd), nil -} - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (int, error) { args := [4]uintptr{uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags)} fd, _, err := Syscall(SYS_SOCKETCALL, netAccept4, 
uintptr(unsafe.Pointer(&args)), 0) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go b/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go index bbe8d174f..35bdb098c 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go @@ -42,7 +42,6 @@ package unix //sys Statfs(path string, buf *Statfs_t) (err error) //sys SyncFileRange(fd int, off int64, n int64, flags int) (err error) //sys Truncate(path string, length int64) (err error) -//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) //sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) //sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) //sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index 4e5420586..bc7c9d075 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -38,7 +38,8 @@ const ( AF_KEY = 0xf AF_LLC = 0x1a AF_LOCAL = 0x1 - AF_MAX = 0x2d + AF_MAX = 0x2e + AF_MCTP = 0x2d AF_MPLS = 0x1c AF_NETBEUI = 0xd AF_NETLINK = 0x10 @@ -259,6 +260,17 @@ const ( BUS_USB = 0x3 BUS_VIRTUAL = 0x6 CAN_BCM = 0x2 + CAN_CTRLMODE_3_SAMPLES = 0x4 + CAN_CTRLMODE_BERR_REPORTING = 0x10 + CAN_CTRLMODE_CC_LEN8_DLC = 0x100 + CAN_CTRLMODE_FD = 0x20 + CAN_CTRLMODE_FD_NON_ISO = 0x80 + CAN_CTRLMODE_LISTENONLY = 0x2 + CAN_CTRLMODE_LOOPBACK = 0x1 + CAN_CTRLMODE_ONE_SHOT = 0x8 + CAN_CTRLMODE_PRESUME_ACK = 0x40 + CAN_CTRLMODE_TDC_AUTO = 0x200 + CAN_CTRLMODE_TDC_MANUAL = 0x400 CAN_EFF_FLAG = 0x80000000 CAN_EFF_ID_BITS = 0x1d CAN_EFF_MASK = 0x1fffffff @@ -336,6 +348,7 @@ const ( CAN_RTR_FLAG = 0x40000000 CAN_SFF_ID_BITS = 0xb CAN_SFF_MASK = 0x7ff + CAN_TERMINATION_DISABLED = 0x0 CAN_TP16 = 0x3 CAN_TP20 = 0x4 CAP_AUDIT_CONTROL = 0x1e @@ -1267,9 +1280,14 @@ const ( IP_XFRM_POLICY = 0x11 ISOFS_SUPER_MAGIC = 0x9660 ISTRIP = 0x20 + ITIMER_PROF = 0x2 + ITIMER_REAL = 0x0 + ITIMER_VIRTUAL = 0x1 IUTF8 = 0x4000 IXANY = 0x800 JFFS2_SUPER_MAGIC = 0x72b6 + KCMPROTO_CONNECTED = 0x0 + KCM_RECV_DISABLE = 0x1 KEXEC_ARCH_386 = 0x30000 KEXEC_ARCH_68K = 0x40000 KEXEC_ARCH_AARCH64 = 0xb70000 @@ -2442,6 +2460,9 @@ const ( SIOCGSTAMPNS = 0x8907 SIOCGSTAMPNS_OLD = 0x8907 SIOCGSTAMP_OLD = 0x8906 + SIOCKCMATTACH = 0x89e0 + SIOCKCMCLONE = 0x89e2 + SIOCKCMUNATTACH = 0x89e1 SIOCOUTQNSD = 0x894b SIOCPROTOPRIVATE = 0x89e0 SIOCRTMSG = 0x890d diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux.go b/vendor/golang.org/x/sys/unix/zsyscall_linux.go index 93edda4c4..30fa4055e 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux.go @@ -2032,3 +2032,23 @@ func shmget(key int, size int, flag int) (id int, err error) { } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getitimer(which int, currValue *Itimerval) (err error) { + _, _, e1 := Syscall(SYS_GETITIMER, uintptr(which), uintptr(unsafe.Pointer(currValue)), 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func setitimer(which int, newValue *Itimerval, oldValue *Itimerval) (err error) { + _, _, e1 := Syscall(SYS_SETITIMER, uintptr(which), uintptr(unsafe.Pointer(newValue)), uintptr(unsafe.Pointer(oldValue))) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go 
b/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go index ff90c81e7..2fc6271f4 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -l32 -tags linux,386 syscall_linux.go syscall_linux_386.go +// go run mksyscall.go -l32 -tags linux,386 syscall_linux.go syscall_linux_386.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && 386 @@ -524,3 +524,14 @@ func utimes(path string, times *[2]Timeval) (err error) { } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go index fa7d3dbe4..43d9f0128 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags linux,amd64 syscall_linux.go syscall_linux_amd64.go +// go run mksyscall.go -tags linux,amd64 syscall_linux.go syscall_linux_amd64.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && amd64 @@ -444,17 +444,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -691,3 +680,14 @@ func kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, f } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go index 654f91530..7df0cb179 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go @@ -46,17 +46,6 @@ func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go 
b/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go index e893f987f..076e8f1c5 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go @@ -389,17 +389,6 @@ func Truncate(path string, length int64) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go index 6d1552885..7b3c84746 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -b32 -arm -tags linux,mips syscall_linux.go syscall_linux_mipsx.go +// go run mksyscall.go -b32 -arm -tags linux,mips syscall_linux.go syscall_linux_mipsx.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && mips @@ -344,17 +344,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -702,3 +691,14 @@ func setrlimit(resource int, rlim *rlimit32) (err error) { } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go index 1e20d72df..0d3c45fbd 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags linux,mips64 syscall_linux.go syscall_linux_mips64x.go +// go run mksyscall.go -tags linux,mips64 syscall_linux.go syscall_linux_mips64x.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. 
//go:build linux && mips64 @@ -399,17 +399,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -696,3 +685,14 @@ func stat(path string, st *stat_t) (err error) { } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go index 82b5e2d9e..cb46b2aaa 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go @@ -399,17 +399,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go index a0440c1d4..21c9baa6a 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -l32 -arm -tags linux,mipsle syscall_linux.go syscall_linux_mipsx.go +// go run mksyscall.go -l32 -arm -tags linux,mipsle syscall_linux.go syscall_linux_mipsx.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. 
//go:build linux && mipsle @@ -344,17 +344,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -702,3 +691,14 @@ func setrlimit(resource int, rlim *rlimit32) (err error) { } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go index 5864b9ca6..02b8f0887 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -b32 -tags linux,ppc syscall_linux.go syscall_linux_ppc.go +// go run mksyscall.go -b32 -tags linux,ppc syscall_linux.go syscall_linux_ppc.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && ppc @@ -409,17 +409,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -707,3 +696,14 @@ func kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, f } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go index beeb49e34..ac8cb09ba 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags linux,ppc64 syscall_linux.go syscall_linux_ppc64x.go +// go run mksyscall.go -tags linux,ppc64 syscall_linux.go syscall_linux_ppc64x.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. 
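The regenerated files above add the same Alarm wrapper on every supported GOARCH; a minimal usage sketch (not part of this patch) follows. The example cancels the alarm before it fires, so no SIGALRM handling is needed here.

    package main

    import (
        "log"

        "golang.org/x/sys/unix"
    )

    func main() {
        // Arm a 30-second alarm; the return value is how many seconds were
        // left on a previously armed alarm (0 if there was none).
        remaining, err := unix.Alarm(30)
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("previous alarm had %d seconds remaining", remaining)

        // Passing 0 cancels any pending alarm.
        if _, err := unix.Alarm(0); err != nil {
            log.Fatal(err)
        }
    }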
//go:build linux && ppc64 @@ -475,17 +475,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -753,3 +742,14 @@ func kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, f } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go index 53139b82c..bd08d887a 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags linux,ppc64le syscall_linux.go syscall_linux_ppc64x.go +// go run mksyscall.go -tags linux,ppc64le syscall_linux.go syscall_linux_ppc64x.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && ppc64le @@ -475,17 +475,6 @@ func Ustat(dev int, ubuf *Ustat_t) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -753,3 +742,14 @@ func kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, f } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go index 63b393b80..a834d2173 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go @@ -369,17 +369,6 @@ func Truncate(path string, length int64) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa 
*RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go index 202add37d..9e462a96f 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags linux,s390x syscall_linux.go syscall_linux_s390x.go +// go run mksyscall.go -tags linux,s390x syscall_linux.go syscall_linux_s390x.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && s390x @@ -533,3 +533,14 @@ func kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, f } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go index 2ab268c34..96d340242 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go @@ -1,4 +1,4 @@ -// go run mksyscall.go -tags linux,sparc64 syscall_linux.go syscall_linux_sparc64.go +// go run mksyscall.go -tags linux,sparc64 syscall_linux.go syscall_linux_sparc64.go syscall_linux_alarm.go // Code generated by the command above; see README.md. DO NOT EDIT. //go:build linux && sparc64 @@ -455,17 +455,6 @@ func Truncate(path string, length int64) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { - r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) - fd = int(r0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) { r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0) fd = int(r0) @@ -697,3 +686,14 @@ func utimes(path string, times *[2]Timeval) (err error) { } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Alarm(seconds uint) (remaining uint, err error) { + r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) + remaining = uint(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index 66788f156..e6a8d88c5 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -24,6 +24,11 @@ type ItimerSpec struct { Value Timespec } +type Itimerval struct { + Interval Timeval + Value Timeval +} + const ( TIME_OK = 0x0 TIME_INS = 0x1 @@ -4065,3 +4070,91 @@ const ( NL_POLICY_TYPE_ATTR_MASK = 0xc NL_POLICY_TYPE_ATTR_MAX = 0xc ) + +type CANBitTiming struct { + Bitrate uint32 + Sample_point uint32 + Tq uint32 + Prop_seg uint32 + Phase_seg1 uint32 + Phase_seg2 uint32 + Sjw uint32 + Brp uint32 +} + +type CANBitTimingConst struct { + Name [16]uint8 + Tseg1_min uint32 + Tseg1_max 
uint32 + Tseg2_min uint32 + Tseg2_max uint32 + Sjw_max uint32 + Brp_min uint32 + Brp_max uint32 + Brp_inc uint32 +} + +type CANClock struct { + Freq uint32 +} + +type CANBusErrorCounters struct { + Txerr uint16 + Rxerr uint16 +} + +type CANCtrlMode struct { + Mask uint32 + Flags uint32 +} + +type CANDeviceStats struct { + Bus_error uint32 + Error_warning uint32 + Error_passive uint32 + Bus_off uint32 + Arbitration_lost uint32 + Restarts uint32 +} + +const ( + CAN_STATE_ERROR_ACTIVE = 0x0 + CAN_STATE_ERROR_WARNING = 0x1 + CAN_STATE_ERROR_PASSIVE = 0x2 + CAN_STATE_BUS_OFF = 0x3 + CAN_STATE_STOPPED = 0x4 + CAN_STATE_SLEEPING = 0x5 + CAN_STATE_MAX = 0x6 +) + +const ( + IFLA_CAN_UNSPEC = 0x0 + IFLA_CAN_BITTIMING = 0x1 + IFLA_CAN_BITTIMING_CONST = 0x2 + IFLA_CAN_CLOCK = 0x3 + IFLA_CAN_STATE = 0x4 + IFLA_CAN_CTRLMODE = 0x5 + IFLA_CAN_RESTART_MS = 0x6 + IFLA_CAN_RESTART = 0x7 + IFLA_CAN_BERR_COUNTER = 0x8 + IFLA_CAN_DATA_BITTIMING = 0x9 + IFLA_CAN_DATA_BITTIMING_CONST = 0xa + IFLA_CAN_TERMINATION = 0xb + IFLA_CAN_TERMINATION_CONST = 0xc + IFLA_CAN_BITRATE_CONST = 0xd + IFLA_CAN_DATA_BITRATE_CONST = 0xe + IFLA_CAN_BITRATE_MAX = 0xf +) + +type KCMAttach struct { + Fd int32 + Bpf_fd int32 +} + +type KCMUnattach struct { + Fd int32 +} + +type KCMClone struct { + Fd int32 +} diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index 635880610..c426c3576 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -210,8 +210,8 @@ type PtraceFpregs struct { } type PtracePer struct { - _ [0]uint64 - _ [32]byte + Control_regs [3]uint64 + _ [8]byte Starting_addr uint64 Ending_addr uint64 Perc_atmid uint16 diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go index 4c3ac6647..d3670aca9 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) const Doc = `check for unkeyed composite literals @@ -67,41 +68,61 @@ func run(pass *analysis.Pass) (interface{}, error) { // skip whitelisted types return } - under := typ.Underlying() - for { - ptr, ok := under.(*types.Pointer) - if !ok { - break + var structuralTypes []types.Type + switch typ := typ.(type) { + case *typeparams.TypeParam: + terms, err := typeparams.StructuralTerms(typ) + if err != nil { + return // invalid type } - under = ptr.Elem().Underlying() - } - if _, ok := under.(*types.Struct); !ok { - // skip non-struct composite literals - return - } - if isLocalType(pass, typ) { - // allow unkeyed locally defined composite literal - return - } - - // check if the CompositeLit contains an unkeyed field - allKeyValue := true - for _, e := range cl.Elts { - if _, ok := e.(*ast.KeyValueExpr); !ok { - allKeyValue = false - break + for _, term := range terms { + structuralTypes = append(structuralTypes, term.Type()) } + default: + structuralTypes = append(structuralTypes, typ) } - if allKeyValue { - // all the composite literal fields are keyed + for _, typ := range structuralTypes { + under := deref(typ.Underlying()) + if _, ok := under.(*types.Struct); !ok { + // skip non-struct composite literals + continue + } + if isLocalType(pass, typ) { + // allow 
unkeyed locally defined composite literal + continue + } + + // check if the CompositeLit contains an unkeyed field + allKeyValue := true + for _, e := range cl.Elts { + if _, ok := e.(*ast.KeyValueExpr); !ok { + allKeyValue = false + break + } + } + if allKeyValue { + // all the composite literal fields are keyed + continue + } + + pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName) return } - - pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName) }) return nil, nil } +func deref(typ types.Type) types.Type { + for { + ptr, ok := typ.(*types.Pointer) + if !ok { + break + } + typ = ptr.Elem().Underlying() + } + return typ +} + func isLocalType(pass *analysis.Pass, typ types.Type) bool { switch x := typ.(type) { case *types.Struct: @@ -112,6 +133,8 @@ func isLocalType(pass *analysis.Pass, typ types.Type) bool { case *types.Named: // names in package foo are local to foo_test too return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test") + case *typeparams.TypeParam: + return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test") } return false } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go index c4ebf7857..350dc4e0f 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) const Doc = `check for locks erroneously passed by value @@ -145,7 +146,7 @@ func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) { func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, typ *ast.FuncType) { if recv != nil && len(recv.List) > 0 { expr := recv.List[0].Type - if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil { + if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type, nil); path != nil { pass.ReportRangef(expr, "%s passes lock by value: %v", name, path) } } @@ -153,7 +154,7 @@ func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, t if typ.Params != nil { for _, field := range typ.Params.List { expr := field.Type - if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil { + if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type, nil); path != nil { pass.ReportRangef(expr, "%s passes lock by value: %v", name, path) } } @@ -199,12 +200,12 @@ func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) { if typ == nil { return } - if path := lockPath(pass.Pkg, typ); path != nil { + if path := lockPath(pass.Pkg, typ, nil); path != nil { pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path) } } -type typePath []types.Type +type typePath []string // String pretty-prints a typePath. func (path typePath) String() string { @@ -215,7 +216,7 @@ func (path typePath) String() string { fmt.Fprint(&buf, " contains ") } // The human-readable path is in reverse order, outermost to innermost. 
- fmt.Fprint(&buf, path[n-i-1].String()) + fmt.Fprint(&buf, path[n-i-1]) } return buf.String() } @@ -234,16 +235,57 @@ func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { return nil } } - return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type) + return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type, nil) } // lockPath returns a typePath describing the location of a lock value // contained in typ. If there is no contained lock, it returns nil. -func lockPath(tpkg *types.Package, typ types.Type) typePath { +// +// The seenTParams map is used to short-circuit infinite recursion via type +// parameters. +func lockPath(tpkg *types.Package, typ types.Type, seenTParams map[*typeparams.TypeParam]bool) typePath { if typ == nil { return nil } + if tpar, ok := typ.(*typeparams.TypeParam); ok { + if seenTParams == nil { + // Lazily allocate seenTParams, since the common case will not involve + // any type parameters. + seenTParams = make(map[*typeparams.TypeParam]bool) + } + if seenTParams[tpar] { + return nil + } + seenTParams[tpar] = true + terms, err := typeparams.StructuralTerms(tpar) + if err != nil { + return nil // invalid type + } + for _, term := range terms { + subpath := lockPath(tpkg, term.Type(), seenTParams) + if len(subpath) > 0 { + if term.Tilde() { + // Prepend a tilde to our lock path entry to clarify the resulting + // diagnostic message. Consider the following example: + // + // func _[Mutex interface{ ~sync.Mutex; M() }](m Mutex) {} + // + // Here the naive error message will be something like "passes lock + // by value: Mutex contains sync.Mutex". This is misleading because + // the local type parameter doesn't actually contain sync.Mutex, + // which lacks the M method. + // + // With tilde, it is clearer that the containment is via an + // approximation element. + subpath[len(subpath)-1] = "~" + subpath[len(subpath)-1] + } + return append(subpath, typ.String()) + } + } + return nil + } + for { atyp, ok := typ.Underlying().(*types.Array) if !ok { @@ -252,6 +294,17 @@ func lockPath(tpkg *types.Package, typ types.Type) typePath { typ = atyp.Elem() } + ttyp, ok := typ.Underlying().(*types.Tuple) + if ok { + for i := 0; i < ttyp.Len(); i++ { + subpath := lockPath(tpkg, ttyp.At(i).Type(), seenTParams) + if subpath != nil { + return append(subpath, typ.String()) + } + } + return nil + } + // We're only interested in the case in which the underlying // type is a struct. (Interfaces and pointers are safe to copy.) styp, ok := typ.Underlying().(*types.Struct) @@ -263,7 +316,7 @@ func lockPath(tpkg *types.Package, typ types.Type) typePath { // is a sync.Locker, but a value is not. This differentiates // embedded interfaces from embedded values. if types.Implements(types.NewPointer(typ), lockerType) && !types.Implements(typ, lockerType) { - return []types.Type{typ} + return []string{typ.String()} } // In go1.10, sync.noCopy did not implement Locker. 
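To make the type-parameter handling above concrete, a hedged example of code the updated copylock analysis can now report; the function name f is illustrative and the diagnostic wording is approximate.

    package p

    import "sync"

    // The constraint's type set contains sync.Mutex, so passing m by value
    // copies a lock; the analysis reports roughly:
    //
    //	f passes lock by value: M contains sync.Mutex
    func f[M sync.Mutex](m M) {
        _ = m
    }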
@@ -272,15 +325,15 @@ func lockPath(tpkg *types.Package, typ types.Type) typePath { if named, ok := typ.(*types.Named); ok && named.Obj().Name() == "noCopy" && named.Obj().Pkg().Path() == "sync" { - return []types.Type{typ} + return []string{typ.String()} } nfields := styp.NumFields() for i := 0; i < nfields; i++ { ftyp := styp.Field(i).Type() - subpath := lockPath(tpkg, ftyp) + subpath := lockPath(tpkg, ftyp, seenTParams) if subpath != nil { - return append(subpath, typ) + return append(subpath, typ.String()) } } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go b/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go index 51600ffc7..73746d6f0 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go @@ -187,7 +187,11 @@ func (c *CFGs) callMayReturn(call *ast.CallExpr) (r bool) { return false // panic never returns } - // Is this a static call? + // Is this a static call? Also includes static functions + // parameterized by a type. Such functions may or may not + // return depending on the parameter type, but in some + // cases the answer is definite. We let ctrlflow figure + // that out. fn := typeutil.StaticCallee(c.pass.TypesInfo, call) if fn == nil { return true // callee not statically known; be conservative diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go index fd2285332..30130f63e 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go @@ -51,6 +51,12 @@ func assertableTo(v, t types.Type) *types.Func { if V == nil || T == nil { return nil } + + // Mitigations for interface comparisons and generics. + // TODO(https://github.com/golang/go/issues/50658): Support more precise conclusion. + if isParameterized(V) || isParameterized(T) { + return nil + } if f, wrongType := types.MissingMethod(V, T, false); wrongType { return f } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go new file mode 100644 index 000000000..1285ecf13 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go @@ -0,0 +1,112 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +package ifaceassert + +import ( + "go/types" + + "golang.org/x/tools/internal/typeparams" +) + +// isParameterized reports whether typ contains any of the type parameters of tparams. +// +// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy. +func isParameterized(typ types.Type) bool { + w := tpWalker{ + seen: make(map[types.Type]bool), + } + return w.isParameterized(typ) +} + +type tpWalker struct { + seen map[types.Type]bool +} + +func (w *tpWalker) isParameterized(typ types.Type) (res bool) { + // detect cycles + if x, ok := w.seen[typ]; ok { + return x + } + w.seen[typ] = false + defer func() { + w.seen[typ] = res + }() + + switch t := typ.(type) { + case nil, *types.Basic: // TODO(gri) should nil be handled here? 
+ break + + case *types.Array: + return w.isParameterized(t.Elem()) + + case *types.Slice: + return w.isParameterized(t.Elem()) + + case *types.Struct: + for i, n := 0, t.NumFields(); i < n; i++ { + if w.isParameterized(t.Field(i).Type()) { + return true + } + } + + case *types.Pointer: + return w.isParameterized(t.Elem()) + + case *types.Tuple: + n := t.Len() + for i := 0; i < n; i++ { + if w.isParameterized(t.At(i).Type()) { + return true + } + } + + case *types.Signature: + // t.tparams may not be nil if we are looking at a signature + // of a generic function type (or an interface method) that is + // part of the type we're testing. We don't care about these type + // parameters. + // Similarly, the receiver of a method may declare (rather then + // use) type parameters, we don't care about those either. + // Thus, we only need to look at the input and result parameters. + return w.isParameterized(t.Params()) || w.isParameterized(t.Results()) + + case *types.Interface: + for i, n := 0, t.NumMethods(); i < n; i++ { + if w.isParameterized(t.Method(i).Type()) { + return true + } + } + terms, err := typeparams.InterfaceTermSet(t) + if err != nil { + panic(err) + } + for _, term := range terms { + if w.isParameterized(term.Type()) { + return true + } + } + + case *types.Map: + return w.isParameterized(t.Key()) || w.isParameterized(t.Elem()) + + case *types.Chan: + return w.isParameterized(t.Elem()) + + case *types.Named: + list := typeparams.NamedTypeArgs(t) + for i, n := 0, list.Len(); i < n; i++ { + if w.isParameterized(list.At(i)) { + return true + } + } + + case *typeparams.TypeParam: + return true + + default: + panic(t) // unreachable + } + + return false +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go b/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go index cd42c9897..e4c66df6d 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) const Doc = `check for useless comparisons between functions and nil @@ -59,6 +60,12 @@ func run(pass *analysis.Pass) (interface{}, error) { obj = pass.TypesInfo.Uses[v] case *ast.SelectorExpr: obj = pass.TypesInfo.Uses[v.Sel] + case *ast.IndexExpr, *typeparams.IndexListExpr: + // Check generic functions such as "f[T1,T2]". + x, _, _, _ := typeparams.UnpackIndexExpr(v) + if id, ok := x.(*ast.Ident); ok { + obj = pass.TypesInfo.Uses[id] + } default: return } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go index de0369a42..dee37d78a 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go @@ -25,6 +25,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typeparams" ) func init() { @@ -452,8 +453,15 @@ func stringConstantArg(pass *analysis.Pass, call *ast.CallExpr, idx int) (string if idx >= len(call.Args) { return "", false } - arg := call.Args[idx] - lit := pass.TypesInfo.Types[arg].Value + return stringConstantExpr(pass, call.Args[idx]) +} + +// stringConstantExpr returns expression's string constant value. 
+// +// ("", false) is returned if expression isn't a string +// constant. +func stringConstantExpr(pass *analysis.Pass, expr ast.Expr) (string, bool) { + lit := pass.TypesInfo.Types[expr].Value if lit != nil && lit.Kind() == constant.String { return constant.StringVal(lit), true } @@ -513,7 +521,12 @@ func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, func isFormatter(typ types.Type) bool { // If the type is an interface, the value it holds might satisfy fmt.Formatter. if _, ok := typ.Underlying().(*types.Interface); ok { - return true + // Don't assume type parameters could be formatters. With the greater + // expressiveness of constraint interface syntax we expect more type safety + // when using type parameters. + if !typeparams.IsTypeParam(typ) { + return true + } } obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "Format") fn, ok := obj.(*types.Func) @@ -834,8 +847,9 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o } // Could current arg implement fmt.Formatter? + // Skip check for the %w verb, which requires an error. formatter := false - if state.argNum < len(call.Args) { + if v.typ != argError && state.argNum < len(call.Args) { if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok { formatter = isFormatter(tv.Type) } @@ -871,8 +885,12 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o return } arg := call.Args[argNum] - if !matchArgType(pass, argInt, nil, arg) { - pass.ReportRangef(call, "%s format %s uses non-int %s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg)) + if reason, ok := matchArgType(pass, argInt, arg); !ok { + details := "" + if reason != "" { + details = " (" + reason + ")" + } + pass.ReportRangef(call, "%s format %s uses non-int %s%s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg), details) return false } } @@ -889,12 +907,16 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg)) return false } - if !matchArgType(pass, v.typ, nil, arg) { + if reason, ok := matchArgType(pass, v.typ, arg); !ok { typeString := "" if typ := pass.TypesInfo.Types[arg].Type; typ != nil { typeString = typ.String() } - pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString) + details := "" + if reason != "" { + details = " (" + reason + ")" + } + pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s%s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString, details) return false } if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) { @@ -1052,10 +1074,10 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { } arg := args[0] - if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING { - // Ignore trailing % character in lit.Value. + if s, ok := stringConstantExpr(pass, arg); ok { + // Ignore trailing % character // The % in "abc 0.0%" couldn't be a formatting directive. 
- s := strings.TrimSuffix(lit.Value, `%"`) + s = strings.TrimSuffix(s, "%") if strings.Contains(s, "%") { m := printFormatRE.FindStringSubmatch(s) if m != nil { @@ -1066,9 +1088,8 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { if strings.HasSuffix(fn.Name(), "ln") { // The last item, if a string, should not have a newline. arg = args[len(args)-1] - if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING { - str, _ := strconv.Unquote(lit.Value) - if strings.HasSuffix(str, "\n") { + if s, ok := stringConstantExpr(pass, arg); ok { + if strings.HasSuffix(s, "\n") { pass.ReportRangef(call, "%s arg list ends with redundant newline", fn.FullName()) } } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go index 6a5fae44f..270e917c8 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go @@ -5,45 +5,60 @@ package printf import ( + "fmt" "go/ast" "go/types" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/internal/typeparams" ) var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) -// matchArgType reports an error if printf verb t is not appropriate -// for operand arg. +// matchArgType reports an error if printf verb t is not appropriate for +// operand arg. // -// typ is used only for recursive calls; external callers must supply nil. -// -// (Recursion arises from the compound types {map,chan,slice} which -// may be printed with %d etc. if that is appropriate for their element -// types.) -func matchArgType(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr) bool { - return matchArgTypeInternal(pass, t, typ, arg, make(map[types.Type]bool)) -} - -// matchArgTypeInternal is the internal version of matchArgType. It carries a map -// remembering what types are in progress so we don't recur when faced with recursive -// types or mutually recursive types. -func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool { +// If arg is a type parameter, the verb t must be appropriate for every type in +// the type parameter type set. +func matchArgType(pass *analysis.Pass, t printfArgType, arg ast.Expr) (reason string, ok bool) { // %v, %T accept any argument type. if t == anyType { - return true - } - if typ == nil { - // external call - typ = pass.TypesInfo.Types[arg].Type - if typ == nil { - return true // probably a type check problem - } + return "", true } + typ := pass.TypesInfo.Types[arg].Type + if typ == nil { + return "", true // probably a type check problem + } + + m := &argMatcher{t: t, seen: make(map[types.Type]bool)} + ok = m.match(typ, true) + return m.reason, ok +} + +// argMatcher recursively matches types against the printfArgType t. +// +// To short-circuit recursion, it keeps track of types that have already been +// matched (or are in the process of being matched) via the seen map. Recursion +// arises from the compound types {map,chan,slice} which may be printed with %d +// etc. if that is appropriate for their element types, as well as from type +// parameters, which are expanded to the constituents of their type set. +// +// The reason field may be set to report the cause of the mismatch. 
+type argMatcher struct { + t printfArgType + seen map[types.Type]bool + reason string +} + +// match checks if typ matches m's printf arg type. If topLevel is true, typ is +// the actual type of the printf arg, for which special rules apply. As a +// special case, top level type parameters pass topLevel=true when checking for +// matches among the constituents of their type set, as type arguments will +// replace the type parameter at compile time. +func (m *argMatcher) match(typ types.Type, topLevel bool) bool { // %w accepts only errors. - if t == argError { + if m.t == argError { return types.ConvertibleTo(typ, errorType) } @@ -51,65 +66,122 @@ func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, if isFormatter(typ) { return true } + // If we can use a string, might arg (dynamically) implement the Stringer or Error interface? - if t&argString != 0 && isConvertibleToString(pass, typ) { + if m.t&argString != 0 && isConvertibleToString(typ) { + return true + } + + if typ, _ := typ.(*typeparams.TypeParam); typ != nil { + // Avoid infinite recursion through type parameters. + if m.seen[typ] { + return true + } + m.seen[typ] = true + terms, err := typeparams.StructuralTerms(typ) + if err != nil { + return true // invalid type (possibly an empty type set) + } + + if len(terms) == 0 { + // No restrictions on the underlying of typ. Type parameters implementing + // error, fmt.Formatter, or fmt.Stringer were handled above, and %v and + // %T was handled in matchType. We're about to check restrictions the + // underlying; if the underlying type is unrestricted there must be an + // element of the type set that violates one of the arg type checks + // below, so we can safely return false here. + + if m.t == anyType { // anyType must have already been handled. + panic("unexpected printfArgType") + } + return false + } + + // Only report a reason if typ is the argument type, otherwise it won't + // make sense. Note that it is not sufficient to check if topLevel == here, + // as type parameters can have a type set consisting of other type + // parameters. + reportReason := len(m.seen) == 1 + + for _, term := range terms { + if !m.match(term.Type(), topLevel) { + if reportReason { + if term.Tilde() { + m.reason = fmt.Sprintf("contains ~%s", term.Type()) + } else { + m.reason = fmt.Sprintf("contains %s", term.Type()) + } + } + return false + } + } return true } typ = typ.Underlying() - if inProgress[typ] { - // We're already looking at this type. The call that started it will take care of it. + if m.seen[typ] { + // We've already considered typ, or are in the process of considering it. + // In case we've already considered typ, it must have been valid (else we + // would have stopped matching). In case we're in the process of + // considering it, we must avoid infinite recursion. + // + // There are some pathological cases where returning true here is + // incorrect, for example `type R struct { F []R }`, but these are + // acceptable false negatives. return true } - inProgress[typ] = true + m.seen[typ] = true switch typ := typ.(type) { case *types.Signature: - return t == argPointer + return m.t == argPointer case *types.Map: - return t == argPointer || - // Recur: map[int]int matches %d. - (matchArgTypeInternal(pass, t, typ.Key(), arg, inProgress) && matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)) + if m.t == argPointer { + return true + } + // Recur: map[int]int matches %d. 
+ return m.match(typ.Key(), false) && m.match(typ.Elem(), false) case *types.Chan: - return t&argPointer != 0 + return m.t&argPointer != 0 case *types.Array: // Same as slice. - if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 { + if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && m.t&argString != 0 { return true // %s matches []byte } // Recur: []int matches %d. - return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress) + return m.match(typ.Elem(), false) case *types.Slice: // Same as array. - if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 { + if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && m.t&argString != 0 { return true // %s matches []byte } - if t == argPointer { + if m.t == argPointer { return true // %p prints a slice's 0th element } // Recur: []int matches %d. But watch out for // type T []T // If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below. - return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress) + return m.match(typ.Elem(), false) case *types.Pointer: // Ugly, but dealing with an edge case: a known pointer to an invalid type, // probably something from a failed import. - if typ.Elem().String() == "invalid type" { - if false { - pass.Reportf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", analysisutil.Format(pass.Fset, arg)) - } + if typ.Elem() == types.Typ[types.Invalid] { return true // special case } // If it's actually a pointer with %p, it prints as one. - if t == argPointer { + if m.t == argPointer { return true } + if typeparams.IsTypeParam(typ.Elem()) { + return true // We don't know whether the logic below applies. Give up. + } + under := typ.Elem().Underlying() switch under.(type) { case *types.Struct: // see below @@ -118,19 +190,31 @@ func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, case *types.Map: // see below default: // Check whether the rest can print pointers. - return t&argPointer != 0 + return m.t&argPointer != 0 } - // If it's a top-level pointer to a struct, array, slice, or + // If it's a top-level pointer to a struct, array, slice, type param, or // map, that's equivalent in our analysis to whether we can // print the type being pointed to. Pointers in nested levels // are not supported to minimize fmt running into loops. - if len(inProgress) > 1 { + if !topLevel { return false } - return matchArgTypeInternal(pass, t, under, arg, inProgress) + return m.match(under, false) case *types.Struct: - return matchStructArgType(pass, t, typ, arg, inProgress) + // report whether all the elements of the struct match the expected type. For + // instance, with "%d" all the elements must be printable with the "%d" format. + for i := 0; i < typ.NumFields(); i++ { + typf := typ.Field(i) + if !m.match(typf.Type(), false) { + return false + } + if m.t&argString != 0 && !typf.Exported() && isConvertibleToString(typf.Type()) { + // Issue #17798: unexported Stringer or error cannot be properly formatted. + return false + } + } + return true case *types.Interface: // There's little we can do. 
@@ -142,7 +226,7 @@ func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, switch typ.Kind() { case types.UntypedBool, types.Bool: - return t&argBool != 0 + return m.t&argBool != 0 case types.UntypedInt, types.Int, @@ -156,35 +240,32 @@ func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, types.Uint32, types.Uint64, types.Uintptr: - return t&argInt != 0 + return m.t&argInt != 0 case types.UntypedFloat, types.Float32, types.Float64: - return t&argFloat != 0 + return m.t&argFloat != 0 case types.UntypedComplex, types.Complex64, types.Complex128: - return t&argComplex != 0 + return m.t&argComplex != 0 case types.UntypedString, types.String: - return t&argString != 0 + return m.t&argString != 0 case types.UnsafePointer: - return t&(argPointer|argInt) != 0 + return m.t&(argPointer|argInt) != 0 case types.UntypedRune: - return t&(argInt|argRune) != 0 + return m.t&(argInt|argRune) != 0 case types.UntypedNil: return false case types.Invalid: - if false { - pass.Reportf(arg.Pos(), "printf argument %v has invalid or unknown type", analysisutil.Format(pass.Fset, arg)) - } return true // Probably a type check problem. } panic("unreachable") @@ -193,7 +274,7 @@ func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, return false } -func isConvertibleToString(pass *analysis.Pass, typ types.Type) bool { +func isConvertibleToString(typ types.Type) bool { if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil { // We explicitly don't want untyped nil, which is // convertible to both of the interfaces below, as it @@ -228,19 +309,3 @@ func hasBasicType(pass *analysis.Pass, x ast.Expr, kind types.BasicKind) bool { b, ok := t.(*types.Basic) return ok && b.Kind() == kind } - -// matchStructArgType reports whether all the elements of the struct match the expected -// type. For instance, with "%d" all the elements must be printable with the "%d" format. -func matchStructArgType(pass *analysis.Pass, t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool { - for i := 0; i < typ.NumFields(); i++ { - typf := typ.Field(i) - if !matchArgTypeInternal(pass, t, typf.Type(), arg, inProgress) { - return false - } - if t&argString != 0 && !typf.Exported() && isConvertibleToString(pass, typf.Type()) { - // Issue #17798: unexported Stringer or error cannot be properly formatted. 
- return false - } - } - return true -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go index 1f3df07cc..e968f27b4 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go @@ -14,11 +14,14 @@ import ( "go/ast" "go/constant" "go/token" + "go/types" + "math" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) const Doc = "check for shifts that equal or exceed the width of the integer" @@ -93,9 +96,36 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { if t == nil { return } - size := 8 * pass.TypesSizes.Sizeof(t) - if amt >= size { + var structuralTypes []types.Type + switch t := t.(type) { + case *typeparams.TypeParam: + terms, err := typeparams.StructuralTerms(t) + if err != nil { + return // invalid type + } + for _, term := range terms { + structuralTypes = append(structuralTypes, term.Type()) + } + default: + structuralTypes = append(structuralTypes, t) + } + sizes := make(map[int64]struct{}) + for _, t := range structuralTypes { + size := 8 * pass.TypesSizes.Sizeof(t) + sizes[size] = struct{}{} + } + minSize := int64(math.MaxInt64) + for size := range sizes { + if size < minSize { + minSize = size + } + } + if amt >= minSize { ident := analysisutil.Format(pass.Fset, x) - pass.ReportRangef(node, "%s (%d bits) too small for shift of %d", ident, size, amt) + qualifier := "" + if len(sizes) > 1 { + qualifier = "may be " + } + pass.ReportRangef(node, "%s (%s%d bits) too small for shift of %d", ident, qualifier, minSize, amt) } } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go b/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go index 69a67939d..5eb957a18 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go @@ -45,7 +45,8 @@ func run(pass *analysis.Pass) (interface{}, error) { return } - if fn.FullName() != "sort.Slice" { + fnName := fn.FullName() + if fnName != "sort.Slice" && fnName != "sort.SliceStable" && fnName != "sort.SliceIsSorted" { return } @@ -115,7 +116,7 @@ func run(pass *analysis.Pass) (interface{}, error) { pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), - Message: fmt.Sprintf("sort.Slice's argument must be a slice; is called with %s", typ.String()), + Message: fmt.Sprintf("%s's argument must be a slice; is called with %s", fnName, typ.String()), SuggestedFixes: fixes, }) }) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go b/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go index 64a28ac0b..cc9497179 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go @@ -61,7 +61,7 @@ var Analyzer = &analysis.Analyzer{ // we let it go. But if it does have a fmt.ScanState, then the // rest has to match. 
var canonicalMethods = map[string]struct{ args, results []string }{ - "As": {[]string{"interface{}"}, []string{"bool"}}, // errors.As + "As": {[]string{"any"}, []string{"bool"}}, // errors.As // "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict "Format": {[]string{"=fmt.State", "rune"}, []string{}}, // fmt.Formatter "GobDecode": {[]string{"[]byte"}, []string{"error"}}, // gob.GobDecoder @@ -194,7 +194,9 @@ func matchParams(pass *analysis.Pass, expect []string, actual *types.Tuple, pref func matchParamType(expect string, actual types.Type) bool { expect = strings.TrimPrefix(expect, "=") // Overkill but easy. - return typeString(actual) == expect + t := typeString(actual) + return t == expect || + (t == "any" || t == "interface{}") && (expect == "any" || expect == "interface{}") } var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go index 7a005901e..e41de809d 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go @@ -10,10 +10,12 @@ import ( "fmt" "go/ast" "go/types" + "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) const Doc = `check for string(int) conversions @@ -36,6 +38,35 @@ var Analyzer = &analysis.Analyzer{ Run: run, } +// describe returns a string describing the type typ contained within the type +// set of inType. If non-empty, inName is used as the name of inType (this is +// necessary so that we can use alias type names that may not be reachable from +// inType itself). +func describe(typ, inType types.Type, inName string) string { + name := inName + if typ != inType { + name = typeName(typ) + } + if name == "" { + return "" + } + + var parentheticals []string + if underName := typeName(typ.Underlying()); underName != "" && underName != name { + parentheticals = append(parentheticals, underName) + } + + if typ != inType && inName != "" && inName != name { + parentheticals = append(parentheticals, "in "+inName) + } + + if len(parentheticals) > 0 { + name += " (" + strings.Join(parentheticals, ", ") + ")" + } + + return name +} + func typeName(typ types.Type) string { if v, _ := typ.(interface{ Name() string }); v != nil { return v.Name() @@ -54,6 +85,11 @@ func run(pass *analysis.Pass) (interface{}, error) { inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) + if len(call.Args) != 1 { + return + } + arg := call.Args[0] + // Retrieve target type name. var tname *types.TypeName switch fun := call.Fun.(type) { @@ -65,62 +101,119 @@ func run(pass *analysis.Pass) (interface{}, error) { if tname == nil { return } - target := tname.Name() - // Check that target type T in T(v) has an underlying type of string. - T, _ := tname.Type().Underlying().(*types.Basic) - if T == nil || T.Kind() != types.String { - return - } - if s := T.Name(); target != s { - target += " (" + s + ")" + // In the conversion T(v) of a value v of type V to a target type T, we + // look for types T0 in the type set of T and V0 in the type set of V, such + // that V0->T0 is a problematic conversion. If T and V are not type + // parameters, this amounts to just checking if V->T is a problematic + // conversion. 
+ + // First, find a type T0 in T that has an underlying type of string. + T := tname.Type() + ttypes, err := structuralTypes(T) + if err != nil { + return // invalid type } - // Check that type V of v has an underlying integral type that is not byte or rune. - if len(call.Args) != 1 { - return + var T0 types.Type // string type in the type set of T + + for _, tt := range ttypes { + u, _ := tt.Underlying().(*types.Basic) + if u != nil && u.Kind() == types.String { + T0 = tt + break + } } - v := call.Args[0] - vtyp := pass.TypesInfo.TypeOf(v) - V, _ := vtyp.Underlying().(*types.Basic) - if V == nil || V.Info()&types.IsInteger == 0 { - return - } - switch V.Kind() { - case types.Byte, types.Rune, types.UntypedRune: + + if T0 == nil { + // No target types have an underlying type of string. return } - // Retrieve source type name. - source := typeName(vtyp) - if source == "" { + // Next, find a type V0 in V that has an underlying integral type that is + // not byte or rune. + V := pass.TypesInfo.TypeOf(arg) + vtypes, err := structuralTypes(V) + if err != nil { + return // invalid type + } + + var V0 types.Type // integral type in the type set of V + + for _, vt := range vtypes { + u, _ := vt.Underlying().(*types.Basic) + if u != nil && u.Info()&types.IsInteger != 0 { + switch u.Kind() { + case types.Byte, types.Rune, types.UntypedRune: + continue + } + V0 = vt + break + } + } + + if V0 == nil { + // No source types are non-byte or rune integer types. return } - if s := V.Name(); source != s { - source += " (" + s + ")" + + convertibleToRune := true // if true, we can suggest a fix + for _, t := range vtypes { + if !types.ConvertibleTo(t, types.Typ[types.Rune]) { + convertibleToRune = false + break + } } + + target := describe(T0, T, tname.Name()) + source := describe(V0, V, typeName(V)) + + if target == "" || source == "" { + return // something went wrong + } + diag := analysis.Diagnostic{ Pos: n.Pos(), Message: fmt.Sprintf("conversion from %s to %s yields a string of one rune, not a string of digits (did you mean fmt.Sprint(x)?)", source, target), - SuggestedFixes: []analysis.SuggestedFix{ + } + + if convertibleToRune { + diag.SuggestedFixes = []analysis.SuggestedFix{ { Message: "Did you mean to convert a rune to a string?", TextEdits: []analysis.TextEdit{ { - Pos: v.Pos(), - End: v.Pos(), + Pos: arg.Pos(), + End: arg.Pos(), NewText: []byte("rune("), }, { - Pos: v.End(), - End: v.End(), + Pos: arg.End(), + End: arg.End(), NewText: []byte(")"), }, }, }, - }, + } } pass.Report(diag) }) return nil, nil } + +func structuralTypes(t types.Type) ([]types.Type, error) { + var structuralTypes []types.Type + switch t := t.(type) { + case *typeparams.TypeParam: + terms, err := typeparams.StructuralTerms(t) + if err != nil { + return nil, err + } + for _, term := range terms { + structuralTypes = append(structuralTypes, term.Type()) + } + default: + structuralTypes = append(structuralTypes, t) + } + return structuralTypes, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go index ce05a56cc..7ea8f77e3 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -11,6 +11,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + 
"golang.org/x/tools/internal/typeparams" ) const Doc = `report calls to (*testing.T).Fatal from goroutines started by a test. @@ -124,16 +125,27 @@ func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) { // function literals declared in the same function, and // static calls within the same package are supported. func goStmtFun(goStmt *ast.GoStmt) ast.Node { - switch goStmt.Call.Fun.(type) { - case *ast.Ident: - id := goStmt.Call.Fun.(*ast.Ident) - // TODO(cuonglm): improve this once golang/go#48141 resolved. + switch fun := goStmt.Call.Fun.(type) { + case *ast.IndexExpr, *typeparams.IndexListExpr: + x, _, _, _ := typeparams.UnpackIndexExpr(fun) + id, _ := x.(*ast.Ident) + if id == nil { + break + } if id.Obj == nil { break } if funDecl, ok := id.Obj.Decl.(ast.Node); ok { return funDecl } + case *ast.Ident: + // TODO(cuonglm): improve this once golang/go#48141 resolved. + if fun.Obj == nil { + break + } + if funDecl, ok := fun.Obj.Decl.(ast.Node); ok { + return funDecl + } case *ast.FuncLit: return goStmt.Call.Fun } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go index 570ad5c20..2c8788249 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go @@ -16,6 +16,7 @@ import ( "unicode/utf8" "golang.org/x/tools/go/analysis" + "golang.org/x/tools/internal/typeparams" ) const Doc = `check for common mistaken usages of tests and examples @@ -170,6 +171,9 @@ func checkExampleName(pass *analysis.Pass, fn *ast.FuncDecl) { if results := fn.Type.Results; results != nil && len(results.List) != 0 { pass.Reportf(fn.Pos(), "%s should return nothing", fnName) } + if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 { + pass.Reportf(fn.Pos(), "%s should not have type params", fnName) + } if fnName == "Example" { // Nothing more to do. @@ -236,6 +240,12 @@ func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) { return } + if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 { + // Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters. + // We have currently decided to also warn before compilation/package loading. This can help users in IDEs. 
+ pass.Reportf(fn.Pos(), "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix) + } + if !isTestSuffix(fn.Name.Name[len(prefix):]) { pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix) } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go b/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go index 92b37caff..5129048a0 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typeparams" ) const Doc = `report passing non-pointer or non-interface values to unmarshal @@ -85,7 +86,7 @@ func run(pass *analysis.Pass) (interface{}, error) { t := pass.TypesInfo.Types[call.Args[argidx]].Type switch t.Underlying().(type) { - case *types.Pointer, *types.Interface: + case *types.Pointer, *types.Interface, *typeparams.TypeParam: return } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go index bececee7e..06747ba72 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typeparams" ) // TODO(adonovan): make this analysis modular: export a mustUseResult @@ -70,6 +71,11 @@ func run(pass *analysis.Pass) (interface{}, error) { return // a conversion, not a call } + x, _, _, _ := typeparams.UnpackIndexExpr(fun) + if x != nil { + fun = x // If this is generic function or method call, skip the instantiation arguments + } + selector, ok := fun.(*ast.SelectorExpr) if !ok { return // neither a method call nor a qualified ident diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go index 6b7052b89..a5c6d6d4f 100644 --- a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go +++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go @@ -11,6 +11,8 @@ import ( "go/ast" "go/token" "sort" + + "golang.org/x/tools/internal/typeparams" ) // PathEnclosingInterval returns the node that encloses the source @@ -294,8 +296,8 @@ func childrenOf(n ast.Node) []ast.Node { case *ast.FieldList: children = append(children, - tok(n.Opening, len("(")), - tok(n.Closing, len(")"))) + tok(n.Opening, len("(")), // or len("[") + tok(n.Closing, len(")"))) // or len("]") case *ast.File: // TODO test: Doc @@ -322,6 +324,9 @@ func childrenOf(n ast.Node) []ast.Node { children = append(children, n.Recv) } children = append(children, n.Name) + if tparams := typeparams.ForFuncType(n.Type); tparams != nil { + children = append(children, tparams) + } if n.Type.Params != nil { children = append(children, n.Type.Params) } @@ -371,8 +376,13 @@ func childrenOf(n ast.Node) []ast.Node { case *ast.IndexExpr: children = append(children, - tok(n.Lbrack, len("{")), - tok(n.Rbrack, len("}"))) + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *typeparams.IndexListExpr: + children = append(children, + tok(n.Lbrack, len("[")), + 
tok(n.Rbrack, len("]"))) case *ast.InterfaceType: children = append(children, @@ -581,6 +591,8 @@ func NodeDescription(n ast.Node) string { return "decrement statement" case *ast.IndexExpr: return "index expression" + case *typeparams.IndexListExpr: + return "index list expression" case *ast.InterfaceType: return "interface type" case *ast.KeyValueExpr: diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go index 5fe75b14c..6d9ca23e2 100644 --- a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go +++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go @@ -253,6 +253,10 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast. a.apply(n, "X", nil, n.X) a.apply(n, "Index", nil, n.Index) + case *typeparams.IndexListExpr: + a.apply(n, "X", nil, n.X) + a.applyList(n, "Indices") + case *ast.SliceExpr: a.apply(n, "X", nil, n.X) a.apply(n, "Low", nil, n.Low) @@ -439,13 +443,7 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast. } default: - if ix := typeparams.GetIndexExprData(n); ix != nil { - a.apply(n, "X", nil, ix.X) - // *ast.IndexExpr was handled above, so n must be an *ast.MultiIndexExpr. - a.applyList(n, "Indices") - } else { - panic(fmt.Sprintf("Apply: unexpected node type %T", n)) - } + panic(fmt.Sprintf("Apply: unexpected node type %T", n)) } if a.post != nil && !a.post(&a.cursor) { diff --git a/vendor/golang.org/x/tools/go/ast/inspector/typeof.go b/vendor/golang.org/x/tools/go/ast/inspector/typeof.go index b6b00cf2e..11f4fc369 100644 --- a/vendor/golang.org/x/tools/go/ast/inspector/typeof.go +++ b/vendor/golang.org/x/tools/go/ast/inspector/typeof.go @@ -9,7 +9,11 @@ package inspector // The initial map-based implementation was too slow; // see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196 -import "go/ast" +import ( + "go/ast" + + "golang.org/x/tools/internal/typeparams" +) const ( nArrayType = iota @@ -47,6 +51,7 @@ const ( nImportSpec nIncDecStmt nIndexExpr + nIndexListExpr nInterfaceType nKeyValueExpr nLabeledStmt @@ -164,6 +169,8 @@ func typeOf(n ast.Node) uint64 { return 1 << nIncDecStmt case *ast.IndexExpr: return 1 << nIndexExpr + case *typeparams.IndexListExpr: + return 1 << nIndexListExpr case *ast.InterfaceType: return 1 << nInterfaceType case *ast.KeyValueExpr: diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go index fc8beea5d..cec819d64 100644 --- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go +++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go @@ -50,11 +50,24 @@ func Find(importPath, srcDir string) (filename, path string) { // additional trailing data beyond the end of the export data. func NewReader(r io.Reader) (io.Reader, error) { buf := bufio.NewReader(r) - _, err := gcimporter.FindExportData(buf) - // If we ever switch to a zip-like archive format with the ToC - // at the end, we can return the correct portion of export data, - // but for now we must return the entire rest of the file. - return buf, err + _, size, err := gcimporter.FindExportData(buf) + if err != nil { + return nil, err + } + + if size >= 0 { + // We were given an archive and found the __.PKGDEF in it. + // This tells us the size of the export data, and we don't + // need to return the entire file. + return &io.LimitedReader{ + R: buf, + N: size, + }, nil + } else { + // We were given an object file. 
As such, we don't know how large + // the export data is and must return the entire file. + return buf, nil + } } // Read reads export data from in, decodes it, and returns type diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go index 072005af8..0a3cdb9a3 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go @@ -34,9 +34,6 @@ import ( // (suspected) format errors, and whenever a change is made to the format. const debugFormat = false // default: false -// If trace is set, debugging output is printed to std out. -const trace = false // default: false - // Current export format version. Increase with each format change. // Note: The latest binary (non-indexed) export format is at version 6. // This exporter is still at level 4, but it doesn't matter since diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go index b02312000..b85de0147 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go @@ -74,9 +74,10 @@ func BImportData(fset *token.FileSet, imports map[string]*types.Package, data [] pathList: []string{""}, // empty string is mapped to 0 fake: fakeFileSet{ fset: fset, - files: make(map[string]*token.File), + files: make(map[string]*fileInfo), }, } + defer p.fake.setLines() // set lines for files in fset // read version info var versionstr string @@ -338,37 +339,49 @@ func (p *importer) pos() token.Pos { // Synthesize a token.Pos type fakeFileSet struct { fset *token.FileSet - files map[string]*token.File + files map[string]*fileInfo } +type fileInfo struct { + file *token.File + lastline int +} + +const maxlines = 64 * 1024 + func (s *fakeFileSet) pos(file string, line, column int) token.Pos { // TODO(mdempsky): Make use of column. - // Since we don't know the set of needed file positions, we - // reserve maxlines positions per file. - const maxlines = 64 * 1024 + // Since we don't know the set of needed file positions, we reserve maxlines + // positions per file. We delay calling token.File.SetLines until all + // positions have been calculated (by way of fakeFileSet.setLines), so that + // we can avoid setting unnecessary lines. See also golang/go#46586. f := s.files[file] if f == nil { - f = s.fset.AddFile(file, -1, maxlines) + f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)} s.files[file] = f - // Allocate the fake linebreak indices on first use. - // TODO(adonovan): opt: save ~512KB using a more complex scheme? - fakeLinesOnce.Do(func() { - fakeLines = make([]int, maxlines) - for i := range fakeLines { - fakeLines[i] = i - } - }) - f.SetLines(fakeLines) } - if line > maxlines { line = 1 } + if line > f.lastline { + f.lastline = line + } - // Treat the file as if it contained only newlines - // and column=1: use the line number as the offset. - return f.Pos(line - 1) + // Return a fake position assuming that f.file consists only of newlines. 
+ return token.Pos(f.file.Base() + line - 1) +} + +func (s *fakeFileSet) setLines() { + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + for _, f := range s.files { + f.file.SetLines(fakeLines[:f.lastline]) + } } var ( diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go index f33dc5613..f6437feb1 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go @@ -16,7 +16,7 @@ import ( "strings" ) -func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { +func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) { // See $GOROOT/include/ar.h. hdr := make([]byte, 16+12+6+6+8+10+2) _, err = io.ReadFull(r, hdr) @@ -28,7 +28,8 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { fmt.Printf("header: %s", hdr) } s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) - size, err = strconv.Atoi(s) + length, err := strconv.Atoi(s) + size = int64(length) if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { err = fmt.Errorf("invalid archive header") return @@ -42,8 +43,8 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { // file by reading from it. The reader must be positioned at the // start of the file before calling this function. The hdr result // is the string before the export data, either "$$" or "$$B". -// -func FindExportData(r *bufio.Reader) (hdr string, err error) { +// The size result is the length of the export data in bytes, or -1 if not known. +func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) { // Read first line to make sure this is an object file. line, err := r.ReadSlice('\n') if err != nil { @@ -54,7 +55,7 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) { if string(line) == "!\n" { // Archive file. Scan to __.PKGDEF. var name string - if name, _, err = readGopackHeader(r); err != nil { + if name, size, err = readGopackHeader(r); err != nil { return } @@ -70,6 +71,7 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) { err = fmt.Errorf("can't find export data (%v)", err) return } + size -= int64(len(line)) } // Now at __.PKGDEF in archive or still at beginning of file. @@ -86,8 +88,12 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) { err = fmt.Errorf("can't find export data (%v)", err) return } + size -= int64(len(line)) } hdr = string(line) + if size < 0 { + size = -1 + } return } diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go index e8cba6b23..3ab66830d 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go @@ -29,8 +29,14 @@ import ( "text/scanner" ) -// debugging/development support -const debug = false +const ( + // Enable debug during development: it adds some additional checks, and + // prevents errors from being recovered. + debug = false + + // If trace is set, debugging output is printed to std out. 
+ trace = false +) var pkgExts = [...]string{".a", ".o"} @@ -179,7 +185,7 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func var hdr string buf := bufio.NewReader(rc) - if hdr, err = FindExportData(buf); err != nil { + if hdr, _, err = FindExportData(buf); err != nil { return } diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go index be8b7459a..209553409 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go @@ -11,6 +11,7 @@ package gcimporter import ( "bytes" "encoding/binary" + "fmt" "go/ast" "go/constant" "go/token" @@ -19,6 +20,8 @@ import ( "math/big" "reflect" "sort" + "strconv" + "strings" "golang.org/x/tools/internal/typeparams" ) @@ -33,15 +36,15 @@ const bundleVersion = 0 // The package path of the top-level package will not be recorded, // so that calls to IImportData can override with a provided package path. func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error { - return iexportCommon(out, fset, false, []*types.Package{pkg}) + return iexportCommon(out, fset, false, iexportVersion, []*types.Package{pkg}) } // IExportBundle writes an indexed export bundle for pkgs to out. func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { - return iexportCommon(out, fset, true, pkgs) + return iexportCommon(out, fset, true, iexportVersion, pkgs) } -func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*types.Package) (err error) { +func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int, pkgs []*types.Package) (err error) { if !debug { defer func() { if e := recover(); e != nil { @@ -57,9 +60,11 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type p := iexporter{ fset: fset, + version: version, allPkgs: map[*types.Package]bool{}, stringIndex: map[string]uint64{}, declIndex: map[types.Object]uint64{}, + tparamNames: map[types.Object]string{}, typIndex: map[types.Type]uint64{}, } if !bundle { @@ -119,7 +124,7 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type if bundle { hdr.uint64(bundleVersion) } - hdr.uint64(iexportVersion) + hdr.uint64(uint64(p.version)) hdr.uint64(uint64(p.strings.Len())) hdr.uint64(dataLen) @@ -136,8 +141,12 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type // non-compiler tools and includes a complete package description // (i.e., name and height). func (w *exportWriter) writeIndex(index map[types.Object]uint64) { + type pkgObj struct { + obj types.Object + name string // qualified name; differs from obj.Name for type params + } // Build a map from packages to objects from that package. 
- pkgObjs := map[*types.Package][]types.Object{} + pkgObjs := map[*types.Package][]pkgObj{} // For the main index, make sure to include every package that // we reference, even if we're not exporting (or reexporting) @@ -150,7 +159,8 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) { } for obj := range index { - pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj) + name := w.p.exportName(obj) + pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name}) } var pkgs []*types.Package @@ -158,7 +168,7 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) { pkgs = append(pkgs, pkg) sort.Slice(objs, func(i, j int) bool { - return indexName(objs[i]) < indexName(objs[j]) + return objs[i].name < objs[j].name }) } @@ -175,29 +185,25 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) { objs := pkgObjs[pkg] w.uint64(uint64(len(objs))) for _, obj := range objs { - w.string(indexName(obj)) - w.uint64(index[obj]) + w.string(obj.name) + w.uint64(index[obj.obj]) } } } -// indexName returns the 'indexed' name of an object. It differs from -// obj.Name() only for type parameter names, where we include the subscripted -// type parameter ID. -// -// TODO(rfindley): remove this once we no longer need subscripts. -func indexName(obj types.Object) (res string) { - if _, ok := obj.(*types.TypeName); ok { - if tparam, ok := obj.Type().(*typeparams.TypeParam); ok { - return types.TypeString(tparam, func(*types.Package) string { return "" }) - } +// exportName returns the 'exported' name of an object. It differs from +// obj.Name() only for type parameters (see tparamExportName for details). +func (p *iexporter) exportName(obj types.Object) (res string) { + if name := p.tparamNames[obj]; name != "" { + return name } return obj.Name() } type iexporter struct { - fset *token.FileSet - out *bytes.Buffer + fset *token.FileSet + out *bytes.Buffer + version int localpkg *types.Package @@ -211,9 +217,21 @@ type iexporter struct { strings intWriter stringIndex map[string]uint64 - data0 intWriter - declIndex map[types.Object]uint64 - typIndex map[types.Type]uint64 + data0 intWriter + declIndex map[types.Object]uint64 + tparamNames map[types.Object]string // typeparam->exported name + typIndex map[types.Type]uint64 + + indent int // for tracing support +} + +func (p *iexporter) trace(format string, args ...interface{}) { + if !trace { + // Call sites should also be guarded, but having this check here allows + // easily enabling/disabling debug trace statements. + return + } + fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...) } // stringOff returns the offset of s within the string section. @@ -239,7 +257,7 @@ func (p *iexporter) pushDecl(obj types.Object) { return } - p.declIndex[obj] = ^uint64(0) // mark n present in work queue + p.declIndex[obj] = ^uint64(0) // mark obj present in work queue p.declTodo.pushTail(obj) } @@ -262,6 +280,14 @@ func (w *exportWriter) exportPath(pkg *types.Package) string { } func (p *iexporter) doDecl(obj types.Object) { + if trace { + p.trace("exporting decl %v (%T)", obj, obj) + p.indent++ + defer func() { + p.indent-- + p.trace("=> %s", obj) + }() + } w := p.newWriter() w.setPkg(obj.Pkg(), false) @@ -284,14 +310,15 @@ func (p *iexporter) doDecl(obj types.Object) { w.tag('G') } w.pos(obj.Pos()) - // The tparam list of the function type is the - // declaration of the type params. So, write out the type - // params right now. 
Then those type params will be - // referenced via their type offset (via typOff) in all - // other places in the signature and function that they - // are used. + // The tparam list of the function type is the declaration of the type + // params. So, write out the type params right now. Then those type params + // will be referenced via their type offset (via typOff) in all other + // places in the signature and function where they are used. + // + // While importing the type parameters, tparamList computes and records + // their export name, so that it can be later used when writing the index. if tparams := typeparams.ForSignature(sig); tparams.Len() > 0 { - w.tparamList(tparams, obj.Pkg()) + w.tparamList(obj.Name(), tparams, obj.Pkg()) } w.signature(sig) @@ -306,7 +333,15 @@ func (p *iexporter) doDecl(obj types.Object) { if tparam, ok := t.(*typeparams.TypeParam); ok { w.tag('P') w.pos(obj.Pos()) - w.typ(tparam.Constraint(), obj.Pkg()) + constraint := tparam.Constraint() + if p.version >= iexportVersionGo1_18 { + implicit := false + if iface, _ := constraint.(*types.Interface); iface != nil { + implicit = typeparams.IsImplicit(iface) + } + w.bool(implicit) + } + w.typ(constraint, obj.Pkg()) break } @@ -331,7 +366,9 @@ func (p *iexporter) doDecl(obj types.Object) { w.pos(obj.Pos()) if typeparams.ForNamed(named).Len() > 0 { - w.tparamList(typeparams.ForNamed(named), obj.Pkg()) + // While importing the type parameters, tparamList computes and records + // their export name, so that it can be later used when writing the index. + w.tparamList(obj.Name(), typeparams.ForNamed(named), obj.Pkg()) } underlying := obj.Type().Underlying() @@ -348,6 +385,17 @@ func (p *iexporter) doDecl(obj types.Object) { w.pos(m.Pos()) w.string(m.Name()) sig, _ := m.Type().(*types.Signature) + + // Receiver type parameters are type arguments of the receiver type, so + // their name must be qualified before exporting recv. + if rparams := typeparams.RecvTypeParams(sig); rparams.Len() > 0 { + prefix := obj.Name() + "." + m.Name() + for i := 0; i < rparams.Len(); i++ { + rparam := rparams.At(i) + name := tparamExportName(prefix, rparam) + w.p.tparamNames[rparam.Obj()] = name + } + } w.param(sig.Recv()) w.signature(sig) } @@ -364,7 +412,7 @@ func (w *exportWriter) tag(tag byte) { } func (w *exportWriter) pos(pos token.Pos) { - if iexportVersion >= iexportVersionPosCol { + if w.p.version >= iexportVersionPosCol { w.posV1(pos) } else { w.posV0(pos) @@ -447,9 +495,11 @@ func (w *exportWriter) pkg(pkg *types.Package) { } func (w *exportWriter) qualifiedIdent(obj types.Object) { + name := w.p.exportName(obj) + // Ensure any referenced declarations are written out too. 
w.p.pushDecl(obj) - w.string(indexName(obj)) + w.string(name) w.pkg(obj.Pkg()) } @@ -483,6 +533,14 @@ func (w *exportWriter) startType(k itag) { } func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { + if trace { + w.p.trace("exporting type %s (%T)", t, t) + w.p.indent++ + defer func() { + w.p.indent-- + w.p.trace("=> %s", t) + }() + } switch t := t.(type) { case *types.Named: if targs := typeparams.NamedTypeArgs(t); targs.Len() > 0 { @@ -619,14 +677,49 @@ func (w *exportWriter) typeList(ts *typeparams.TypeList, pkg *types.Package) { } } -func (w *exportWriter) tparamList(list *typeparams.TypeParamList, pkg *types.Package) { +func (w *exportWriter) tparamList(prefix string, list *typeparams.TypeParamList, pkg *types.Package) { ll := uint64(list.Len()) w.uint64(ll) for i := 0; i < list.Len(); i++ { + tparam := list.At(i) + // Set the type parameter exportName before exporting its type. + exportName := tparamExportName(prefix, tparam) + w.p.tparamNames[tparam.Obj()] = exportName w.typ(list.At(i), pkg) } } +const blankMarker = "$" + +// tparamExportName returns the 'exported' name of a type parameter, which +// differs from its actual object name: it is prefixed with a qualifier, and +// blank type parameter names are disambiguated by their index in the type +// parameter list. +func tparamExportName(prefix string, tparam *typeparams.TypeParam) string { + assert(prefix != "") + name := tparam.Obj().Name() + if name == "_" { + name = blankMarker + strconv.Itoa(tparam.Index()) + } + return prefix + "." + name +} + +// tparamName returns the real name of a type parameter, after stripping its +// qualifying prefix and reverting blank-name encoding. See tparamExportName +// for details. +func tparamName(exportName string) string { + // Remove the "path" from the type param name that makes it unique. + ix := strings.LastIndex(exportName, ".") + if ix < 0 { + errorf("malformed type parameter export name %s: missing prefix", exportName) + } + name := exportName[ix+1:] + if strings.HasPrefix(name, blankMarker) { + return "_" + } + return name +} + func (w *exportWriter) paramList(tup *types.Tuple) { n := tup.Len() w.uint64(uint64(n)) @@ -643,6 +736,9 @@ func (w *exportWriter) param(obj types.Object) { func (w *exportWriter) value(typ types.Type, v constant.Value) { w.typ(typ, nil) + if w.p.version >= iexportVersionGo1_18 { + w.int64(int64(v.Kind())) + } switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { case types.IsBoolean: @@ -832,7 +928,7 @@ func (w *exportWriter) localIdent(obj types.Object) { return } - name := indexName(obj) + name := obj.Name() if name == "_" { w.string("_") return diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go index 1fcc87e58..1a33cd5c6 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go @@ -18,6 +18,7 @@ import ( "go/types" "io" "sort" + "strings" "golang.org/x/tools/internal/typeparams" ) @@ -45,12 +46,10 @@ func (r *intReader) uint64() uint64 { // Keep this in sync with constants in iexport.go. const ( - iexportVersionGo1_11 = 0 - iexportVersionPosCol = 1 - // TODO: before release, change this back to 2. 
- iexportVersionGenerics = iexportVersionPosCol - - iexportVersionCurrent = iexportVersionGenerics + iexportVersionGo1_11 = 0 + iexportVersionPosCol = 1 + iexportVersionGo1_18 = 2 + iexportVersionGenerics = 2 ) type ident struct { @@ -123,9 +122,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data version = int64(r.uint64()) switch version { - case /* iexportVersionGenerics, */ iexportVersionPosCol, iexportVersionGo1_11: + case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11: default: - if version > iexportVersionGenerics { + if version > iexportVersionGo1_18 { errorf("unstable iexport format version %d, just rebuild compiler and std library", version) } else { errorf("unknown iexport format version %d", version) @@ -141,9 +140,8 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data r.Seek(sLen+dLen, io.SeekCurrent) p := iimporter{ - exportVersion: version, - ipath: path, - version: int(version), + version: int(version), + ipath: path, stringData: stringData, stringCache: make(map[uint64]string), @@ -153,14 +151,15 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data pkgIndex: make(map[*types.Package]map[string]uint64), typCache: make(map[uint64]types.Type), // Separate map for typeparams, keyed by their package and unique - // name (name with subscript). + // name. tparamIndex: make(map[ident]types.Type), fake: fakeFileSet{ fset: fset, - files: make(map[string]*token.File), + files: make(map[string]*fileInfo), }, } + defer p.fake.setLines() // set lines for files in fset for i, pt := range predeclared() { p.typCache[uint64(i)] = pt @@ -246,9 +245,8 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data } type iimporter struct { - exportVersion int64 - ipath string - version int + version int + ipath string stringData []byte stringCache map[uint64]string @@ -261,9 +259,28 @@ type iimporter struct { fake fakeFileSet interfaceList []*types.Interface + + indent int // for tracing support +} + +func (p *iimporter) trace(format string, args ...interface{}) { + if !trace { + // Call sites should also be guarded, but having this check here allows + // easily enabling/disabling debug trace statements. + return + } + fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...) } func (p *iimporter) doDecl(pkg *types.Package, name string) { + if debug { + p.trace("import decl %s", name) + p.indent++ + defer func() { + p.indent-- + p.trace("=> %s", name) + }() + } // See if we've already imported this declaration. if obj := pkg.Scope().Lookup(name); obj != nil { return @@ -305,7 +322,7 @@ func (p *iimporter) pkgAt(off uint64) *types.Package { } func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { - if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) { + if t, ok := p.typCache[off]; ok && canReuse(base, t) { return t } @@ -317,12 +334,30 @@ func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { r.declReader.Reset(p.declData[off-predeclReserved:]) t := r.doType(base) - if base == nil || !isInterface(t) { + if canReuse(base, t) { p.typCache[off] = t } return t } +// canReuse reports whether the type rhs on the RHS of the declaration for def +// may be re-used. +// +// Specifically, if def is non-nil and rhs is an interface type with methods, it +// may not be re-used because we have a convention of setting the receiver type +// for interface methods to def. 
+func canReuse(def *types.Named, rhs types.Type) bool { + if def == nil { + return true + } + iface, _ := rhs.(*types.Interface) + if iface == nil { + return true + } + // Don't use iface.Empty() here as iface may not be complete. + return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0 +} + type importReader struct { p *iimporter declReader bytes.Reader @@ -352,8 +387,7 @@ func (r *importReader) obj(name string) { if tag == 'G' { tparams = r.tparamList() } - sig := r.signature(nil) - typeparams.SetForSignature(sig, tparams) + sig := r.signature(nil, nil, tparams) r.declare(types.NewFunc(pos, r.currPkg, name, sig)) case 'T', 'U': @@ -377,23 +411,21 @@ func (r *importReader) obj(name string) { mpos := r.pos() mname := r.ident() recv := r.param() - msig := r.signature(recv) // If the receiver has any targs, set those as the // rparams of the method (since those are the // typeparams being used in the method sig/body). - targs := typeparams.NamedTypeArgs(baseType(msig.Recv().Type())) + base := baseType(recv.Type()) + assert(base != nil) + targs := typeparams.NamedTypeArgs(base) + var rparams []*typeparams.TypeParam if targs.Len() > 0 { - rparams := make([]*typeparams.TypeParam, targs.Len()) + rparams = make([]*typeparams.TypeParam, targs.Len()) for i := range rparams { - // TODO(rfindley): this is less tolerant than the standard library - // go/internal/gcimporter, which calls under(...) and is tolerant - // of nil rparams. Bring them in sync by making the standard - // library importer stricter. rparams[i] = targs.At(i).(*typeparams.TypeParam) } - typeparams.SetRecvTypeParams(msig, rparams) } + msig := r.signature(recv, rparams, nil) named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig)) } @@ -403,25 +435,30 @@ func (r *importReader) obj(name string) { // We need to "declare" a typeparam in order to have a name that // can be referenced recursively (if needed) in the type param's // bound. - if r.p.exportVersion < iexportVersionGenerics { + if r.p.version < iexportVersionGenerics { errorf("unexpected type param type") } - name0, sub := parseSubscript(name) + name0 := tparamName(name) tn := types.NewTypeName(pos, r.currPkg, name0, nil) t := typeparams.NewTypeParam(tn, nil) - if sub == 0 { - errorf("name %q missing subscript", name) - } - - // TODO(rfindley): can we use a different, stable ID? - // t.SetId(sub) // To handle recursive references to the typeparam within its // bound, save the partial type in tparamIndex before reading the bounds. id := ident{r.currPkg.Name(), name} r.p.tparamIndex[id] = t - - typeparams.SetTypeParamConstraint(t, r.typ()) + var implicit bool + if r.p.version >= iexportVersionGo1_18 { + implicit = r.bool() + } + constraint := r.typ() + if implicit { + iface, _ := constraint.(*types.Interface) + if iface == nil { + errorf("non-interface constraint marked implicit") + } + typeparams.MarkImplicit(iface) + } + typeparams.SetTypeParamConstraint(t, constraint) case 'V': typ := r.typ() @@ -439,6 +476,10 @@ func (r *importReader) declare(obj types.Object) { func (r *importReader) value() (typ types.Type, val constant.Value) { typ = r.typ() + if r.p.version >= iexportVersionGo1_18 { + // TODO: add support for using the kind. 
+ _ = constant.Kind(r.int64()) + } switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { case types.IsBoolean: @@ -581,7 +622,7 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) { } func (r *importReader) pos() token.Pos { - if r.p.exportVersion >= iexportVersionPosCol { + if r.p.version >= iexportVersionPosCol { r.posv1() } else { r.posv0() @@ -629,8 +670,17 @@ func isInterface(t types.Type) bool { func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) } func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } -func (r *importReader) doType(base *types.Named) types.Type { - switch k := r.kind(); k { +func (r *importReader) doType(base *types.Named) (res types.Type) { + k := r.kind() + if debug { + r.p.trace("importing type %d (base: %s)", k, base) + r.p.indent++ + defer func() { + r.p.indent-- + r.p.trace("=> %s", res) + }() + } + switch k { default: errorf("unexpected kind tag in %q: %v", r.p.ipath, k) return nil @@ -653,7 +703,7 @@ func (r *importReader) doType(base *types.Named) types.Type { return types.NewMap(r.typ(), r.typ()) case signatureType: r.currPkg = r.pkg() - return r.signature(nil) + return r.signature(nil, nil, nil) case structType: r.currPkg = r.pkg() @@ -693,7 +743,7 @@ func (r *importReader) doType(base *types.Named) types.Type { recv = types.NewVar(token.NoPos, r.currPkg, "", base) } - msig := r.signature(recv) + msig := r.signature(recv, nil, nil) methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig) } @@ -702,7 +752,7 @@ func (r *importReader) doType(base *types.Named) types.Type { return typ case typeParamType: - if r.p.exportVersion < iexportVersionGenerics { + if r.p.version < iexportVersionGenerics { errorf("unexpected type param type") } pkg, name := r.qualifiedIdent() @@ -716,7 +766,7 @@ func (r *importReader) doType(base *types.Named) types.Type { return r.p.tparamIndex[id] case instanceType: - if r.p.exportVersion < iexportVersionGenerics { + if r.p.version < iexportVersionGenerics { errorf("unexpected instantiation type") } // pos does not matter for instances: they are positioned on the original @@ -735,7 +785,7 @@ func (r *importReader) doType(base *types.Named) types.Type { return t case unionType: - if r.p.exportVersion < iexportVersionGenerics { + if r.p.version < iexportVersionGenerics { errorf("unexpected instantiation type") } terms := make([]*typeparams.Term, r.uint64()) @@ -750,11 +800,11 @@ func (r *importReader) kind() itag { return itag(r.uint64()) } -func (r *importReader) signature(recv *types.Var) *types.Signature { +func (r *importReader) signature(recv *types.Var, rparams []*typeparams.TypeParam, tparams []*typeparams.TypeParam) *types.Signature { params := r.paramList() results := r.paramList() variadic := params.Len() > 0 && r.bool() - return types.NewSignature(recv, params, results, variadic) + return typeparams.NewSignatureType(recv, rparams, tparams, params, results, variadic) } func (r *importReader) tparamList() []*typeparams.TypeParam { @@ -823,23 +873,3 @@ func baseType(typ types.Type) *types.Named { n, _ := typ.(*types.Named) return n } - -func parseSubscript(name string) (string, uint64) { - // Extract the subscript value from the type param name. We export - // and import the subscript value, so that all type params have - // unique names. 
- sub := uint64(0) - startsub := -1 - for i, r := range name { - if '₀' <= r && r < '₀'+10 { - if startsub == -1 { - startsub = i - } - sub = sub*10 + uint64(r-'₀') - } - } - if startsub >= 0 { - name = name[:startsub] - } - return name, sub -} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go b/vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go index 817a147ef..d892273ef 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build !typeparams || !go1.18 -// +build !typeparams !go1.18 +//go:build !go1.18 +// +build !go1.18 package gcimporter diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go b/vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go index e6b81fc50..a99384323 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build typeparams && go1.18 -// +build typeparams,go1.18 +//go:build go1.18 +// +build go1.18 package gcimporter @@ -16,5 +16,8 @@ func additionalPredeclared() []types.Type { return []types.Type{ // comparable types.Universe.Lookup("comparable").Type(), + + // any + types.Universe.Lookup("any").Type(), } } diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go index 8a1a2d681..1b5424e78 100644 --- a/vendor/golang.org/x/tools/go/packages/packages.go +++ b/vendor/golang.org/x/tools/go/packages/packages.go @@ -26,6 +26,7 @@ import ( "golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" ) @@ -327,6 +328,9 @@ type Package struct { // The NeedSyntax LoadMode bit populates this field for packages matching the patterns. // If NeedDeps and NeedImports are also set, this field will also be populated // for dependencies. + // + // Syntax is kept in the same order as CompiledGoFiles, with the caveat that nils are + // removed. If parsing returned nil, Syntax may be shorter than CompiledGoFiles. Syntax []*ast.File // TypesInfo provides type information about the package's syntax trees. @@ -910,6 +914,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { Scopes: make(map[ast.Node]*types.Scope), Selections: make(map[*ast.SelectorExpr]*types.Selection), } + typeparams.InitInstanceInfo(lpkg.TypesInfo) lpkg.TypesSizes = ld.sizes importer := importerFunc(func(path string) (*types.Package, error) { diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go index 71511bff3..6885bedb3 100644 --- a/vendor/golang.org/x/tools/go/ssa/doc.go +++ b/vendor/golang.org/x/tools/go/ssa/doc.go @@ -25,8 +25,8 @@ // // The simplest way to create the SSA representation of a package is // to load typed syntax trees using golang.org/x/tools/go/packages, then -// invoke the ssautil.Packages helper function. See ExampleLoadPackages -// and ExampleWholeProgram for examples. +// invoke the ssautil.Packages helper function. See Example_loadPackages +// and Example_loadWholeProgram for examples. 
// The resulting ssa.Program contains all the packages and their // members, but SSA code is not created for function bodies until a // subsequent call to (*Package).Build or (*Program).Build. diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go index c1b6d22b3..5995f83af 100644 --- a/vendor/golang.org/x/tools/go/ssa/print.go +++ b/vendor/golang.org/x/tools/go/ssa/print.go @@ -14,6 +14,7 @@ import ( "io" "reflect" "sort" + "strings" "golang.org/x/tools/go/types/typeutil" ) @@ -38,8 +39,16 @@ func relName(v Value, i Instruction) string { return v.Name() } +// normalizeAnyFortesting controls whether we replace occurrences of +// interface{} with any. It is only used for normalizing test output. +var normalizeAnyForTesting bool + func relType(t types.Type, from *types.Package) string { - return types.TypeString(t, types.RelativeTo(from)) + s := types.TypeString(t, types.RelativeTo(from)) + if normalizeAnyForTesting { + s = strings.ReplaceAll(s, "interface{}", "any") + } + return s } func relString(m Member, from *types.Package) string { diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go index eab12dc55..18e88e6af 100644 --- a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go @@ -102,7 +102,7 @@ func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (* // The mode parameter controls diagnostics and checking during SSA construction. // // Deprecated: Use golang.org/x/tools/go/packages and the Packages -// function instead; see ssa.ExampleLoadPackages. +// function instead; see ssa.Example_loadPackages. // func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { prog := ssa.NewProgram(lprog.Fset, mode) diff --git a/vendor/golang.org/x/tools/go/ssa/testmain.go b/vendor/golang.org/x/tools/go/ssa/testmain.go deleted file mode 100644 index c4256d1ef..000000000 --- a/vendor/golang.org/x/tools/go/ssa/testmain.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// CreateTestMainPackage synthesizes a main package that runs all the -// tests of the supplied packages. -// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing. -// -// TODO(adonovan): throws this all away now that x/tools/go/packages -// provides access to the actual synthetic test main files. - -import ( - "bytes" - "fmt" - "go/ast" - "go/parser" - "go/types" - "log" - "os" - "strings" - "text/template" -) - -// FindTests returns the Test, Benchmark, and Example functions -// (as defined by "go test") defined in the specified package, -// and its TestMain function, if any. -// -// Deprecated: Use golang.org/x/tools/go/packages to access synthetic -// testmain packages. -func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { - prog := pkg.Prog - - // The first two of these may be nil: if the program doesn't import "testing", - // it can't contain any tests, but it may yet contain Examples. - var testSig *types.Signature // func(*testing.T) - var benchmarkSig *types.Signature // func(*testing.B) - var exampleSig = types.NewSignature(nil, nil, nil, false) // func() - - // Obtain the types from the parameters of testing.MainStart. 
- if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { - mainStart := testingPkg.Func("MainStart") - params := mainStart.Signature.Params() - testSig = funcField(params.At(1).Type()) - benchmarkSig = funcField(params.At(2).Type()) - - // Does the package define this function? - // func TestMain(*testing.M) - if f := pkg.Func("TestMain"); f != nil { - sig := f.Type().(*types.Signature) - starM := mainStart.Signature.Results().At(0).Type() // *testing.M - if sig.Results().Len() == 0 && - sig.Params().Len() == 1 && - types.Identical(sig.Params().At(0).Type(), starM) { - main = f - } - } - } - - // TODO(adonovan): use a stable order, e.g. lexical. - for _, mem := range pkg.Members { - if f, ok := mem.(*Function); ok && - ast.IsExported(f.Name()) && - strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") { - - switch { - case testSig != nil && isTestSig(f, "Test", testSig): - tests = append(tests, f) - case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig): - benchmarks = append(benchmarks, f) - case isTestSig(f, "Example", exampleSig): - examples = append(examples, f) - default: - continue - } - } - } - return -} - -// Like isTest, but checks the signature too. -func isTestSig(f *Function, prefix string, sig *types.Signature) bool { - return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig) -} - -// Given the type of one of the three slice parameters of testing.Main, -// returns the function type. -func funcField(slice types.Type) *types.Signature { - return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature) -} - -// isTest tells whether name looks like a test (or benchmark, according to prefix). -// It is a Test (say) if there is a character after Test that is not a lower-case letter. -// We don't want TesticularCancer. -// Plundered from $GOROOT/src/cmd/go/test.go -func isTest(name, prefix string) bool { - if !strings.HasPrefix(name, prefix) { - return false - } - if len(name) == len(prefix) { // "Test" is ok - return true - } - return ast.IsExported(name[len(prefix):]) -} - -// CreateTestMainPackage creates and returns a synthetic "testmain" -// package for the specified package if it defines tests, benchmarks or -// executable examples, or nil otherwise. The new package is named -// "main" and provides a function named "main" that runs the tests, -// similar to the one that would be created by the 'go test' tool. -// -// Subsequent calls to prog.AllPackages include the new package. -// The package pkg must belong to the program prog. -// -// Deprecated: Use golang.org/x/tools/go/packages to access synthetic -// testmain packages. -func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { - if pkg.Prog != prog { - log.Fatal("Package does not belong to Program") - } - - // Template data - var data struct { - Pkg *Package - Tests, Benchmarks, Examples []*Function - Main *Function - Go18 bool - } - data.Pkg = pkg - - // Enumerate tests. - data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg) - if data.Main == nil && - data.Tests == nil && data.Benchmarks == nil && data.Examples == nil { - return nil - } - - // Synthesize source for testmain package. - path := pkg.Pkg.Path() + "$testmain" - tmpl := testmainTmpl - if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { - // In Go 1.8, testing.MainStart's first argument is an interface, not a func. 
- data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type()) - } else { - // The program does not import "testing", but FindTests - // returned non-nil, which must mean there were Examples - // but no Test, Benchmark, or TestMain functions. - - // We'll simply call them from testmain.main; this will - // ensure they don't panic, but will not check any - // "Output:" comments. - // (We should not execute an Example that has no - // "Output:" comment, but it's impossible to tell here.) - tmpl = examplesOnlyTmpl - } - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - log.Fatalf("internal error expanding template for %s: %v", path, err) - } - if false { // debugging - fmt.Fprintln(os.Stderr, buf.String()) - } - - // Parse and type-check the testmain package. - f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0)) - if err != nil { - log.Fatalf("internal error parsing %s: %v", path, err) - } - conf := types.Config{ - DisableUnusedImportCheck: true, - Importer: importer{pkg}, - } - files := []*ast.File{f} - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - testmainPkg, err := conf.Check(path, prog.Fset, files, info) - if err != nil { - log.Fatalf("internal error type-checking %s: %v", path, err) - } - - // Create and build SSA code. - testmain := prog.CreatePackage(testmainPkg, files, info, false) - testmain.SetDebugMode(false) - testmain.Build() - testmain.Func("main").Synthetic = "test main function" - testmain.Func("init").Synthetic = "package initializer" - return testmain -} - -// An implementation of types.Importer for an already loaded SSA program. 
-type importer struct { - pkg *Package // package under test; may be non-importable -} - -func (imp importer) Import(path string) (*types.Package, error) { - if p := imp.pkg.Prog.ImportedPackage(path); p != nil { - return p.Pkg, nil - } - if path == imp.pkg.Pkg.Path() { - return imp.pkg.Pkg, nil - } - return nil, fmt.Errorf("not found") // can't happen -} - -var testmainTmpl = template.Must(template.New("testmain").Parse(` -package main - -import "io" -import "os" -import "testing" -import p {{printf "%q" .Pkg.Pkg.Path}} - -{{if .Go18}} -type deps struct{} - -func (deps) ImportPath() string { return "" } -func (deps) MatchString(pat, str string) (bool, error) { return true, nil } -func (deps) SetPanicOnExit0(bool) {} -func (deps) StartCPUProfile(io.Writer) error { return nil } -func (deps) StartTestLog(io.Writer) {} -func (deps) StopCPUProfile() {} -func (deps) StopTestLog() error { return nil } -func (deps) WriteHeapProfile(io.Writer) error { return nil } -func (deps) WriteProfileTo(string, io.Writer, int) error { return nil } - -var match deps -{{else}} -func match(_, _ string) (bool, error) { return true, nil } -{{end}} - -func main() { - tests := []testing.InternalTest{ -{{range .Tests}} - { {{printf "%q" .Name}}, p.{{.Name}} }, -{{end}} - } - benchmarks := []testing.InternalBenchmark{ -{{range .Benchmarks}} - { {{printf "%q" .Name}}, p.{{.Name}} }, -{{end}} - } - examples := []testing.InternalExample{ -{{range .Examples}} - {Name: {{printf "%q" .Name}}, F: p.{{.Name}}}, -{{end}} - } - m := testing.MainStart(match, tests, benchmarks, examples) -{{with .Main}} - p.{{.Name}}(m) -{{else}} - os.Exit(m.Run()) -{{end}} -} - -`)) - -var examplesOnlyTmpl = template.Must(template.New("examples").Parse(` -package main - -import p {{printf "%q" .Pkg.Pkg.Path}} - -func main() { -{{range .Examples}} - p.{{.Name}}() -{{end}} -} -`)) diff --git a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go index 81e8fdcf0..7e96fc234 100644 --- a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go +++ b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go @@ -23,10 +23,12 @@ package objectpath import ( "fmt" + "go/types" + "sort" "strconv" "strings" - "go/types" + "golang.org/x/tools/internal/typeparams" ) // A Path is an opaque name that identifies a types.Object @@ -57,12 +59,16 @@ type Path string // - The only PO operator is Package.Scope.Lookup, which requires an identifier. // - The only OT operator is Object.Type, // which we encode as '.' because dot cannot appear in an identifier. -// - The TT operators are encoded as [EKPRU]. +// - The TT operators are encoded as [EKPRUTC]; +// one of these (TypeParam) requires an integer operand, +// which is encoded as a string of decimal digits. // - The TO operators are encoded as [AFMO]; // three of these (At,Field,Method) require an integer operand, // which is encoded as a string of decimal digits. // These indices are stable across different representations // of the same package, even source and export data. +// The indices used are implementation specific and may not correspond to +// the argument to the go/types function. // // In the example below, // @@ -89,17 +95,19 @@ const ( opType = '.' 
// .Type() (Object) // type->type operators - opElem = 'E' // .Elem() (Pointer, Slice, Array, Chan, Map) - opKey = 'K' // .Key() (Map) - opParams = 'P' // .Params() (Signature) - opResults = 'R' // .Results() (Signature) - opUnderlying = 'U' // .Underlying() (Named) + opElem = 'E' // .Elem() (Pointer, Slice, Array, Chan, Map) + opKey = 'K' // .Key() (Map) + opParams = 'P' // .Params() (Signature) + opResults = 'R' // .Results() (Signature) + opUnderlying = 'U' // .Underlying() (Named) + opTypeParam = 'T' // .TypeParams.At(i) (Named, Signature) + opConstraint = 'C' // .Constraint() (TypeParam) // type->object operators - opAt = 'A' // .At(i) (Tuple) - opField = 'F' // .Field(i) (Struct) - opMethod = 'M' // .Method(i) (Named or Interface; not Struct: "promoted" names are ignored) - opObj = 'O' // .Obj() (Named) + opAt = 'A' // .At(i) (Tuple) + opField = 'F' // .Field(i) (Struct) + opMethod = 'M' // .Method(i) (Named or Interface; not Struct: "promoted" names are ignored) + opObj = 'O' // .Obj() (Named, TypeParam) ) // The For function returns the path to an object relative to its package, @@ -190,10 +198,15 @@ func For(obj types.Object) (Path, error) { // 3. Not a package-level object. // Reject obviously non-viable cases. switch obj := obj.(type) { + case *types.TypeName: + if _, ok := obj.Type().(*typeparams.TypeParam); !ok { + // With the exception of type parameters, only package-level type names + // have a path. + return "", fmt.Errorf("no path for %v", obj) + } case *types.Const, // Only package-level constants have a path. - *types.TypeName, // Only package-level types have a path. - *types.Label, // Labels are function-local. - *types.PkgName: // PkgNames are file-local. + *types.Label, // Labels are function-local. + *types.PkgName: // PkgNames are file-local. return "", fmt.Errorf("no path for %v", obj) case *types.Var: @@ -245,6 +258,12 @@ func For(obj types.Object) (Path, error) { return Path(r), nil } } else { + if named, _ := T.(*types.Named); named != nil { + if r := findTypeParam(obj, typeparams.ForNamed(named), path); r != nil { + // generic named type + return Path(r), nil + } + } // defined (named) type if r := find(obj, T.Underlying(), append(path, opUnderlying)); r != nil { return Path(r), nil @@ -270,8 +289,12 @@ func For(obj types.Object) (Path, error) { // Inspect declared methods of defined types. if T, ok := o.Type().(*types.Named); ok { path = append(path, opType) - for i := 0; i < T.NumMethods(); i++ { - m := T.Method(i) + // Note that method index here is always with respect + // to canonical ordering of methods, regardless of how + // they appear in the underlying type. 
+ canonical := canonicalize(T) + for i := 0; i < len(canonical); i++ { + m := canonical[i] path2 := appendOpArg(path, opMethod, i) if m == obj { return Path(path2), nil // found declared method @@ -313,6 +336,9 @@ func find(obj types.Object, T types.Type, path []byte) []byte { } return find(obj, T.Elem(), append(path, opElem)) case *types.Signature: + if r := findTypeParam(obj, typeparams.ForSignature(T), path); r != nil { + return r + } if r := find(obj, T.Params(), append(path, opParams)); r != nil { return r } @@ -353,10 +379,30 @@ func find(obj types.Object, T types.Type, path []byte) []byte { } } return nil + case *typeparams.TypeParam: + name := T.Obj() + if name == obj { + return append(path, opObj) + } + if r := find(obj, T.Constraint(), append(path, opConstraint)); r != nil { + return r + } + return nil } panic(T) } +func findTypeParam(obj types.Object, list *typeparams.TypeParamList, path []byte) []byte { + for i := 0; i < list.Len(); i++ { + tparam := list.At(i) + path2 := appendOpArg(path, opTypeParam, i) + if r := find(obj, tparam, path2); r != nil { + return r + } + } + return nil +} + // Object returns the object denoted by path p within the package pkg. func Object(pkg *types.Package, p Path) (types.Object, error) { if p == "" { @@ -381,10 +427,13 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { type hasElem interface { Elem() types.Type } - // abstraction of *types.{Interface,Named} - type hasMethods interface { - Method(int) *types.Func - NumMethods() int + // abstraction of *types.{Named,Signature} + type hasTypeParams interface { + TypeParams() *typeparams.TypeParamList + } + // abstraction of *types.{Named,TypeParam} + type hasObj interface { + Obj() *types.TypeName } // The loop state is the pair (t, obj), @@ -401,7 +450,7 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { // Codes [AFM] have an integer operand. 
var index int switch code { - case opAt, opField, opMethod: + case opAt, opField, opMethod, opTypeParam: rest := strings.TrimLeft(suffix, "0123456789") numerals := suffix[:len(suffix)-len(rest)] suffix = rest @@ -466,14 +515,32 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { case opUnderlying: named, ok := t.(*types.Named) if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t) + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named)", code, t, t) } t = named.Underlying() + case opTypeParam: + hasTypeParams, ok := t.(hasTypeParams) // Named, Signature + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or signature)", code, t, t) + } + tparams := hasTypeParams.TypeParams() + if n := tparams.Len(); index >= n { + return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n) + } + t = tparams.At(index) + + case opConstraint: + tparam, ok := t.(*typeparams.TypeParam) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want type parameter)", code, t, t) + } + t = tparam.Constraint() + case opAt: tuple, ok := t.(*types.Tuple) if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %s, want tuple)", code, t, t) + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want tuple)", code, t, t) } if n := tuple.Len(); index >= n { return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n) @@ -495,20 +562,21 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { case opMethod: hasMethods, ok := t.(hasMethods) // Interface or Named if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %s, want interface or named)", code, t, t) + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t) } - if n := hasMethods.NumMethods(); index >= n { + canonical := canonicalize(hasMethods) + if n := len(canonical); index >= n { return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n) } - obj = hasMethods.Method(index) + obj = canonical[index] t = nil case opObj: - named, ok := t.(*types.Named) + hasObj, ok := t.(hasObj) if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t) + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or type param)", code, t, t) } - obj = named.Obj() + obj = hasObj.Obj() t = nil default: @@ -522,3 +590,28 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { return obj, nil // success } + +// hasMethods is an abstraction of *types.{Interface,Named}. This is pulled up +// because it is used by methodOrdering, which is in turn used by both encoding +// and decoding. +type hasMethods interface { + Method(int) *types.Func + NumMethods() int +} + +// canonicalize returns a canonical order for the methods in a hasMethod. 
+func canonicalize(hm hasMethods) []*types.Func { + count := hm.NumMethods() + if count <= 0 { + return nil + } + canon := make([]*types.Func, count) + for i := 0; i < count; i++ { + canon[i] = hm.Method(i) + } + less := func(i, j int) bool { + return canon[i].Id() < canon[j].Id() + } + sort.Slice(canon, less) + return canon +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go index 38f596daf..90b3ab0e2 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/callee.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go @@ -9,13 +9,29 @@ import ( "go/types" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/typeparams" ) // Callee returns the named target of a function call, if any: // a function, method, builtin, or variable. +// +// Functions and methods may potentially have type parameters. func Callee(info *types.Info, call *ast.CallExpr) types.Object { + fun := astutil.Unparen(call.Fun) + + // Look through type instantiation if necessary. + isInstance := false + switch fun.(type) { + case *ast.IndexExpr, *typeparams.IndexListExpr: + // When extracting the callee from an *IndexExpr, we need to check that + // it is a *types.Func and not a *types.Var. + // Example: Don't match a slice m within the expression `m[0]()`. + isInstance = true + fun, _, _, _ = typeparams.UnpackIndexExpr(fun) + } + var obj types.Object - switch fun := astutil.Unparen(call.Fun).(type) { + switch fun := fun.(type) { case *ast.Ident: obj = info.Uses[fun] // type, var, builtin, or declared func case *ast.SelectorExpr: @@ -28,11 +44,18 @@ func Callee(info *types.Info, call *ast.CallExpr) types.Object { if _, ok := obj.(*types.TypeName); ok { return nil // T(x) is a conversion, not a call } + // A Func is required to match instantiations. + if _, ok := obj.(*types.Func); isInstance && !ok { + return nil // Was not a Func. + } return obj } -// StaticCallee returns the target (function or method) of a static -// function call, if any. It returns nil for calls to builtins. +// StaticCallee returns the target (function or method) of a static function +// call, if any. It returns nil for calls to builtins. +// +// Note: for calls of instantiated functions and methods, StaticCallee returns +// the corresponding generic function or method on the generic type. func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { return f diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go index c7f754500..490ee904a 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/map.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go @@ -11,6 +11,8 @@ import ( "fmt" "go/types" "reflect" + + "golang.org/x/tools/internal/typeparams" ) // Map is a hash-table-based mapping from types (types.Type) to @@ -211,11 +213,29 @@ func (m *Map) KeysString() string { // Call MakeHasher to create a Hasher. type Hasher struct { memo map[types.Type]uint32 + + // ptrMap records pointer identity. + ptrMap map[interface{}]uint32 + + // sigTParams holds type parameters from the signature being hashed. + // Signatures are considered identical modulo renaming of type parameters, so + // within the scope of a signature type the identity of the signature's type + // parameters is just their index. 
+ // + // Since the language does not currently support referring to uninstantiated + // generic types or functions, and instantiated signatures do not have type + // parameter lists, we should never encounter a second non-empty type + // parameter list when hashing a generic signature. + sigTParams *typeparams.TypeParamList } // MakeHasher returns a new Hasher instance. func MakeHasher() Hasher { - return Hasher{make(map[types.Type]uint32)} + return Hasher{ + memo: make(map[types.Type]uint32), + ptrMap: make(map[interface{}]uint32), + sigTParams: nil, + } } // Hash computes a hash value for the given type t such that @@ -273,17 +293,62 @@ func (h Hasher) hashFor(t types.Type) uint32 { if t.Variadic() { hash *= 8863 } + + // Use a separate hasher for types inside of the signature, where type + // parameter identity is modified to be (index, constraint). We must use a + // new memo for this hasher as type identity may be affected by this + // masking. For example, in func[T any](*T), the identity of *T depends on + // whether we are mapping the argument in isolation, or recursively as part + // of hashing the signature. + // + // We should never encounter a generic signature while hashing another + // generic signature, but defensively set sigTParams only if h.mask is + // unset. + tparams := typeparams.ForSignature(t) + if h.sigTParams == nil && tparams.Len() != 0 { + h = Hasher{ + // There may be something more efficient than discarding the existing + // memo, but it would require detecting whether types are 'tainted' by + // references to type parameters. + memo: make(map[types.Type]uint32), + // Re-using ptrMap ensures that pointer identity is preserved in this + // hasher. + ptrMap: h.ptrMap, + sigTParams: tparams, + } + } + + for i := 0; i < tparams.Len(); i++ { + tparam := tparams.At(i) + hash += 7 * h.Hash(tparam.Constraint()) + } + return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) + case *typeparams.Union: + return h.hashUnion(t) + case *types.Interface: + // Interfaces are identical if they have the same set of methods, with + // identical names and types, and they have the same set of type + // restrictions. See go/types.identical for more details. var hash uint32 = 9103 + + // Hash methods. for i, n := 0, t.NumMethods(); i < n; i++ { - // See go/types.identicalMethods for rationale. // Method order is not significant. // Ignore m.Pkg(). m := t.Method(i) hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) } + + // Hash type restrictions. + terms, err := typeparams.InterfaceTermSet(t) + // if err != nil t has invalid type restrictions. + if err == nil { + hash += h.hashTermSet(terms) + } + return hash case *types.Map: @@ -293,13 +358,22 @@ func (h Hasher) hashFor(t types.Type) uint32 { return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) case *types.Named: - // Not safe with a copying GC; objects may move. - return uint32(reflect.ValueOf(t.Obj()).Pointer()) + hash := h.hashPtr(t.Obj()) + targs := typeparams.NamedTypeArgs(t) + for i := 0; i < targs.Len(); i++ { + targ := targs.At(i) + hash += 2 * h.Hash(targ) + } + return hash + + case *typeparams.TypeParam: + return h.hashTypeParam(t) case *types.Tuple: return h.hashTuple(t) } - panic(t) + + panic(fmt.Sprintf("%T: %v", t, t)) } func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { @@ -311,3 +385,57 @@ func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { } return hash } + +func (h Hasher) hashUnion(t *typeparams.Union) uint32 { + // Hash type restrictions. 
+ terms, err := typeparams.UnionTermSet(t) + // if err != nil t has invalid type restrictions. Fall back on a non-zero + // hash. + if err != nil { + return 9151 + } + return h.hashTermSet(terms) +} + +func (h Hasher) hashTermSet(terms []*typeparams.Term) uint32 { + var hash uint32 = 9157 + 2*uint32(len(terms)) + for _, term := range terms { + // term order is not significant. + termHash := h.Hash(term.Type()) + if term.Tilde() { + termHash *= 9161 + } + hash += 3 * termHash + } + return hash +} + +// hashTypeParam returns a hash of the type parameter t, with a hash value +// depending on whether t is contained in h.sigTParams. +// +// If h.sigTParams is set and contains t, then we are in the process of hashing +// a signature, and the hash value of t must depend only on t's index and +// constraint: signatures are considered identical modulo type parameter +// renaming. +// +// Otherwise the hash of t depends only on t's pointer identity. +func (h Hasher) hashTypeParam(t *typeparams.TypeParam) uint32 { + if h.sigTParams != nil { + i := t.Index() + if i >= 0 && i < h.sigTParams.Len() && t == h.sigTParams.At(i) { + return 9173 + 2*h.Hash(t.Constraint()) + 3*uint32(i) + } + } + return h.hashPtr(t.Obj()) +} + +// hashPtr hashes the pointer identity of ptr. It uses h.ptrMap to ensure that +// pointers values are not dependent on the GC. +func (h Hasher) hashPtr(ptr interface{}) uint32 { + if hash, ok := h.ptrMap[ptr]; ok { + return hash + } + hash := uint32(reflect.ValueOf(ptr).Pointer()) + h.ptrMap[ptr] = hash + return hash +} diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go index be8ffa25f..dc52372e4 100644 --- a/vendor/golang.org/x/tools/internal/imports/sortimports.go +++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go @@ -9,6 +9,7 @@ package imports import ( "go/ast" "go/token" + "log" "sort" "strconv" ) @@ -60,6 +61,7 @@ func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) { // mergeImports merges all the import declarations into the first one. // Taken from golang.org/x/tools/ast/astutil. +// This does not adjust line numbers properly func mergeImports(fset *token.FileSet, f *ast.File) { if len(f.Decls) <= 1 { return @@ -237,8 +239,17 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast p := s.Pos() line := fset.File(p).Line(p) for previousLine := line - 1; previousLine >= firstSpecLine; { - fset.File(p).MergeLine(previousLine) - previousLine-- + // MergeLine can panic. Avoid the panic at the cost of not removing the blank line + // golang/go#50329 + if previousLine > 0 && previousLine < fset.File(p).LineCount() { + fset.File(p).MergeLine(previousLine) + previousLine-- + } else { + // try to gather some data to diagnose how this could happen + req := "Please report what the imports section of your go file looked like." + log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s", + firstSpecLine, line, previousLine, fset.File(p).LineCount(), req) + } } } return specs diff --git a/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go b/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go index 062f491fb..df9fbd514 100644 --- a/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go +++ b/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go @@ -49,11 +49,6 @@ const ( // // Currently this matcher only accepts case-insensitive fuzzy patterns. 
// -// TODO(rfindley): -// - implement smart-casing -// - implement space-separated groups -// - implement ', ^, and $ modifiers -// // An empty pattern matches no input. func NewSymbolMatcher(pattern string) *SymbolMatcher { m := &SymbolMatcher{} @@ -176,7 +171,12 @@ input: // 1. 1.0 if the character starts a segment, .8 if the character start a // mid-segment word, otherwise 0.6. This carries over to immediately // following characters. - // 2. 1.0 if the character is part of the last segment, otherwise + // 2. For the final character match, the multiplier from (1) is reduced to + // .8 if the next character in the input is a mid-segment word, or 0.6 if + // the next character in the input is not a word or segment start. This + // ensures that we favor whole-word or whole-segment matches over prefix + // matches. + // 3. 1.0 if the character is part of the last segment, otherwise // 1.0-.2*, with a max segment count of 3. // // This is a very naive algorithm, but it is fast. There's lots of prior art @@ -211,8 +211,20 @@ input: case m.roles[ii]&wordStart != 0 && wordStreak > streakBonus: streakBonus = wordStreak } + finalChar := pi >= m.patternLen + // finalCost := 1.0 + if finalChar && streakBonus > noStreak { + switch { + case ii == inputLen-1 || m.roles[ii+1]&segmentStart != 0: + // Full segment: no reduction + case m.roles[ii+1]&wordStart != 0: + streakBonus = wordStreak + default: + streakBonus = noStreak + } + } totScore += streakBonus * (1.0 - float64(m.segments[ii])*perSegment) - if pi >= m.patternLen { + if finalChar { break } } else { diff --git a/vendor/golang.org/x/tools/internal/typeparams/common.go b/vendor/golang.org/x/tools/internal/typeparams/common.go index 9fc6b4beb..1222764b6 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/common.go +++ b/vendor/golang.org/x/tools/internal/typeparams/common.go @@ -2,24 +2,78 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package typeparams provides functions to work indirectly with type parameter -// data stored in go/ast and go/types objects, while these API are guarded by a -// build constraint. +// Package typeparams contains common utilities for writing tools that interact +// with generic Go code, as introduced with Go 1.18. // -// This package exists to make it easier for tools to work with generic code, -// while also compiling against older Go versions. +// Many of the types and functions in this package are proxies for the new APIs +// introduced in the standard library with Go 1.18. For example, the +// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec +// function returns the value of the go/ast.TypeSpec.TypeParams field. At Go +// versions older than 1.18 these helpers are implemented as stubs, allowing +// users of this package to write code that handles generic constructs inline, +// even if the Go version being used to compile does not support generics. +// +// Additionally, this package contains common utilities for working with the +// new generic constructs, to supplement the standard library APIs. Notably, +// the StructuralTerms API computes a minimal representation of the structural +// restrictions on a type parameter. In the future, this API may be available +// from go/types. +// +// See the example/README.md for a more detailed guide on how to update tools +// to support generics. 
package typeparams import ( "go/ast" "go/token" + "go/types" ) -// A IndexExprData holds data from both ast.IndexExpr and the new -// ast.MultiIndexExpr, which was introduced in Go 1.18. -type IndexExprData struct { - X ast.Expr // expression - Lbrack token.Pos // position of "[" - Indices []ast.Expr // index expressions - Rbrack token.Pos // position of "]" +// UnpackIndexExpr extracts data from AST nodes that represent index +// expressions. +// +// For an ast.IndexExpr, the resulting indices slice will contain exactly one +// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable +// number of index expressions. +// +// For nodes that don't represent index expressions, the first return value of +// UnpackIndexExpr will be nil. +func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) { + switch e := n.(type) { + case *ast.IndexExpr: + return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack + case *IndexListExpr: + return e.X, e.Lbrack, e.Indices, e.Rbrack + } + return nil, token.NoPos, nil, token.NoPos +} + +// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on +// the cardinality of indices. Calling PackIndexExpr with len(indices) == 0 +// will panic. +func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr { + switch len(indices) { + case 0: + panic("empty indices") + case 1: + return &ast.IndexExpr{ + X: x, + Lbrack: lbrack, + Index: indices[0], + Rbrack: rbrack, + } + default: + return &IndexListExpr{ + X: x, + Lbrack: lbrack, + Indices: indices, + Rbrack: rbrack, + } + } +} + +// IsTypeParam reports whether t is a type parameter. +func IsTypeParam(t types.Type) bool { + _, ok := t.(*TypeParam) + return ok } diff --git a/vendor/golang.org/x/tools/internal/typeparams/enabled_go117.go b/vendor/golang.org/x/tools/internal/typeparams/enabled_go117.go index 72d010e51..18212390e 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/enabled_go117.go +++ b/vendor/golang.org/x/tools/internal/typeparams/enabled_go117.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build !typeparams || !go1.18 -// +build !typeparams !go1.18 +//go:build !go1.18 +// +build !go1.18 package typeparams diff --git a/vendor/golang.org/x/tools/internal/typeparams/enabled_go118.go b/vendor/golang.org/x/tools/internal/typeparams/enabled_go118.go index 642fc8ee2..d67148823 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/enabled_go118.go +++ b/vendor/golang.org/x/tools/internal/typeparams/enabled_go118.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build typeparams && go1.18 -// +build typeparams,go1.18 +//go:build go1.18 +// +build go1.18 package typeparams diff --git a/vendor/golang.org/x/tools/internal/typeparams/normalize.go b/vendor/golang.org/x/tools/internal/typeparams/normalize.go new file mode 100644 index 000000000..090f142a5 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typeparams/normalize.go @@ -0,0 +1,216 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package typeparams + +import ( + "errors" + "fmt" + "go/types" + "os" + "strings" +) + +//go:generate go run copytermlist.go + +const debug = false + +var ErrEmptyTypeSet = errors.New("empty type set") + +// StructuralTerms returns a slice of terms representing the normalized +// structural type restrictions of a type parameter, if any. +// +// Structural type restrictions of a type parameter are created via +// non-interface types embedded in its constraint interface (directly, or via a +// chain of interface embeddings). For example, in the declaration +// type T[P interface{~int; m()}] int +// the structural restriction of the type parameter P is ~int. +// +// With interface embedding and unions, the specification of structural type +// restrictions may be arbitrarily complex. For example, consider the +// following: +// +// type A interface{ ~string|~[]byte } +// +// type B interface{ int|string } +// +// type C interface { ~string|~int } +// +// type T[P interface{ A|B; C }] int +// +// In this example, the structural type restriction of P is ~string|int: A|B +// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int, +// which when intersected with C (~string|~int) yields ~string|int. +// +// StructuralTerms computes these expansions and reductions, producing a +// "normalized" form of the embeddings. A structural restriction is normalized +// if it is a single union containing no interface terms, and is minimal in the +// sense that removing any term changes the set of types satisfying the +// constraint. It is left as a proof for the reader that, modulo sorting, there +// is exactly one such normalized form. +// +// Because the minimal representation always takes this form, StructuralTerms +// returns a slice of tilde terms corresponding to the terms of the union in +// the normalized structural restriction. An error is returned if the +// constraint interface is invalid, exceeds complexity bounds, or has an empty +// type set. In the latter case, StructuralTerms returns ErrEmptyTypeSet. +// +// StructuralTerms makes no guarantees about the order of terms, except that it +// is deterministic. +func StructuralTerms(tparam *TypeParam) ([]*Term, error) { + constraint := tparam.Constraint() + if constraint == nil { + return nil, fmt.Errorf("%s has nil constraint", tparam) + } + iface, _ := constraint.Underlying().(*types.Interface) + if iface == nil { + return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying()) + } + return InterfaceTermSet(iface) +} + +// InterfaceTermSet computes the normalized terms for a constraint interface, +// returning an error if the term set cannot be computed or is empty. In the +// latter case, the error will be ErrEmptyTypeSet. +// +// See the documentation of StructuralTerms for more information on +// normalization. +func InterfaceTermSet(iface *types.Interface) ([]*Term, error) { + return computeTermSet(iface) +} + +// UnionTermSet computes the normalized terms for a union, returning an error +// if the term set cannot be computed or is empty. In the latter case, the +// error will be ErrEmptyTypeSet. +// +// See the documentation of StructuralTerms for more information on +// normalization. 
+func UnionTermSet(union *Union) ([]*Term, error) { + return computeTermSet(union) +} + +func computeTermSet(typ types.Type) ([]*Term, error) { + tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0) + if err != nil { + return nil, err + } + if tset.terms.isEmpty() { + return nil, ErrEmptyTypeSet + } + if tset.terms.isAll() { + return nil, nil + } + var terms []*Term + for _, term := range tset.terms { + terms = append(terms, NewTerm(term.tilde, term.typ)) + } + return terms, nil +} + +// A termSet holds the normalized set of terms for a given type. +// +// The name termSet is intentionally distinct from 'type set': a type set is +// all types that implement a type (and includes method restrictions), whereas +// a term set just represents the structural restrictions on a type. +type termSet struct { + complete bool + terms termlist +} + +func indentf(depth int, format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...) +} + +func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) { + if t == nil { + panic("nil type") + } + + if debug { + indentf(depth, "%s", t.String()) + defer func() { + if err != nil { + indentf(depth, "=> %s", err) + } else { + indentf(depth, "=> %s", res.terms.String()) + } + }() + } + + const maxTermCount = 100 + if tset, ok := seen[t]; ok { + if !tset.complete { + return nil, fmt.Errorf("cycle detected in the declaration of %s", t) + } + return tset, nil + } + + // Mark the current type as seen to avoid infinite recursion. + tset := new(termSet) + defer func() { + tset.complete = true + }() + seen[t] = tset + + switch u := t.Underlying().(type) { + case *types.Interface: + // The term set of an interface is the intersection of the term sets of its + // embedded types. + tset.terms = allTermlist + for i := 0; i < u.NumEmbeddeds(); i++ { + embedded := u.EmbeddedType(i) + if _, ok := embedded.Underlying().(*TypeParam); ok { + return nil, fmt.Errorf("invalid embedded type %T", embedded) + } + tset2, err := computeTermSetInternal(embedded, seen, depth+1) + if err != nil { + return nil, err + } + tset.terms = tset.terms.intersect(tset2.terms) + } + case *Union: + // The term set of a union is the union of term sets of its terms. + tset.terms = nil + for i := 0; i < u.Len(); i++ { + t := u.Term(i) + var terms termlist + switch t.Type().Underlying().(type) { + case *types.Interface: + tset2, err := computeTermSetInternal(t.Type(), seen, depth+1) + if err != nil { + return nil, err + } + terms = tset2.terms + case *TypeParam, *Union: + // A stand-alone type parameter or union is not permitted as union + // term. + return nil, fmt.Errorf("invalid union term %T", t) + default: + if t.Type() == types.Typ[types.Invalid] { + continue + } + terms = termlist{{t.Tilde(), t.Type()}} + } + tset.terms = tset.terms.union(terms) + if len(tset.terms) > maxTermCount { + return nil, fmt.Errorf("exceeded max term count %d", maxTermCount) + } + } + case *TypeParam: + panic("unreachable") + default: + // For all other types, the term set is just a single non-tilde term + // holding the type itself. + if u != types.Typ[types.Invalid] { + tset.terms = termlist{{false, t}} + } + } + return tset, nil +} + +// under is a facade for the go/types internal function of the same name. It is +// used by typeterm.go. 
+func under(t types.Type) types.Type { + return t.Underlying() +} diff --git a/vendor/golang.org/x/tools/internal/typeparams/termlist.go b/vendor/golang.org/x/tools/internal/typeparams/termlist.go new file mode 100644 index 000000000..10857d504 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typeparams/termlist.go @@ -0,0 +1,172 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by copytermlist.go DO NOT EDIT. + +package typeparams + +import ( + "bytes" + "go/types" +) + +// A termlist represents the type set represented by the union +// t1 ∪ y2 ∪ ... tn of the type sets of the terms t1 to tn. +// A termlist is in normal form if all terms are disjoint. +// termlist operations don't require the operands to be in +// normal form. +type termlist []*term + +// allTermlist represents the set of all types. +// It is in normal form. +var allTermlist = termlist{new(term)} + +// String prints the termlist exactly (without normalization). +func (xl termlist) String() string { + if len(xl) == 0 { + return "∅" + } + var buf bytes.Buffer + for i, x := range xl { + if i > 0 { + buf.WriteString(" ∪ ") + } + buf.WriteString(x.String()) + } + return buf.String() +} + +// isEmpty reports whether the termlist xl represents the empty set of types. +func (xl termlist) isEmpty() bool { + // If there's a non-nil term, the entire list is not empty. + // If the termlist is in normal form, this requires at most + // one iteration. + for _, x := range xl { + if x != nil { + return false + } + } + return true +} + +// isAll reports whether the termlist xl represents the set of all types. +func (xl termlist) isAll() bool { + // If there's a 𝓤 term, the entire list is 𝓤. + // If the termlist is in normal form, this requires at most + // one iteration. + for _, x := range xl { + if x != nil && x.typ == nil { + return true + } + } + return false +} + +// norm returns the normal form of xl. +func (xl termlist) norm() termlist { + // Quadratic algorithm, but good enough for now. + // TODO(gri) fix asymptotic performance + used := make([]bool, len(xl)) + var rl termlist + for i, xi := range xl { + if xi == nil || used[i] { + continue + } + for j := i + 1; j < len(xl); j++ { + xj := xl[j] + if xj == nil || used[j] { + continue + } + if u1, u2 := xi.union(xj); u2 == nil { + // If we encounter a 𝓤 term, the entire list is 𝓤. + // Exit early. + // (Note that this is not just an optimization; + // if we continue, we may end up with a 𝓤 term + // and other terms and the result would not be + // in normal form.) + if u1.typ == nil { + return allTermlist + } + xi = u1 + used[j] = true // xj is now unioned into xi - ignore it in future iterations + } + } + rl = append(rl, xi) + } + return rl +} + +// If the type set represented by xl is specified by a single (non-𝓤) term, +// structuralType returns that type. Otherwise it returns nil. +func (xl termlist) structuralType() types.Type { + if nl := xl.norm(); len(nl) == 1 { + return nl[0].typ // if nl.isAll() then typ is nil, which is ok + } + return nil +} + +// union returns the union xl ∪ yl. +func (xl termlist) union(yl termlist) termlist { + return append(xl, yl...).norm() +} + +// intersect returns the intersection xl ∩ yl. +func (xl termlist) intersect(yl termlist) termlist { + if xl.isEmpty() || yl.isEmpty() { + return nil + } + + // Quadratic algorithm, but good enough for now. 
+ // TODO(gri) fix asymptotic performance + var rl termlist + for _, x := range xl { + for _, y := range yl { + if r := x.intersect(y); r != nil { + rl = append(rl, r) + } + } + } + return rl.norm() +} + +// equal reports whether xl and yl represent the same type set. +func (xl termlist) equal(yl termlist) bool { + // TODO(gri) this should be more efficient + return xl.subsetOf(yl) && yl.subsetOf(xl) +} + +// includes reports whether t ∈ xl. +func (xl termlist) includes(t types.Type) bool { + for _, x := range xl { + if x.includes(t) { + return true + } + } + return false +} + +// supersetOf reports whether y ⊆ xl. +func (xl termlist) supersetOf(y *term) bool { + for _, x := range xl { + if y.subsetOf(x) { + return true + } + } + return false +} + +// subsetOf reports whether xl ⊆ yl. +func (xl termlist) subsetOf(yl termlist) bool { + if yl.isEmpty() { + return xl.isEmpty() + } + + // each term x of xl must be a subset of yl + for _, x := range xl { + if !yl.supersetOf(x) { + return false // x is not a subset yl + } + } + return true +} diff --git a/vendor/golang.org/x/tools/internal/typeparams/typeparams_go117.go b/vendor/golang.org/x/tools/internal/typeparams/typeparams_go117.go index 12817af85..5fd3fc351 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/typeparams_go117.go +++ b/vendor/golang.org/x/tools/internal/typeparams/typeparams_go117.go @@ -2,13 +2,14 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build !typeparams || !go1.18 -// +build !typeparams !go1.18 +//go:build !go1.18 +// +build !go1.18 package typeparams import ( "go/ast" + "go/token" "go/types" ) @@ -16,18 +17,14 @@ func unsupported() { panic("type parameters are unsupported at this go version") } -// GetIndexExprData extracts data from *ast.IndexExpr nodes. -// For other nodes, GetIndexExprData returns nil. -func GetIndexExprData(n ast.Node) *IndexExprData { - if e, _ := n.(*ast.IndexExpr); e != nil { - return &IndexExprData{ - X: e.X, - Lbrack: e.Lbrack, - Indices: []ast.Expr{e.Index}, - Rbrack: e.Rbrack, - } - } - return nil +// IndexListExpr is a placeholder type, as type parameters are not supported at +// this Go version. Its methods panic on use. +type IndexListExpr struct { + ast.Expr + X ast.Expr // expression + Lbrack token.Pos // position of "[" + Indices []ast.Expr // index expressions + Rbrack token.Pos // position of "]" } // ForTypeSpec returns an empty field list, as type parameters on not supported @@ -46,6 +43,7 @@ func ForFuncType(*ast.FuncType) *ast.FieldList { // this Go version. Its methods panic on use. type TypeParam struct{ types.Type } +func (*TypeParam) Index() int { unsupported(); return 0 } func (*TypeParam) Constraint() types.Type { unsupported(); return nil } func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil } @@ -72,42 +70,46 @@ func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) { unsupported() } +// NewSignatureType calls types.NewSignature, panicking if recvTypeParams or +// typeParams is non-empty. +func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { + if len(recvTypeParams) != 0 || len(typeParams) != 0 { + panic("signatures cannot have type parameters at this Go version") + } + return types.NewSignature(recv, params, results, variadic) +} + // ForSignature returns an empty slice. 
func ForSignature(*types.Signature) *TypeParamList { return nil } -// SetForSignature panics if tparams is non-empty. -func SetForSignature(_ *types.Signature, tparams []*TypeParam) { - if len(tparams) > 0 { - unsupported() - } -} - // RecvTypeParams returns a nil slice. func RecvTypeParams(sig *types.Signature) *TypeParamList { return nil } -// SetRecvTypeParams panics if rparams is non-empty. -func SetRecvTypeParams(sig *types.Signature, rparams []*TypeParam) { - if len(rparams) > 0 { - unsupported() - } -} - // IsComparable returns false, as no interfaces are type-restricted at this Go // version. func IsComparable(*types.Interface) bool { return false } -// IsConstraint returns false, as no interfaces are type-restricted at this Go +// IsMethodSet returns true, as no interfaces are type-restricted at this Go // version. -func IsConstraint(*types.Interface) bool { +func IsMethodSet(*types.Interface) bool { + return true +} + +// IsImplicit returns false, as no interfaces are implicit at this Go version. +func IsImplicit(*types.Interface) bool { return false } +// MarkImplicit does nothing, because this Go version does not have implicit +// interfaces. +func MarkImplicit(*types.Interface) {} + // ForNamed returns an empty type parameter list, as type parameters are not // supported at this Go version. func ForNamed(*types.Named) *TypeParamList { @@ -131,19 +133,25 @@ func NamedTypeOrigin(named *types.Named) types.Type { return named } -// Term is a placeholder type, as type parameters are not supported at this Go -// version. Its methods panic on use. -type Term struct{} +// Term holds information about a structural type restriction. +type Term struct { + tilde bool + typ types.Type +} -func (*Term) Tilde() bool { unsupported(); return false } -func (*Term) Type() types.Type { unsupported(); return nil } -func (*Term) String() string { unsupported(); return "" } -func (*Term) Underlying() types.Type { unsupported(); return nil } +func (m *Term) Tilde() bool { return m.tilde } +func (m *Term) Type() types.Type { return m.typ } +func (m *Term) String() string { + pre := "" + if m.tilde { + pre = "~" + } + return pre + m.typ.String() +} // NewTerm is unsupported at this Go version, and panics. func NewTerm(tilde bool, typ types.Type) *Term { - unsupported() - return nil + return &Term{tilde, typ} } // Union is a placeholder type, as type parameters are not supported at this Go @@ -162,16 +170,23 @@ func NewUnion(terms []*Term) *Union { // InitInstanceInfo is a noop at this Go version. func InitInstanceInfo(*types.Info) {} -// GetInstance returns nothing, as type parameters are not supported at this Go -// version. -func GetInstance(*types.Info, *ast.Ident) (*TypeList, types.Type) { return nil, nil } +// Instance is a placeholder type, as type parameters are not supported at this +// Go version. +type Instance struct { + TypeArgs *TypeList + Type types.Type +} -// Environment is a placeholder type, as type parameters are not supported at +// GetInstances returns a nil map, as type parameters are not supported at this +// Go version. +func GetInstances(info *types.Info) map[*ast.Ident]Instance { return nil } + +// Context is a placeholder type, as type parameters are not supported at // this Go version. -type Environment struct{} +type Context struct{} // Instantiate is unsupported on this Go version, and panics. 
-func Instantiate(env *Environment, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { +func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { unsupported() return nil, nil } diff --git a/vendor/golang.org/x/tools/internal/typeparams/typeparams_go118.go b/vendor/golang.org/x/tools/internal/typeparams/typeparams_go118.go index 8ab17b777..7470aed8c 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/typeparams_go118.go +++ b/vendor/golang.org/x/tools/internal/typeparams/typeparams_go118.go @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build typeparams && go1.18 -// +build typeparams,go1.18 +//go:build go1.18 +// +build go1.18 package typeparams @@ -12,29 +12,8 @@ import ( "go/types" ) -// GetIndexExprData extracts data from AST nodes that represent index -// expressions. -// -// For an ast.IndexExpr, the resulting IndexExprData will have exactly one -// index expression. For an ast.IndexListExpr (go1.18+), it may have a -// variable number of index expressions. -// -// For nodes that don't represent index expressions, GetIndexExprData returns -// nil. -func GetIndexExprData(n ast.Node) *IndexExprData { - switch e := n.(type) { - case *ast.IndexExpr: - return &IndexExprData{ - X: e.X, - Lbrack: e.Lbrack, - Indices: []ast.Expr{e.Index}, - Rbrack: e.Rbrack, - } - case *ast.IndexListExpr: - return (*IndexExprData)(e) - } - return nil -} +// IndexListExpr is an alias for ast.IndexListExpr. +type IndexListExpr = ast.IndexListExpr // ForTypeSpec returns n.TypeParams. func ForTypeSpec(n *ast.TypeSpec) *ast.FieldList { @@ -71,34 +50,39 @@ func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) { tparam.SetConstraint(constraint) } +// NewSignatureType calls types.NewSignatureType. +func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { + return types.NewSignatureType(recv, recvTypeParams, typeParams, params, results, variadic) +} + // ForSignature returns sig.TypeParams() func ForSignature(sig *types.Signature) *TypeParamList { return sig.TypeParams() } -// SetForSignature calls sig.SetTypeParams(tparams) -func SetForSignature(sig *types.Signature, tparams []*TypeParam) { - sig.SetTypeParams(tparams) -} - // RecvTypeParams returns sig.RecvTypeParams(). func RecvTypeParams(sig *types.Signature) *TypeParamList { return sig.RecvTypeParams() } -// SetRecvTypeParams calls sig.SetRecvTypeParams(rparams). -func SetRecvTypeParams(sig *types.Signature, rparams []*TypeParam) { - sig.SetRecvTypeParams(rparams) -} - // IsComparable calls iface.IsComparable(). func IsComparable(iface *types.Interface) bool { return iface.IsComparable() } -// IsConstraint calls iface.IsConstraint(). -func IsConstraint(iface *types.Interface) bool { - return iface.IsConstraint() +// IsMethodSet calls iface.IsMethodSet(). +func IsMethodSet(iface *types.Interface) bool { + return iface.IsMethodSet() +} + +// IsImplicit calls iface.IsImplicit(). +func IsImplicit(iface *types.Interface) bool { + return iface.IsImplicit() +} + +// MarkImplicit calls iface.MarkImplicit(). 
+func MarkImplicit(iface *types.Interface) { + iface.MarkImplicit() } // ForNamed extracts the (possibly empty) type parameter object list from @@ -145,21 +129,18 @@ func InitInstanceInfo(info *types.Info) { info.Instances = make(map[*ast.Ident]types.Instance) } -// GetInstance extracts information about the instantiation occurring at the -// identifier id. id should be the identifier denoting a parameterized type or -// function in an instantiation expression or function call. -func GetInstance(info *types.Info, id *ast.Ident) (*TypeList, types.Type) { - if info.Instances != nil { - inf := info.Instances[id] - return inf.TypeArgs, inf.Type - } - return nil, nil +// Instance is an alias for types.Instance. +type Instance = types.Instance + +// GetInstances returns info.Instances. +func GetInstances(info *types.Info) map[*ast.Ident]Instance { + return info.Instances } -// Environment is an alias for types.Environment. -type Environment = types.Environment +// Context is an alias for types.Context. +type Context = types.Context // Instantiate calls types.Instantiate. -func Instantiate(env *Environment, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { - return types.Instantiate(env, typ, targs, validate) +func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { + return types.Instantiate(ctxt, typ, targs, validate) } diff --git a/vendor/golang.org/x/tools/internal/typeparams/typeterm.go b/vendor/golang.org/x/tools/internal/typeparams/typeterm.go new file mode 100644 index 000000000..7ddee28d9 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typeparams/typeterm.go @@ -0,0 +1,170 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by copytermlist.go DO NOT EDIT. + +package typeparams + +import "go/types" + +// A term describes elementary type sets: +// +// ∅: (*term)(nil) == ∅ // set of no types (empty set) +// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse) +// T: &term{false, T} == {T} // set of type T +// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t +// +type term struct { + tilde bool // valid if typ != nil + typ types.Type +} + +func (x *term) String() string { + switch { + case x == nil: + return "∅" + case x.typ == nil: + return "𝓤" + case x.tilde: + return "~" + x.typ.String() + default: + return x.typ.String() + } +} + +// equal reports whether x and y represent the same type set. +func (x *term) equal(y *term) bool { + // easy cases + switch { + case x == nil || y == nil: + return x == y + case x.typ == nil || y.typ == nil: + return x.typ == y.typ + } + // ∅ ⊂ x, y ⊂ 𝓤 + + return x.tilde == y.tilde && types.Identical(x.typ, y.typ) +} + +// union returns the union x ∪ y: zero, one, or two non-nil terms. +func (x *term) union(y *term) (_, _ *term) { + // easy cases + switch { + case x == nil && y == nil: + return nil, nil // ∅ ∪ ∅ == ∅ + case x == nil: + return y, nil // ∅ ∪ y == y + case y == nil: + return x, nil // x ∪ ∅ == x + case x.typ == nil: + return x, nil // 𝓤 ∪ y == 𝓤 + case y.typ == nil: + return y, nil // x ∪ 𝓤 == 𝓤 + } + // ∅ ⊂ x, y ⊂ 𝓤 + + if x.disjoint(y) { + return x, y // x ∪ y == (x, y) if x ∩ y == ∅ + } + // x.typ == y.typ + + // ~t ∪ ~t == ~t + // ~t ∪ T == ~t + // T ∪ ~t == ~t + // T ∪ T == T + if x.tilde || !y.tilde { + return x, nil + } + return y, nil +} + +// intersect returns the intersection x ∩ y. 
+func (x *term) intersect(y *term) *term { + // easy cases + switch { + case x == nil || y == nil: + return nil // ∅ ∩ y == ∅ and ∩ ∅ == ∅ + case x.typ == nil: + return y // 𝓤 ∩ y == y + case y.typ == nil: + return x // x ∩ 𝓤 == x + } + // ∅ ⊂ x, y ⊂ 𝓤 + + if x.disjoint(y) { + return nil // x ∩ y == ∅ if x ∩ y == ∅ + } + // x.typ == y.typ + + // ~t ∩ ~t == ~t + // ~t ∩ T == T + // T ∩ ~t == T + // T ∩ T == T + if !x.tilde || y.tilde { + return x + } + return y +} + +// includes reports whether t ∈ x. +func (x *term) includes(t types.Type) bool { + // easy cases + switch { + case x == nil: + return false // t ∈ ∅ == false + case x.typ == nil: + return true // t ∈ 𝓤 == true + } + // ∅ ⊂ x ⊂ 𝓤 + + u := t + if x.tilde { + u = under(u) + } + return types.Identical(x.typ, u) +} + +// subsetOf reports whether x ⊆ y. +func (x *term) subsetOf(y *term) bool { + // easy cases + switch { + case x == nil: + return true // ∅ ⊆ y == true + case y == nil: + return false // x ⊆ ∅ == false since x != ∅ + case y.typ == nil: + return true // x ⊆ 𝓤 == true + case x.typ == nil: + return false // 𝓤 ⊆ y == false since y != 𝓤 + } + // ∅ ⊂ x, y ⊂ 𝓤 + + if x.disjoint(y) { + return false // x ⊆ y == false if x ∩ y == ∅ + } + // x.typ == y.typ + + // ~t ⊆ ~t == true + // ~t ⊆ T == false + // T ⊆ ~t == true + // T ⊆ T == true + return !x.tilde || y.tilde +} + +// disjoint reports whether x ∩ y == ∅. +// x.typ and y.typ must not be nil. +func (x *term) disjoint(y *term) bool { + if debug && (x.typ == nil || y.typ == nil) { + panic("invalid argument(s)") + } + ux := x.typ + if y.tilde { + ux = under(ux) + } + uy := y.typ + if x.tilde { + uy = under(uy) + } + return !types.Identical(ux, uy) +} diff --git a/vendor/gopkg.in/ini.v1/README.md b/vendor/gopkg.in/ini.v1/README.md index 5d65658b2..1e4294452 100644 --- a/vendor/gopkg.in/ini.v1/README.md +++ b/vendor/gopkg.in/ini.v1/README.md @@ -24,7 +24,7 @@ Package ini provides INI file read and write functionality in Go. ## Installation -The minimum requirement of Go is **1.6**. +The minimum requirement of Go is **1.12**. ```sh $ go get gopkg.in/ini.v1 diff --git a/vendor/gopkg.in/ini.v1/file.go b/vendor/gopkg.in/ini.v1/file.go index b96d172cf..7b4e560d1 100644 --- a/vendor/gopkg.in/ini.v1/file.go +++ b/vendor/gopkg.in/ini.v1/file.go @@ -142,6 +142,12 @@ func (f *File) GetSection(name string) (*Section, error) { return secs[0], err } +// HasSection returns true if the file contains a section with given name. +func (f *File) HasSection(name string) bool { + section, _ := f.GetSection(name) + return section != nil +} + // SectionsByName returns all sections with given name. func (f *File) SectionsByName(name string) ([]*Section, error) { if len(name) == 0 { @@ -168,8 +174,9 @@ func (f *File) SectionsByName(name string) ([]*Section, error) { func (f *File) Section(name string) *Section { sec, err := f.GetSection(name) if err != nil { - // Note: It's OK here because the only possible error is empty section name, - // but if it's empty, this piece of code won't be executed. 
+ if name == "" { + name = DefaultSection + } sec, _ = f.NewSection(name) return sec } diff --git a/vendor/gopkg.in/ini.v1/parser.go b/vendor/gopkg.in/ini.v1/parser.go index b8b5aa86a..ac1c980ab 100644 --- a/vendor/gopkg.in/ini.v1/parser.go +++ b/vendor/gopkg.in/ini.v1/parser.go @@ -302,15 +302,9 @@ func (p *parser) readPythonMultilines(line string, bufferSize int) (string, erro parserBufferPeekResult, _ := p.buf.Peek(bufferSize) peekBuffer := bytes.NewBuffer(parserBufferPeekResult) - indentSize := 0 for { peekData, peekErr := peekBuffer.ReadBytes('\n') - if peekErr != nil { - if peekErr == io.EOF { - p.debug("readPythonMultilines: io.EOF, peekData: %q, line: %q", string(peekData), line) - return line, nil - } - + if peekErr != nil && peekErr != io.EOF { p.debug("readPythonMultilines: failed to peek with error: %v", peekErr) return "", peekErr } @@ -329,19 +323,6 @@ func (p *parser) readPythonMultilines(line string, bufferSize int) (string, erro return line, nil } - // Determine indent size and line prefix. - currentIndentSize := len(peekMatches[1]) - if indentSize < 1 { - indentSize = currentIndentSize - p.debug("readPythonMultilines: indent size is %d", indentSize) - } - - // Make sure each line is indented at least as far as first line. - if currentIndentSize < indentSize { - p.debug("readPythonMultilines: end of value, current indent: %d, expected indent: %d, line: %q", currentIndentSize, indentSize, line) - return line, nil - } - // Advance the parser reader (buffer) in-sync with the peek buffer. _, err := p.buf.Discard(len(peekData)) if err != nil { @@ -349,8 +330,7 @@ func (p *parser) readPythonMultilines(line string, bufferSize int) (string, erro return "", err } - // Handle indented empty line. - line += "\n" + peekMatches[1][indentSize:] + peekMatches[2] + line += "\n" + peekMatches[0] } } @@ -461,6 +441,8 @@ func (f *File) parse(reader io.Reader) (err error) { // Reset auto-counter and comments p.comment.Reset() p.count = 1 + // Nested values can't span sections + isLastValueEmpty = false inUnparseableSection = false for i := range f.options.UnparseableSections { diff --git a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go index 4d49d1327..b7386352e 100644 --- a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go +++ b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go @@ -132,6 +132,13 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ return mightReturnNil(v.X) case *ir.Convert: return mightReturnNil(v.X) + case *ir.SliceToArrayPointer: + if v.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Array).Len() == 0 { + return mightReturnNil(v.X) + } else { + // converting a slice to an array pointer of length > 0 panics if the slice is nil + return neverNil + } case *ir.Slice: return mightReturnNil(v.X) case *ir.Phi: diff --git a/vendor/honnef.co/go/tools/go/ir/UPSTREAM b/vendor/honnef.co/go/tools/go/ir/UPSTREAM index b12782503..757ebfd59 100644 --- a/vendor/honnef.co/go/tools/go/ir/UPSTREAM +++ b/vendor/honnef.co/go/tools/go/ir/UPSTREAM @@ -5,5 +5,5 @@ The changes are too many to list here, and it is best to consider this package i Upstream changes still get applied when they address bugs in portions of code we have inherited. 
The last upstream commit we've looked at was: -640c1dea83015e5271a001c99370762fc63dc280 +915f6209478fe61eb90dbe155a8a1c58655b931f diff --git a/vendor/honnef.co/go/tools/go/ir/doc.go b/vendor/honnef.co/go/tools/go/ir/doc.go index 0765d439e..7158a0aec 100644 --- a/vendor/honnef.co/go/tools/go/ir/doc.go +++ b/vendor/honnef.co/go/tools/go/ir/doc.go @@ -49,55 +49,56 @@ // concrete type which of these interfaces it implements. // // Value? Instruction? Member? -// *Alloc ✔ ✔ -// *BinOp ✔ ✔ -// *BlankStore ✔ -// *Builtin ✔ -// *Call ✔ ✔ -// *ChangeInterface ✔ ✔ -// *ChangeType ✔ ✔ -// *Const ✔ ✔ -// *Convert ✔ ✔ -// *DebugRef ✔ -// *Defer ✔ ✔ -// *Extract ✔ ✔ -// *Field ✔ ✔ -// *FieldAddr ✔ ✔ -// *FreeVar ✔ -// *Function ✔ ✔ (func) -// *Global ✔ ✔ (var) -// *Go ✔ ✔ -// *If ✔ -// *Index ✔ ✔ -// *IndexAddr ✔ ✔ -// *Jump ✔ -// *Load ✔ ✔ -// *MakeChan ✔ ✔ -// *MakeClosure ✔ ✔ -// *MakeInterface ✔ ✔ -// *MakeMap ✔ ✔ -// *MakeSlice ✔ ✔ -// *MapLookup ✔ ✔ -// *MapUpdate ✔ ✔ -// *NamedConst ✔ (const) -// *Next ✔ ✔ -// *Panic ✔ -// *Parameter ✔ ✔ -// *Phi ✔ ✔ -// *Range ✔ ✔ -// *Recv ✔ ✔ -// *Return ✔ -// *RunDefers ✔ -// *Select ✔ ✔ -// *Send ✔ ✔ -// *Sigma ✔ ✔ -// *Slice ✔ ✔ -// *Store ✔ ✔ -// *StringLookup ✔ ✔ -// *Type ✔ (type) -// *TypeAssert ✔ ✔ -// *UnOp ✔ ✔ -// *Unreachable ✔ +// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *BlankStore ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Load ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapLookup ✔ ✔ +// *MapUpdate ✔ ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Recv ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ ✔ +// *Sigma ✔ ✔ +// *Slice ✔ ✔ +// *SliceToArrayPointer ✔ ✔ +// *Store ✔ ✔ +// *StringLookup ✔ ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// *Unreachable ✔ // // Other key types in this package include: Program, Package, Function // and BasicBlock. diff --git a/vendor/honnef.co/go/tools/go/ir/emit.go b/vendor/honnef.co/go/tools/go/ir/emit.go index f7629646a..49a9778d3 100644 --- a/vendor/honnef.co/go/tools/go/ir/emit.go +++ b/vendor/honnef.co/go/tools/go/ir/emit.go @@ -239,8 +239,8 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value { // Conversion from slice to array pointer? 
if slice, ok := ut_src.(*types.Slice); ok { if ptr, ok := ut_dst.(*types.Pointer); ok { - if arr, ok := ptr.Elem().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { - c := &Convert{X: val} + if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { + c := &SliceToArrayPointer{X: val} c.setType(ut_dst) return f.emit(c, source) } diff --git a/vendor/honnef.co/go/tools/go/ir/exits.go b/vendor/honnef.co/go/tools/go/ir/exits.go index 0abf58089..851b62c4f 100644 --- a/vendor/honnef.co/go/tools/go/ir/exits.go +++ b/vendor/honnef.co/go/tools/go/ir/exits.go @@ -19,6 +19,32 @@ func (b *builder) buildExits(fn *Function) { fn.NoReturn = AlwaysUnwinds return } + case "go.uber.org/zap": + switch obj.(*types.Func).FullName() { + case "(*go.uber.org/zap.Logger).Fatal", + "(*go.uber.org/zap.SugaredLogger).Fatal", + "(*go.uber.org/zap.SugaredLogger).Fatalw", + "(*go.uber.org/zap.SugaredLogger).Fatalf": + // Technically, this method does not unconditionally exit + // the process. It dynamically calls a function stored in + // the logger. If the function is nil, it defaults to + // os.Exit. + // + // The main intent of this method is to terminate the + // process, and that's what the vast majority of people + // will use it for. We'll happily accept some false + // negatives to avoid a lot of false positives. + fn.NoReturn = AlwaysExits + case "(*go.uber.org/zap.Logger).Panic", + "(*go.uber.org/zap.SugaredLogger).Panicw", + "(*go.uber.org/zap.SugaredLogger).Panicf": + fn.NoReturn = AlwaysUnwinds + return + case "(*go.uber.org/zap.Logger).DPanic", + "(*go.uber.org/zap.SugaredLogger).DPanicf", + "(*go.uber.org/zap.SugaredLogger).DPanicw": + // These methods will only panic in development. + } case "github.com/sirupsen/logrus": switch obj.(*types.Func).FullName() { case "(*github.com/sirupsen/logrus.Logger).Exit": @@ -71,6 +97,19 @@ func (b *builder) buildExits(fn *Function) { // all of these call os.Exit after logging fn.NoReturn = AlwaysExits } + case "k8s.io/klog": + switch obj.(*types.Func).FullName() { + case "k8s.io/klog.Exit", + "k8s.io/klog.ExitDepth", + "k8s.io/klog.Exitf", + "k8s.io/klog.Exitln", + "k8s.io/klog.Fatal", + "k8s.io/klog.FatalDepth", + "k8s.io/klog.Fatalf", + "k8s.io/klog.Fatalln": + // all of these call os.Exit after logging + fn.NoReturn = AlwaysExits + } } } diff --git a/vendor/honnef.co/go/tools/go/ir/print.go b/vendor/honnef.co/go/tools/go/ir/print.go index f6ed431b6..befba6617 100644 --- a/vendor/honnef.co/go/tools/go/ir/print.go +++ b/vendor/honnef.co/go/tools/go/ir/print.go @@ -162,10 +162,11 @@ func printConv(prefix string, v, x Value) string { relName(x, v.(Instruction))) } -func (v *ChangeType) String() string { return printConv("ChangeType", v, v.X) } -func (v *Convert) String() string { return printConv("Convert", v, v.X) } -func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) } -func (v *MakeInterface) String() string { return printConv("MakeInterface", v, v.X) } +func (v *ChangeType) String() string { return printConv("ChangeType", v, v.X) } +func (v *Convert) String() string { return printConv("Convert", v, v.X) } +func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) } +func (v *SliceToArrayPointer) String() string { return printConv("SliceToArrayPointer", v, v.X) } +func (v *MakeInterface) String() string { return printConv("MakeInterface", v, v.X) } func (v *MakeClosure) String() string { from := v.Parent().pkg() diff --git 
a/vendor/honnef.co/go/tools/go/ir/sanity.go b/vendor/honnef.co/go/tools/go/ir/sanity.go index 1d4c5f74a..a8b82d638 100644 --- a/vendor/honnef.co/go/tools/go/ir/sanity.go +++ b/vendor/honnef.co/go/tools/go/ir/sanity.go @@ -141,14 +141,8 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *Call: case *ChangeInterface: case *ChangeType: + case *SliceToArrayPointer: case *Convert: - if _, ok := instr.X.Type().Underlying().(*types.Slice); ok { - if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { - if _, ok := ptr.Elem().(*types.Array); ok { - break - } - } - } if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { if _, ok := instr.Type().Underlying().(*types.Basic); !ok { s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) diff --git a/vendor/honnef.co/go/tools/go/ir/ssa.go b/vendor/honnef.co/go/tools/go/ir/ssa.go index 6dfdfcd80..3d089e0c3 100644 --- a/vendor/honnef.co/go/tools/go/ir/ssa.go +++ b/vendor/honnef.co/go/tools/go/ir/ssa.go @@ -729,9 +729,10 @@ type ChangeType struct { // - between pointers and unsafe.Pointer. // - between unsafe.Pointer and uintptr. // - from (Unicode) integer to (UTF-8) string. -// - from slice to array pointer. // A conversion may imply a type name change also. // +// This operation cannot fail dynamically. +// // Conversions of untyped string/number/bool constants to a specific // representation are eliminated during IR construction. // @@ -763,6 +764,20 @@ type ChangeInterface struct { X Value } +// The SliceToArrayPointer instruction yields the conversion of slice X to +// array pointer. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = SliceToArrayPointer <*[4]byte> t1 +// +type SliceToArrayPointer struct { + register + X Value +} + // MakeInterface constructs an instance of an interface type from a // value of a concrete type. // @@ -1731,6 +1746,10 @@ func (v *Convert) Operands(rands []*Value) []*Value { return append(rands, &v.X) } +func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + func (s *DebugRef) Operands(rands []*Value) []*Value { return append(rands, &s.X) } diff --git a/vendor/honnef.co/go/tools/simple/doc.go b/vendor/honnef.co/go/tools/simple/doc.go index ddc1b8d35..c7546cf59 100644 --- a/vendor/honnef.co/go/tools/simple/doc.go +++ b/vendor/honnef.co/go/tools/simple/doc.go @@ -248,8 +248,7 @@ After: "S1019": { Title: "Simplify `make` call by omitting redundant arguments", Text: `The \'make\' function has default values for the length and capacity -arguments. For channels and maps, the length defaults to zero. -Additionally, for slices the capacity defaults to the length.`, +arguments. 
For channels, the length defaults to zero, and for slices, the capacity defaults to the length.`, Since: "2017.1", }, diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go index 4555d709f..c7acd85c8 100644 --- a/vendor/honnef.co/go/tools/simple/lint.go +++ b/vendor/honnef.co/go/tools/simple/lint.go @@ -1174,7 +1174,8 @@ func CheckMakeLenCap(pass *analysis.Pass) (interface{}, error) { if m, ok := code.Match(pass, checkMakeLenCapQ1, node); ok { T := m.State["typ"].(ast.Expr) size := m.State["size"].(ast.Node) - if _, ok := pass.TypesInfo.TypeOf(T).Underlying().(*types.Slice); ok { + switch pass.TypesInfo.TypeOf(T).Underlying().(type) { + case *types.Slice, *types.Map: return } report.Report(pass, size, fmt.Sprintf("should use make(%s) instead", report.Render(pass, T)), report.FilterGenerated()) diff --git a/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go b/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go new file mode 100644 index 000000000..f5e6c4010 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go @@ -0,0 +1,373 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains a modified copy of the encoding/json encoder. +// All dynamic behavior has been removed, and reflecttion has been replaced with go/types. +// This allows us to statically find unmarshable types +// with the same rules for tags, shadowing and addressability as encoding/json. +// This is used for SA1026. + +package fakejson + +import ( + "go/token" + "go/types" + "sort" + "strings" + "unicode" + + "honnef.co/go/tools/staticcheck/fakereflect" +) + +// parseTag splits a struct field's json tag into its name and +// comma-separated options. +func parseTag(tag string) string { + if idx := strings.Index(tag, ","); idx != -1 { + return tag[:idx] + } + return tag +} + +func Marshal(v types.Type) *UnsupportedTypeError { + enc := encoder{ + seen: map[fakereflect.TypeAndCanAddr]struct{}{}, + } + return enc.newTypeEncoder(fakereflect.TypeAndCanAddr{Type: v}, "x") +} + +// An UnsupportedTypeError is returned by Marshal when attempting +// to encode an unsupported value type. 
+type UnsupportedTypeError struct { + Type types.Type + Path string +} + +var marshalerType = types.NewInterfaceType([]*types.Func{ + types.NewFunc(token.NoPos, nil, "MarshalJSON", types.NewSignature(nil, + types.NewTuple(), + types.NewTuple( + types.NewVar(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), + types.NewVar(0, nil, "", types.Universe.Lookup("error").Type())), + false, + )), +}, nil).Complete() + +var textMarshalerType = types.NewInterfaceType([]*types.Func{ + types.NewFunc(token.NoPos, nil, "MarshalText", types.NewSignature(nil, + types.NewTuple(), + types.NewTuple( + types.NewVar(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), + types.NewVar(0, nil, "", types.Universe.Lookup("error").Type())), + false, + )), +}, nil).Complete() + +type encoder struct { + seen map[fakereflect.TypeAndCanAddr]struct{} +} + +func (enc *encoder) newTypeEncoder(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { + if _, ok := enc.seen[t]; ok { + return nil + } + enc.seen[t] = struct{}{} + + if t.Implements(marshalerType) { + return nil + } + if !t.IsPtr() && t.CanAddr() && fakereflect.PtrTo(t).Implements(marshalerType) { + return nil + } + if t.Implements(textMarshalerType) { + return nil + } + if !t.IsPtr() && t.CanAddr() && fakereflect.PtrTo(t).Implements(textMarshalerType) { + return nil + } + + switch t.Type.Underlying().(type) { + case *types.Basic, *types.Interface: + return nil + case *types.Struct: + return enc.typeFields(t, stack) + case *types.Map: + return enc.newMapEncoder(t, stack) + case *types.Slice: + return enc.newSliceEncoder(t, stack) + case *types.Array: + return enc.newArrayEncoder(t, stack) + case *types.Pointer: + // we don't have to express the pointer dereference in the path; x.f is syntactic sugar for (*x).f + return enc.newTypeEncoder(t.Elem(), stack) + default: + return &UnsupportedTypeError{t.Type, stack} + } +} + +func (enc *encoder) newMapEncoder(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { + switch t.Key().Type.Underlying().(type) { + case *types.Basic: + default: + if !t.Key().Implements(textMarshalerType) { + return &UnsupportedTypeError{ + Type: t.Type, + Path: stack, + } + } + } + return enc.newTypeEncoder(t.Elem(), stack+"[k]") +} + +func (enc *encoder) newSliceEncoder(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { + // Byte slices get special treatment; arrays don't. + basic, ok := t.Elem().Type.Underlying().(*types.Basic) + if ok && basic.Kind() == types.Uint8 { + p := fakereflect.PtrTo(t.Elem()) + if !p.Implements(marshalerType) && !p.Implements(textMarshalerType) { + return nil + } + } + return enc.newArrayEncoder(t, stack) +} + +func (enc *encoder) newArrayEncoder(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { + return enc.newTypeEncoder(t.Elem(), stack+"[0]") +} + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:;<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. 
+ case !unicode.IsLetter(c) && !unicode.IsDigit(c): + return false + } + } + return true +} + +func typeByIndex(t fakereflect.TypeAndCanAddr, index []int) fakereflect.TypeAndCanAddr { + for _, i := range index { + if t.IsPtr() { + t = t.Elem() + } + t = t.Field(i).Type + } + return t +} + +func pathByIndex(t fakereflect.TypeAndCanAddr, index []int) string { + path := "" + for _, i := range index { + if t.IsPtr() { + t = t.Elem() + } + path += "." + t.Field(i).Name + t = t.Field(i).Type + } + return path +} + +// A field represents a single field found in a struct. +type field struct { + name string + + tag bool + index []int + typ fakereflect.TypeAndCanAddr +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that JSON should recognize for the given type. +// The algorithm is breadth-first search over the set of structs to include - the top struct +// and then any reachable anonymous structs. +func (enc *encoder) typeFields(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + var count, nextCount map[fakereflect.TypeAndCanAddr]int + + // Types already visited at an earlier level. + visited := map[fakereflect.TypeAndCanAddr]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[fakereflect.TypeAndCanAddr]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.Anonymous { + t := sf.Type + if t.IsPtr() { + t = t.Elem() + } + if !sf.IsExported() && !t.IsStruct() { + // Ignore embedded fields of unexported non-struct types. + continue + } + // Do not ignore embedded fields of unexported struct types + // since they may have exported fields. + } else if !sf.IsExported() { + // Ignore unexported non-embedded fields. + continue + } + tag := sf.Tag.Get("json") + if tag == "-" { + continue + } + name := parseTag(tag) + if !isValidTag(name) { + name = "" + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.IsPtr() { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if name != "" || !sf.Anonymous || !ft.IsStruct() { + tagged := name != "" + if name == "" { + name = sf.Name + } + field := field{ + name: name, + tag: tagged, + index: index, + typ: ft, + } + + fields = append(fields, field) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. 
+ nextCount[ft]++ + if nextCount[ft] == 1 { + next = append(next, field{name: ft.Name(), index: index, typ: ft}) + } + } + } + } + + sort.Slice(fields, func(i, j int) bool { + x := fields + // sort field by name, breaking ties with depth, then + // breaking ties with "name came from json tag", then + // breaking ties with index sequence. + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) + }) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with JSON tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + for i := range fields { + f := &fields[i] + err := enc.newTypeEncoder(typeByIndex(t, f.index), stack+pathByIndex(t, f.index)) + if err != nil { + return err + } + } + return nil +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// JSON tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order, then by presence of tag. + // That means that the first field is the dominant one. We need only check + // for error cases: two fields at top level, either both tagged or neither tagged. 
+ if len(fields) > 1 && len(fields[0].index) == len(fields[1].index) && fields[0].tag == fields[1].tag { + return field{}, false + } + return fields[0], true +} diff --git a/vendor/honnef.co/go/tools/staticcheck/fakereflect/fakereflect.go b/vendor/honnef.co/go/tools/staticcheck/fakereflect/fakereflect.go new file mode 100644 index 000000000..7f8fd4799 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/fakereflect/fakereflect.go @@ -0,0 +1,131 @@ +package fakereflect + +import ( + "fmt" + "go/types" + "reflect" +) + +type TypeAndCanAddr struct { + Type types.Type + canAddr bool +} + +type StructField struct { + Index []int + Name string + Anonymous bool + Tag reflect.StructTag + f *types.Var + Type TypeAndCanAddr +} + +func (sf StructField) IsExported() bool { return sf.f.Exported() } + +func (t TypeAndCanAddr) Field(i int) StructField { + st := t.Type.Underlying().(*types.Struct) + f := st.Field(i) + return StructField{ + f: f, + Index: []int{i}, + Name: f.Name(), + Anonymous: f.Anonymous(), + Tag: reflect.StructTag(st.Tag(i)), + Type: TypeAndCanAddr{ + Type: f.Type(), + canAddr: t.canAddr, + }, + } +} + +func (t TypeAndCanAddr) FieldByIndex(index []int) StructField { + f := t.Field(index[0]) + for _, idx := range index[1:] { + f = f.Type.Field(idx) + } + f.Index = index + return f +} + +func PtrTo(t TypeAndCanAddr) TypeAndCanAddr { + // Note that we don't care about canAddr here because it's irrelevant to all uses of PtrTo + return TypeAndCanAddr{Type: types.NewPointer(t.Type)} +} + +func (t TypeAndCanAddr) CanAddr() bool { return t.canAddr } + +func (t TypeAndCanAddr) Implements(ityp *types.Interface) bool { + return types.Implements(t.Type, ityp) +} + +func (t TypeAndCanAddr) IsSlice() bool { + _, ok := t.Type.Underlying().(*types.Slice) + return ok +} + +func (t TypeAndCanAddr) IsArray() bool { + _, ok := t.Type.Underlying().(*types.Array) + return ok +} + +func (t TypeAndCanAddr) IsPtr() bool { + _, ok := t.Type.Underlying().(*types.Pointer) + return ok +} + +func (t TypeAndCanAddr) IsInterface() bool { + _, ok := t.Type.Underlying().(*types.Interface) + return ok +} + +func (t TypeAndCanAddr) IsStruct() bool { + _, ok := t.Type.Underlying().(*types.Struct) + return ok +} + +func (t TypeAndCanAddr) Name() string { + named, ok := t.Type.(*types.Named) + if !ok { + return "" + } + return named.Obj().Name() +} + +func (t TypeAndCanAddr) NumField() int { + return t.Type.Underlying().(*types.Struct).NumFields() +} + +func (t TypeAndCanAddr) String() string { + return t.Type.String() +} + +func (t TypeAndCanAddr) Key() TypeAndCanAddr { + return TypeAndCanAddr{Type: t.Type.Underlying().(*types.Map).Key()} +} + +func (t TypeAndCanAddr) Elem() TypeAndCanAddr { + switch typ := t.Type.Underlying().(type) { + case *types.Pointer: + return TypeAndCanAddr{ + Type: typ.Elem(), + canAddr: true, + } + case *types.Slice: + return TypeAndCanAddr{ + Type: typ.Elem(), + canAddr: true, + } + case *types.Array: + return TypeAndCanAddr{ + Type: typ.Elem(), + canAddr: t.canAddr, + } + case *types.Map: + return TypeAndCanAddr{ + Type: typ.Elem(), + canAddr: false, + } + default: + panic(fmt.Sprintf("unhandled type %T", typ)) + } +} diff --git a/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go b/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go new file mode 100644 index 000000000..a5ff21d62 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go @@ -0,0 +1,375 @@ +// Copyright 2011 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains a modified copy of the encoding/xml encoder. +// All dynamic behavior has been removed, and reflecttion has been replaced with go/types. +// This allows us to statically find unmarshable types +// with the same rules for tags, shadowing and addressability as encoding/xml. +// This is used for SA1026 and SA5008. + +// NOTE(dh): we do not check CanInterface in various places, which means we'll accept more marshaler implementations than encoding/xml does. This will lead to a small amount of false negatives. + +package fakexml + +import ( + "fmt" + "go/token" + "go/types" + + "honnef.co/go/tools/go/types/typeutil" + "honnef.co/go/tools/staticcheck/fakereflect" +) + +func Marshal(v types.Type) error { + return NewEncoder().Encode(v) +} + +type Encoder struct { + seen map[fakereflect.TypeAndCanAddr]struct{} +} + +func NewEncoder() *Encoder { + e := &Encoder{ + seen: map[fakereflect.TypeAndCanAddr]struct{}{}, + } + return e +} + +func (enc *Encoder) Encode(v types.Type) error { + rv := fakereflect.TypeAndCanAddr{Type: v} + return enc.marshalValue(rv, nil, nil, "x") +} + +func implementsMarshaler(v fakereflect.TypeAndCanAddr) bool { + t := v.Type + named, ok := t.(*types.Named) + if !ok { + return false + } + obj, _, _ := types.LookupFieldOrMethod(named, false, nil, "MarshalXML") + if obj == nil { + return false + } + fn, ok := obj.(*types.Func) + if !ok { + return false + } + params := fn.Type().(*types.Signature).Params() + if params.Len() != 2 { + return false + } + if !typeutil.IsType(params.At(0).Type(), "*encoding/xml.Encoder") { + return false + } + if !typeutil.IsType(params.At(1).Type(), "encoding/xml.StartElement") { + return false + } + rets := fn.Type().(*types.Signature).Results() + if rets.Len() != 1 { + return false + } + if !typeutil.IsType(rets.At(0).Type(), "error") { + return false + } + return true +} + +func implementsMarshalerAttr(v fakereflect.TypeAndCanAddr) bool { + t := v.Type + named, ok := t.(*types.Named) + if !ok { + return false + } + obj, _, _ := types.LookupFieldOrMethod(named, false, nil, "MarshalXMLAttr") + if obj == nil { + return false + } + fn, ok := obj.(*types.Func) + if !ok { + return false + } + params := fn.Type().(*types.Signature).Params() + if params.Len() != 1 { + return false + } + if !typeutil.IsType(params.At(0).Type(), "encoding/xml.Name") { + return false + } + rets := fn.Type().(*types.Signature).Results() + if rets.Len() != 2 { + return false + } + if !typeutil.IsType(rets.At(0).Type(), "encoding/xml.Attr") { + return false + } + if !typeutil.IsType(rets.At(1).Type(), "error") { + return false + } + return true +} + +var textMarshalerType = types.NewInterfaceType([]*types.Func{ + types.NewFunc(token.NoPos, nil, "MarshalText", types.NewSignature(nil, + types.NewTuple(), + types.NewTuple( + types.NewVar(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), + types.NewVar(0, nil, "", types.Universe.Lookup("error").Type())), + false, + )), +}, nil).Complete() + +var N = 0 + +// marshalValue writes one or more XML elements representing val. +// If val was obtained from a struct field, finfo must have its details. +func (e *Encoder) marshalValue(val fakereflect.TypeAndCanAddr, finfo *fieldInfo, startTemplate *StartElement, stack string) error { + if _, ok := e.seen[val]; ok { + return nil + } + e.seen[val] = struct{}{} + + // Drill into interfaces and pointers. 
+ for val.IsInterface() || val.IsPtr() { + if val.IsInterface() { + return nil + } + val = val.Elem() + } + + // Check for marshaler. + if implementsMarshaler(val) { + return nil + } + if val.CanAddr() { + pv := fakereflect.PtrTo(val) + if implementsMarshaler(pv) { + return nil + } + } + + // Check for text marshaler. + if val.Implements(textMarshalerType) { + return nil + } + if val.CanAddr() { + pv := fakereflect.PtrTo(val) + if pv.Implements(textMarshalerType) { + return nil + } + } + + // Slices and arrays iterate over the elements. They do not have an enclosing tag. + if (val.IsSlice() || val.IsArray()) && !isByteArray(val) && !isByteSlice(val) { + if err := e.marshalValue(val.Elem(), finfo, startTemplate, stack+"[0]"); err != nil { + return err + } + return nil + } + + tinfo, err := getTypeInfo(val) + if err != nil { + return err + } + + // Create start element. + // Precedence for the XML element name is: + // 0. startTemplate + // 1. XMLName field in underlying struct; + // 2. field name/tag in the struct field; and + // 3. type name + var start StartElement + + if startTemplate != nil { + start.Name = startTemplate.Name + start.Attr = append(start.Attr, startTemplate.Attr...) + } else if tinfo.xmlname != nil { + xmlname := tinfo.xmlname + if xmlname.name != "" { + start.Name.Space, start.Name.Local = xmlname.xmlns, xmlname.name + } + } + + // Attributes + for i := range tinfo.fields { + finfo := &tinfo.fields[i] + if finfo.flags&fAttr == 0 { + continue + } + fv := finfo.value(val) + + name := Name{Space: finfo.xmlns, Local: finfo.name} + if err := e.marshalAttr(&start, name, fv, stack+pathByIndex(val, finfo.idx)); err != nil { + return err + } + } + + if val.IsStruct() { + return e.marshalStruct(tinfo, val, stack) + } else { + return e.marshalSimple(val, stack) + } +} + +func isSlice(v fakereflect.TypeAndCanAddr) bool { + _, ok := v.Type.Underlying().(*types.Slice) + return ok +} + +func isByteSlice(v fakereflect.TypeAndCanAddr) bool { + slice, ok := v.Type.Underlying().(*types.Slice) + if !ok { + return false + } + basic, ok := slice.Elem().Underlying().(*types.Basic) + if !ok { + return false + } + return basic.Kind() == types.Uint8 +} + +func isByteArray(v fakereflect.TypeAndCanAddr) bool { + slice, ok := v.Type.Underlying().(*types.Array) + if !ok { + return false + } + basic, ok := slice.Elem().Underlying().(*types.Basic) + if !ok { + return false + } + return basic.Kind() == types.Uint8 +} + +// marshalAttr marshals an attribute with the given name and value, adding to start.Attr. +func (e *Encoder) marshalAttr(start *StartElement, name Name, val fakereflect.TypeAndCanAddr, stack string) error { + if implementsMarshalerAttr(val) { + return nil + } + + if val.CanAddr() { + pv := fakereflect.PtrTo(val) + if implementsMarshalerAttr(pv) { + return nil + } + } + + if val.Implements(textMarshalerType) { + return nil + } + + if val.CanAddr() { + pv := fakereflect.PtrTo(val) + if pv.Implements(textMarshalerType) { + return nil + } + } + + // Dereference or skip nil pointer + if val.IsPtr() { + val = val.Elem() + } + + // Walk slices. 
+ if isSlice(val) && !isByteSlice(val) { + if err := e.marshalAttr(start, name, val.Elem(), stack+"[0]"); err != nil { + return err + } + return nil + } + + if typeutil.IsType(val.Type, "encoding/xml.Attr") { + return nil + } + + return e.marshalSimple(val, stack) +} + +func (e *Encoder) marshalSimple(val fakereflect.TypeAndCanAddr, stack string) error { + switch val.Type.Underlying().(type) { + case *types.Basic, *types.Interface: + return nil + case *types.Slice, *types.Array: + basic, ok := val.Elem().Type.Underlying().(*types.Basic) + if !ok || basic.Kind() != types.Uint8 { + return &UnsupportedTypeError{val.Type, stack} + } + return nil + default: + return &UnsupportedTypeError{val.Type, stack} + } +} + +func indirect(vf fakereflect.TypeAndCanAddr) fakereflect.TypeAndCanAddr { + for vf.IsPtr() { + vf = vf.Elem() + } + return vf +} + +func pathByIndex(t fakereflect.TypeAndCanAddr, index []int) string { + path := "" + for _, i := range index { + if t.IsPtr() { + t = t.Elem() + } + path += "." + t.Field(i).Name + t = t.Field(i).Type + } + return path +} + +func (e *Encoder) marshalStruct(tinfo *typeInfo, val fakereflect.TypeAndCanAddr, stack string) error { + for i := range tinfo.fields { + finfo := &tinfo.fields[i] + if finfo.flags&fAttr != 0 { + continue + } + vf := finfo.value(val) + + switch finfo.flags & fMode { + case fCDATA, fCharData: + if vf.Implements(textMarshalerType) { + continue + } + if vf.CanAddr() { + pv := fakereflect.PtrTo(vf) + if pv.Implements(textMarshalerType) { + continue + } + } + + vf = indirect(vf) + continue + + case fComment: + vf = indirect(vf) + if !(isByteSlice(vf) || isByteArray(vf)) { + return fmt.Errorf("xml: bad type for comment field of %s", val) + } + continue + + case fInnerXML: + vf = indirect(vf) + if typeutil.IsType(vf.Type, "[]byte") || typeutil.IsType(vf.Type, "string") { + continue + } + + case fElement, fElement | fAny: + } + if err := e.marshalValue(vf, finfo, nil, stack+pathByIndex(val, finfo.idx)); err != nil { + return err + } + } + return nil +} + +// UnsupportedTypeError is returned when Marshal encounters a type +// that cannot be converted into XML. +type UnsupportedTypeError struct { + Type types.Type + Path string +} + +func (e *UnsupportedTypeError) Error() string { + return fmt.Sprintf("xml: unsupported type %s, via %s ", e.Type, e.Path) +} diff --git a/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go b/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go new file mode 100644 index 000000000..63f48c418 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go @@ -0,0 +1,383 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fakexml + +import ( + "fmt" + "go/types" + "strconv" + "strings" + "sync" + + "honnef.co/go/tools/staticcheck/fakereflect" +) + +// typeInfo holds details for the xml representation of a type. +type typeInfo struct { + xmlname *fieldInfo + fields []fieldInfo +} + +// fieldInfo holds details for the xml representation of a single field. 
+type fieldInfo struct { + idx []int + name string + xmlns string + flags fieldFlags + parents []string +} + +type fieldFlags int + +const ( + fElement fieldFlags = 1 << iota + fAttr + fCDATA + fCharData + fInnerXML + fComment + fAny + + fOmitEmpty + + fMode = fElement | fAttr | fCDATA | fCharData | fInnerXML | fComment | fAny + + xmlName = "XMLName" +) + +func (f fieldFlags) String() string { + switch f { + case fAttr: + return "attr" + case fCDATA: + return "cdata" + case fCharData: + return "chardata" + case fInnerXML: + return "innerxml" + case fComment: + return "comment" + case fAny: + return "any" + case fOmitEmpty: + return "omitempty" + case fAny | fAttr: + return "any,attr" + default: + return strconv.Itoa(int(f)) + } +} + +var tinfoMap sync.Map // map[reflect.Type]*typeInfo + +// getTypeInfo returns the typeInfo structure with details necessary +// for marshaling and unmarshaling typ. +func getTypeInfo(typ fakereflect.TypeAndCanAddr) (*typeInfo, error) { + if ti, ok := tinfoMap.Load(typ); ok { + return ti.(*typeInfo), nil + } + + tinfo := &typeInfo{} + named, ok := typ.Type.(*types.Named) + if typ.IsStruct() && !(ok && named.Obj().Pkg().Path() == "encoding/xml" && named.Obj().Name() == "Name") { + n := typ.NumField() + for i := 0; i < n; i++ { + f := typ.Field(i) + if (!f.IsExported() && !f.Anonymous) || f.Tag.Get("xml") == "-" { + continue // Private field + } + + // For embedded structs, embed its fields. + if f.Anonymous { + t := f.Type + if t.IsPtr() { + t = t.Elem() + } + if t.IsStruct() { + inner, err := getTypeInfo(t) + if err != nil { + return nil, err + } + if tinfo.xmlname == nil { + tinfo.xmlname = inner.xmlname + } + for _, finfo := range inner.fields { + finfo.idx = append([]int{i}, finfo.idx...) + if err := addFieldInfo(typ, tinfo, &finfo); err != nil { + return nil, err + } + } + continue + } + } + + finfo, err := StructFieldInfo(f) + if err != nil { + return nil, err + } + + if f.Name == xmlName { + tinfo.xmlname = finfo + continue + } + + // Add the field if it doesn't conflict with other fields. + if err := addFieldInfo(typ, tinfo, finfo); err != nil { + return nil, err + } + } + } + + ti, _ := tinfoMap.LoadOrStore(typ, tinfo) + return ti.(*typeInfo), nil +} + +// StructFieldInfo builds and returns a fieldInfo for f. +func StructFieldInfo(f fakereflect.StructField) (*fieldInfo, error) { + finfo := &fieldInfo{idx: f.Index} + + // Split the tag from the xml namespace if necessary. + tag := f.Tag.Get("xml") + if i := strings.Index(tag, " "); i >= 0 { + finfo.xmlns, tag = tag[:i], tag[i+1:] + } + + // Parse flags. + tokens := strings.Split(tag, ",") + if len(tokens) == 1 { + finfo.flags = fElement + } else { + tag = tokens[0] + for _, flag := range tokens[1:] { + switch flag { + case "attr": + finfo.flags |= fAttr + case "cdata": + finfo.flags |= fCDATA + case "chardata": + finfo.flags |= fCharData + case "innerxml": + finfo.flags |= fInnerXML + case "comment": + finfo.flags |= fComment + case "any": + finfo.flags |= fAny + case "omitempty": + finfo.flags |= fOmitEmpty + } + } + + // Validate the flags used. + switch mode := finfo.flags & fMode; mode { + case 0: + finfo.flags |= fElement + case fAttr, fCDATA, fCharData, fInnerXML, fComment, fAny, fAny | fAttr: + if f.Name == xmlName { + return nil, fmt.Errorf("cannot use option %s on XMLName field", mode) + } else if tag != "" && mode != fAttr { + return nil, fmt.Errorf("cannot specify name together with option ,%s", mode) + } + default: + // This will also catch multiple modes in a single field. 
+ return nil, fmt.Errorf("invalid combination of options: %q", f.Tag.Get("xml")) + } + if finfo.flags&fMode == fAny { + finfo.flags |= fElement + } + if finfo.flags&fOmitEmpty != 0 && finfo.flags&(fElement|fAttr) == 0 { + return nil, fmt.Errorf("can only use omitempty on elements and attributes") + } + } + + // Use of xmlns without a name is not allowed. + if finfo.xmlns != "" && tag == "" { + return nil, fmt.Errorf("namespace without name: %q", f.Tag.Get("xml")) + } + + if f.Name == xmlName { + // The XMLName field records the XML element name. Don't + // process it as usual because its name should default to + // empty rather than to the field name. + finfo.name = tag + return finfo, nil + } + + if tag == "" { + // If the name part of the tag is completely empty, get + // default from XMLName of underlying struct if feasible, + // or field name otherwise. + if xmlname := lookupXMLName(f.Type); xmlname != nil { + finfo.xmlns, finfo.name = xmlname.xmlns, xmlname.name + } else { + finfo.name = f.Name + } + return finfo, nil + } + + // Prepare field name and parents. + parents := strings.Split(tag, ">") + if parents[0] == "" { + parents[0] = f.Name + } + if parents[len(parents)-1] == "" { + return nil, fmt.Errorf("trailing '>'") + } + finfo.name = parents[len(parents)-1] + if len(parents) > 1 { + if (finfo.flags & fElement) == 0 { + return nil, fmt.Errorf("%s chain not valid with %s flag", tag, strings.Join(tokens[1:], ",")) + } + finfo.parents = parents[:len(parents)-1] + } + + // If the field type has an XMLName field, the names must match + // so that the behavior of both marshaling and unmarshaling + // is straightforward and unambiguous. + if finfo.flags&fElement != 0 { + ftyp := f.Type + xmlname := lookupXMLName(ftyp) + if xmlname != nil && xmlname.name != finfo.name { + return nil, fmt.Errorf("name %q conflicts with name %q in %s.XMLName", finfo.name, xmlname.name, ftyp) + } + } + return finfo, nil +} + +// lookupXMLName returns the fieldInfo for typ's XMLName field +// in case it exists and has a valid xml field tag, otherwise +// it returns nil. +func lookupXMLName(typ fakereflect.TypeAndCanAddr) (xmlname *fieldInfo) { + for typ.IsPtr() { + typ = typ.Elem() + } + if !typ.IsStruct() { + return nil + } + for i, n := 0, typ.NumField(); i < n; i++ { + f := typ.Field(i) + if f.Name != xmlName { + continue + } + finfo, err := StructFieldInfo(f) + if err == nil && finfo.name != "" { + return finfo + } + // Also consider errors as a non-existent field tag + // and let getTypeInfo itself report the error. + break + } + return nil +} + +func min(a, b int) int { + if a <= b { + return a + } + return b +} + +// addFieldInfo adds finfo to tinfo.fields if there are no +// conflicts, or if conflicts arise from previous fields that were +// obtained from deeper embedded structures than finfo. In the latter +// case, the conflicting entries are dropped. +// A conflict occurs when the path (parent + name) to a field is +// itself a prefix of another path, or when two paths match exactly. +// It is okay for field paths to share a common, shorter prefix. +func addFieldInfo(typ fakereflect.TypeAndCanAddr, tinfo *typeInfo, newf *fieldInfo) error { + var conflicts []int +Loop: + // First, figure all conflicts. Most working code will have none. 
+ for i := range tinfo.fields { + oldf := &tinfo.fields[i] + if oldf.flags&fMode != newf.flags&fMode { + continue + } + if oldf.xmlns != "" && newf.xmlns != "" && oldf.xmlns != newf.xmlns { + continue + } + minl := min(len(newf.parents), len(oldf.parents)) + for p := 0; p < minl; p++ { + if oldf.parents[p] != newf.parents[p] { + continue Loop + } + } + if len(oldf.parents) > len(newf.parents) { + if oldf.parents[len(newf.parents)] == newf.name { + conflicts = append(conflicts, i) + } + } else if len(oldf.parents) < len(newf.parents) { + if newf.parents[len(oldf.parents)] == oldf.name { + conflicts = append(conflicts, i) + } + } else { + if newf.name == oldf.name { + conflicts = append(conflicts, i) + } + } + } + // Without conflicts, add the new field and return. + if conflicts == nil { + tinfo.fields = append(tinfo.fields, *newf) + return nil + } + + // If any conflict is shallower, ignore the new field. + // This matches the Go field resolution on embedding. + for _, i := range conflicts { + if len(tinfo.fields[i].idx) < len(newf.idx) { + return nil + } + } + + // Otherwise, if any of them is at the same depth level, it's an error. + for _, i := range conflicts { + oldf := &tinfo.fields[i] + if len(oldf.idx) == len(newf.idx) { + f1 := typ.FieldByIndex(oldf.idx) + f2 := typ.FieldByIndex(newf.idx) + return &TagPathError{typ, f1.Name, f1.Tag.Get("xml"), f2.Name, f2.Tag.Get("xml")} + } + } + + // Otherwise, the new field is shallower, and thus takes precedence, + // so drop the conflicting fields from tinfo and append the new one. + for c := len(conflicts) - 1; c >= 0; c-- { + i := conflicts[c] + copy(tinfo.fields[i:], tinfo.fields[i+1:]) + tinfo.fields = tinfo.fields[:len(tinfo.fields)-1] + } + tinfo.fields = append(tinfo.fields, *newf) + return nil +} + +// A TagPathError represents an error in the unmarshaling process +// caused by the use of field tags with conflicting paths. +type TagPathError struct { + Struct fakereflect.TypeAndCanAddr + Field1, Tag1 string + Field2, Tag2 string +} + +func (e *TagPathError) Error() string { + return fmt.Sprintf("%s field %q with tag %q conflicts with field %q with tag %q", e.Struct, e.Field1, e.Tag1, e.Field2, e.Tag2) +} + +// value returns v's field value corresponding to finfo. +// It's equivalent to v.FieldByIndex(finfo.idx), but when passed +// initNilPointers, it initializes and dereferences pointers as necessary. +// When passed dontInitNilPointers and a nil pointer is reached, the function +// returns a zero reflect.Value. +func (finfo *fieldInfo) value(v fakereflect.TypeAndCanAddr) fakereflect.TypeAndCanAddr { + for i, x := range finfo.idx { + if i > 0 { + t := v + if t.IsPtr() && t.Elem().IsStruct() { + v = v.Elem() + } + } + v = v.Field(x).Type + } + return v +} diff --git a/vendor/honnef.co/go/tools/staticcheck/fakexml/xml.go b/vendor/honnef.co/go/tools/staticcheck/fakexml/xml.go new file mode 100644 index 000000000..0f8fb27b8 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/fakexml/xml.go @@ -0,0 +1,33 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fakexml + +// References: +// Annotated XML spec: https://www.xml.com/axml/testaxml.htm +// XML name spaces: https://www.w3.org/TR/REC-xml-names/ + +// TODO(rsc): +// Test error handling. + +// A Name represents an XML name (Local) annotated +// with a name space identifier (Space). 
+// In tokens returned by Decoder.Token, the Space identifier +// is given as a canonical URL, not the short prefix used +// in the document being parsed. +type Name struct { + Space, Local string +} + +// An Attr represents an attribute in an XML element (Name=Value). +type Attr struct { + Name Name + Value string +} + +// A StartElement represents an XML start element. +type StartElement struct { + Name Name + Attr []Attr +} diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go index 40ca2c131..d3fa5441f 100644 --- a/vendor/honnef.co/go/tools/staticcheck/lint.go +++ b/vendor/honnef.co/go/tools/staticcheck/lint.go @@ -34,6 +34,9 @@ import ( "honnef.co/go/tools/knowledge" "honnef.co/go/tools/pattern" "honnef.co/go/tools/printf" + "honnef.co/go/tools/staticcheck/fakejson" + "honnef.co/go/tools/staticcheck/fakereflect" + "honnef.co/go/tools/staticcheck/fakexml" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -254,10 +257,10 @@ var ( } checkUnsupportedMarshal = map[string]CallCheck{ - "encoding/json.Marshal": checkUnsupportedMarshalImpl(knowledge.Arg("json.Marshal.v"), "json", "MarshalJSON", "MarshalText"), - "encoding/xml.Marshal": checkUnsupportedMarshalImpl(knowledge.Arg("xml.Marshal.v"), "xml", "MarshalXML", "MarshalText"), - "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalImpl(knowledge.Arg("(*encoding/json.Encoder).Encode.v"), "json", "MarshalJSON", "MarshalText"), - "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalImpl(knowledge.Arg("(*encoding/xml.Encoder).Encode.v"), "xml", "MarshalXML", "MarshalText"), + "encoding/json.Marshal": checkUnsupportedMarshalJSON, + "encoding/xml.Marshal": checkUnsupportedMarshalXML, + "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalJSON, + "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalXML, } checkAtomicAlignment = map[string]CallCheck{ @@ -868,57 +871,37 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { } } -func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallCheck { - // TODO(dh): flag slices and maps of unsupported types - return func(call *Call) { - msCache := &call.Instr.Parent().Prog.MethodSets - - arg := call.Args[argN] - T := arg.Value.Value.Type() - Ts, ok := typeutil.Dereference(T).Underlying().(*types.Struct) - if !ok { - return - } - ms := msCache.MethodSet(T) - // TODO(dh): we're not checking the signature, which can cause false negatives. - // This isn't a huge problem, however, since vet complains about incorrect signatures. - for _, meth := range meths { - if ms.Lookup(nil, meth) != nil { - return - } - } - fields := typeutil.FlattenFields(Ts) - for _, field := range fields { - if !(field.Var.Exported()) { - continue - } - if reflect.StructTag(field.Tag).Get(tag) == "-" { - continue - } - ms := msCache.MethodSet(field.Var.Type()) - // TODO(dh): we're not checking the signature, which can cause false negatives. - // This isn't a huge problem, however, since vet complains about incorrect signatures. 
- for _, meth := range meths { - if ms.Lookup(nil, meth) != nil { - return - } - } - switch field.Var.Type().Underlying().(type) { - case *types.Chan, *types.Signature: - arg.Invalid(fmt.Sprintf("trying to marshal chan or func value, field %s", fieldPath(T, field.Path))) - } +func checkUnsupportedMarshalJSON(call *Call) { + arg := call.Args[0] + T := arg.Value.Value.Type() + if err := fakejson.Marshal(T); err != nil { + typ := types.TypeString(err.Type, types.RelativeTo(arg.Value.Value.Parent().Pkg.Pkg)) + if err.Path == "x" { + arg.Invalid(fmt.Sprintf("trying to marshal unsupported type %s", typ)) + } else { + arg.Invalid(fmt.Sprintf("trying to marshal unsupported type %s, via %s", typ, err.Path)) } } } -func fieldPath(start types.Type, indices []int) string { - p := start.String() - for _, idx := range indices { - field := typeutil.Dereference(start).Underlying().(*types.Struct).Field(idx) - start = field.Type() - p += "." + field.Name() +func checkUnsupportedMarshalXML(call *Call) { + arg := call.Args[0] + T := arg.Value.Value.Type() + if err := fakexml.Marshal(T); err != nil { + switch err := err.(type) { + case *fakexml.UnsupportedTypeError: + typ := types.TypeString(err.Type, types.RelativeTo(arg.Value.Value.Parent().Pkg.Pkg)) + if err.Path == "x" { + arg.Invalid(fmt.Sprintf("trying to marshal unsupported type %s", typ)) + } else { + arg.Invalid(fmt.Sprintf("trying to marshal unsupported type %s, via %s", typ, err.Path)) + } + case *fakexml.TagPathError: + // Vet does a better job at reporting this error, because it can flag the actual struct tags, not just the call to Marshal + default: + // These errors get reported by SA5008 instead, which can flag the actual fields, independently of calls to xml.Marshal + } } - return p } func isInLoop(b *ir.BasicBlock) bool { @@ -1360,6 +1343,9 @@ func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { // happily flags fn() == fn() – so far, we've had nobody complain // about a false positive, and it's caught several bugs in real // code. + // + // We special case functions from the math/rand package. Someone ran + // into the following false positive: "rand.Intn(2) - rand.Intn(2), which I wrote to generate values {-1, 0, 1} with {0.25, 0.5, 0.25} probability." fn := func(node ast.Node) { op := node.(*ast.BinaryExpr) switch op.Op { @@ -1399,6 +1385,38 @@ func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { // 0 == 0 are slim. 
return } + + if expr, ok := op.X.(*ast.CallExpr); ok { + call := code.CallName(pass, expr) + switch call { + case "math/rand.Int", + "math/rand.Int31", + "math/rand.Int31n", + "math/rand.Int63", + "math/rand.Int63n", + "math/rand.Intn", + "math/rand.Uint32", + "math/rand.Uint64", + "math/rand.ExpFloat64", + "math/rand.Float32", + "math/rand.Float64", + "math/rand.NormFloat64", + "(*math/rand.Rand).Int", + "(*math/rand.Rand).Int31", + "(*math/rand.Rand).Int31n", + "(*math/rand.Rand).Int63", + "(*math/rand.Rand).Int63n", + "(*math/rand.Rand).Intn", + "(*math/rand.Rand).Uint32", + "(*math/rand.Rand).Uint64", + "(*math/rand.Rand).ExpFloat64", + "(*math/rand.Rand).Float32", + "(*math/rand.Rand).Float64", + "(*math/rand.Rand).NormFloat64": + return + } + } + report.Report(pass, op, fmt.Sprintf("identical expressions on the left and right side of the '%s' operator", op.Op)) } code.Preorder(pass, fn, (*ast.BinaryExpr)(nil)) @@ -3828,7 +3846,12 @@ func CheckStructTags(pass *analysis.Pass) (interface{}, error) { } fn := func(node ast.Node) { - for _, field := range node.(*ast.StructType).Fields.List { + structNode := node.(*ast.StructType) + T := pass.TypesInfo.Types[structNode].Type.(*types.Struct) + rt := fakereflect.TypeAndCanAddr{ + Type: T, + } + for i, field := range structNode.Fields.List { if field.Tag == nil { continue } @@ -3850,6 +3873,9 @@ func CheckStructTags(pass *analysis.Pass) (interface{}, error) { case "json": checkJSONTag(pass, field, v[0]) case "xml": + if _, err := fakexml.StructFieldInfo(rt.Field(i)); err != nil { + report.Report(pass, field.Tag, fmt.Sprintf("invalid XML tag: %s", err)) + } checkXMLTag(pass, field, v[0]) } } @@ -3913,29 +3939,22 @@ func checkXMLTag(pass *analysis.Pass, field *ast.Field, tag string) { } fields := strings.Split(tag, ",") counts := map[string]int{} - var exclusives []string for _, s := range fields[1:] { switch s { case "attr", "chardata", "cdata", "innerxml", "comment": counts[s]++ - if counts[s] == 1 { - exclusives = append(exclusives, s) - } case "omitempty", "any": counts[s]++ case "": default: - report.Report(pass, field.Tag, fmt.Sprintf("unknown XML option %q", s)) + report.Report(pass, field.Tag, fmt.Sprintf("invalid XML tag: unknown option %q", s)) } } for k, v := range counts { if v > 1 { - report.Report(pass, field.Tag, fmt.Sprintf("duplicate XML option %q", k)) + report.Report(pass, field.Tag, fmt.Sprintf("invalid XML tag: duplicate option %q", k)) } } - if len(exclusives) > 1 { - report.Report(pass, field.Tag, fmt.Sprintf("XML options %s are mutually exclusive", strings.Join(exclusives, " and "))) - } } func CheckImpossibleTypeAssertion(pass *analysis.Pass) (interface{}, error) { diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go index 3b1ed1702..ab7c9f7ea 100644 --- a/vendor/honnef.co/go/tools/unused/unused.go +++ b/vendor/honnef.co/go/tools/unused/unused.go @@ -1206,6 +1206,17 @@ func (g *graph) entry(pkg *pkg) { // use methods and fields of ignored types if obj, ok := obj.(*types.TypeName); ok { + if obj.IsAlias() { + if typ, ok := obj.Type().(*types.Named); ok && typ.Obj().Pkg() != obj.Pkg() { + // This is an alias of a named type in another package. + // Don't walk its fields or methods; we don't have to, + // and it breaks an assertion in graph.use because we're using an object that we haven't seen before. + // + // For aliases to types in the same package, we do want to ignore the fields and methods, + // because ignoring the alias should ignore the aliased type. 
+ continue + } + } if typ, ok := obj.Type().(*types.Named); ok { for i := 0; i < typ.NumMethods(); i++ { g.use(typ.Method(i), nil, edgeIgnored) @@ -1650,8 +1661,10 @@ func (g *graph) instructions(fn *ir.Function) { // nothing to do case *ir.ConstantSwitch: // nothing to do + case *ir.SliceToArrayPointer: + // nothing to do default: - panic(fmt.Sprintf("unreachable: %T", instr)) + lint.ExhaustiveTypeSwitch(instr) } } } diff --git a/vendor/modules.txt b/vendor/modules.txt index 0651c2348..aee079b30 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -10,7 +10,7 @@ github.com/Antonboom/nilnil/pkg/analyzer # github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 github.com/Azure/go-ansiterm github.com/Azure/go-ansiterm/winterm -# github.com/BurntSushi/toml v0.4.1 +# github.com/BurntSushi/toml v1.0.0 github.com/BurntSushi/toml github.com/BurntSushi/toml/internal # github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 @@ -21,27 +21,29 @@ github.com/Masterminds/semver ## explicit github.com/Microsoft/go-winio github.com/Microsoft/go-winio/pkg/guid -# github.com/OpenPeeDeeP/depguard v1.0.1 +# github.com/OpenPeeDeeP/depguard v1.1.0 github.com/OpenPeeDeeP/depguard # github.com/alexkohler/prealloc v1.0.0 github.com/alexkohler/prealloc/pkg -# github.com/ashanbrown/forbidigo v1.2.0 +# github.com/ashanbrown/forbidigo v1.3.0 github.com/ashanbrown/forbidigo/forbidigo -# github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde +# github.com/ashanbrown/makezero v1.1.0 github.com/ashanbrown/makezero/makezero # github.com/beorn7/perks v1.0.1 github.com/beorn7/perks/quantile # github.com/bkielbasa/cyclop v1.2.0 github.com/bkielbasa/cyclop/pkg/analyzer -# github.com/blizzy78/varnamelen v0.3.0 +# github.com/blizzy78/varnamelen v0.6.0 github.com/blizzy78/varnamelen # github.com/bmatcuk/doublestar/v4 v4.0.2 ## explicit github.com/bmatcuk/doublestar/v4 # github.com/bombsimon/wsl/v3 v3.3.0 github.com/bombsimon/wsl/v3 -# github.com/breml/bidichk v0.1.1 +# github.com/breml/bidichk v0.2.2 github.com/breml/bidichk/pkg/bidichk +# github.com/breml/errchkjson v0.2.3 +github.com/breml/errchkjson # github.com/butuzov/ireturn v0.1.1 github.com/butuzov/ireturn/analyzer github.com/butuzov/ireturn/config @@ -58,25 +60,31 @@ github.com/containerd/containerd/errdefs github.com/containerd/containerd/log github.com/containerd/containerd/platforms # github.com/cpuguy83/go-md2man/v2 v2.0.1 -## explicit github.com/cpuguy83/go-md2man/v2/md2man -# github.com/daixiang0/gci v0.2.9 +# github.com/daixiang0/gci v0.3.1-0.20220208004058-76d765e3ab48 +github.com/daixiang0/gci/pkg/analyzer +github.com/daixiang0/gci/pkg/configuration +github.com/daixiang0/gci/pkg/constants github.com/daixiang0/gci/pkg/gci +github.com/daixiang0/gci/pkg/gci/imports +github.com/daixiang0/gci/pkg/gci/sections +github.com/daixiang0/gci/pkg/gci/specificity +github.com/daixiang0/gci/pkg/io # github.com/davecgh/go-spew v1.1.1 github.com/davecgh/go-spew/spew # github.com/denis-tingajkin/go-header v0.4.2 github.com/denis-tingajkin/go-header -# github.com/docker/cli v20.10.10+incompatible +# github.com/docker/cli v20.10.12+incompatible ## explicit github.com/docker/cli/cli/config/configfile github.com/docker/cli/cli/config/credentials github.com/docker/cli/cli/config/types -# github.com/docker/distribution v2.7.1+incompatible +# github.com/docker/distribution v2.8.0+incompatible ## explicit github.com/docker/distribution/digestset github.com/docker/distribution/reference github.com/docker/distribution/registry/api/errcode -# 
github.com/docker/docker v20.10.10+incompatible +# github.com/docker/docker v20.10.12+incompatible ## explicit github.com/docker/docker/api github.com/docker/docker/api/types @@ -112,7 +120,7 @@ github.com/docker/go-units github.com/drone/envsubst github.com/drone/envsubst/parse github.com/drone/envsubst/path -# github.com/esimonov/ifshort v1.0.3 +# github.com/esimonov/ifshort v1.0.4 github.com/esimonov/ifshort/pkg/analyzer # github.com/ettle/strcase v0.1.1 github.com/ettle/strcase @@ -128,18 +136,18 @@ github.com/flynn/go-shlex github.com/franela/goblin # github.com/fsnotify/fsnotify v1.5.1 github.com/fsnotify/fsnotify -# github.com/fzipp/gocyclo v0.3.1 +# github.com/fzipp/gocyclo v0.4.0 github.com/fzipp/gocyclo # github.com/gin-contrib/sse v0.1.0 github.com/gin-contrib/sse -# github.com/gin-gonic/gin v1.7.4 +# github.com/gin-gonic/gin v1.7.7 ## explicit github.com/gin-gonic/gin github.com/gin-gonic/gin/binding github.com/gin-gonic/gin/internal/bytesconv github.com/gin-gonic/gin/internal/json github.com/gin-gonic/gin/render -# github.com/go-critic/go-critic v0.6.1 +# github.com/go-critic/go-critic v0.6.2 github.com/go-critic/go-critic/checkers github.com/go-critic/go-critic/checkers/internal/astwalk github.com/go-critic/go-critic/checkers/internal/lintutil @@ -150,7 +158,7 @@ github.com/go-playground/locales github.com/go-playground/locales/currency # github.com/go-playground/universal-translator v0.18.0 github.com/go-playground/universal-translator -# github.com/go-playground/validator/v10 v10.9.0 +# github.com/go-playground/validator/v10 v10.10.0 ## explicit github.com/go-playground/validator/v10 # github.com/go-sql-driver/mysql v1.6.0 @@ -198,7 +206,7 @@ github.com/gofrs/flock github.com/gogits/go-gogs-client # github.com/gogo/protobuf v1.3.2 github.com/gogo/protobuf/proto -# github.com/golang-jwt/jwt/v4 v4.1.0 +# github.com/golang-jwt/jwt/v4 v4.3.0 ## explicit github.com/golang-jwt/jwt/v4 # github.com/golang/protobuf v1.5.2 @@ -224,7 +232,7 @@ github.com/golangci/go-misc/deadcode # github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a github.com/golangci/gofmt/gofmt github.com/golangci/gofmt/goimports -# github.com/golangci/golangci-lint v1.43.0 +# github.com/golangci/golangci-lint v1.44.2 ## explicit github.com/golangci/golangci-lint/cmd/golangci-lint github.com/golangci/golangci-lint/internal/cache @@ -262,7 +270,7 @@ github.com/golangci/misspell github.com/golangci/revgrep # github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 github.com/golangci/unconvert -# github.com/google/go-cmp v0.5.6 +# github.com/google/go-cmp v0.5.7 github.com/google/go-cmp/cmp github.com/google/go-cmp/cmp/internal/diff github.com/google/go-cmp/cmp/internal/flags @@ -273,7 +281,7 @@ github.com/google/go-cmp/cmp/internal/value github.com/google/go-github/v39/github # github.com/google/go-querystring v1.1.0 github.com/google/go-querystring/query -# github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254 +# github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8 github.com/gordonklaus/ineffassign/pkg/ineffassign # github.com/gorilla/securecookie v1.1.1 ## explicit @@ -283,14 +291,13 @@ github.com/gostaticanalysis/analysisutil # github.com/gostaticanalysis/comment v1.4.2 github.com/gostaticanalysis/comment github.com/gostaticanalysis/comment/passes/commentmap -# github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5 +# github.com/gostaticanalysis/forcetypeassert v0.1.0 github.com/gostaticanalysis/forcetypeassert # 
github.com/gostaticanalysis/nilerr v0.1.1 github.com/gostaticanalysis/nilerr # github.com/hashicorp/errwrap v1.0.0 github.com/hashicorp/errwrap # github.com/hashicorp/go-cleanhttp v0.5.2 -## explicit github.com/hashicorp/go-cleanhttp # github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-multierror @@ -310,6 +317,10 @@ github.com/hashicorp/hcl/hcl/token github.com/hashicorp/hcl/json/parser github.com/hashicorp/hcl/json/scanner github.com/hashicorp/hcl/json/token +# github.com/hexops/gotextdiff v1.0.3 +github.com/hexops/gotextdiff +github.com/hexops/gotextdiff/myers +github.com/hexops/gotextdiff/span # github.com/inconshreveable/mousetrap v1.0.0 github.com/inconshreveable/mousetrap # github.com/jgautheron/goconst v1.5.1 @@ -324,14 +335,14 @@ github.com/joho/godotenv github.com/joho/godotenv/autoload # github.com/json-iterator/go v1.1.12 github.com/json-iterator/go -# github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d +# github.com/julz/importas v0.1.0 github.com/julz/importas # github.com/kisielk/errcheck v1.6.0 github.com/kisielk/errcheck/errcheck # github.com/kisielk/gotool v1.0.0 github.com/kisielk/gotool github.com/kisielk/gotool/internal/load -# github.com/kulti/thelper v0.4.0 +# github.com/kulti/thelper v0.5.1 github.com/kulti/thelper/pkg/analyzer # github.com/kunwardeep/paralleltest v1.0.3 github.com/kunwardeep/paralleltest/pkg/paralleltest @@ -339,10 +350,17 @@ github.com/kunwardeep/paralleltest/pkg/paralleltest github.com/kyoh86/exportloopref # github.com/ldez/gomoddirectives v0.2.2 github.com/ldez/gomoddirectives -# github.com/ldez/tagliatelle v0.2.0 +# github.com/ldez/tagliatelle v0.3.1 github.com/ldez/tagliatelle # github.com/leodido/go-urn v1.2.1 github.com/leodido/go-urn +# github.com/leonklingele/grouper v1.1.0 +github.com/leonklingele/grouper/pkg/analyzer +github.com/leonklingele/grouper/pkg/analyzer/consts +github.com/leonklingele/grouper/pkg/analyzer/globals +github.com/leonklingele/grouper/pkg/analyzer/imports +github.com/leonklingele/grouper/pkg/analyzer/types +github.com/leonklingele/grouper/pkg/analyzer/vars # github.com/lib/pq v1.10.4 ## explicit github.com/lib/pq @@ -354,13 +372,13 @@ github.com/magiconair/properties github.com/maratori/testpackage/pkg/testpackage # github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 github.com/matoous/godox -# github.com/mattn/go-colorable v0.1.11 +# github.com/mattn/go-colorable v0.1.12 github.com/mattn/go-colorable # github.com/mattn/go-isatty v0.0.14 github.com/mattn/go-isatty # github.com/mattn/go-runewidth v0.0.9 github.com/mattn/go-runewidth -# github.com/mattn/go-sqlite3 v1.14.9 +# github.com/mattn/go-sqlite3 v1.14.11 ## explicit github.com/mattn/go-sqlite3 # github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 @@ -369,16 +387,16 @@ github.com/matttproud/golang_protobuf_extensions/pbutil github.com/mbilski/exhaustivestruct/pkg/analyzer # github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517 github.com/mgechev/dots -# github.com/mgechev/revive v1.1.2 +# github.com/mgechev/revive v1.1.4 github.com/mgechev/revive/config github.com/mgechev/revive/formatter github.com/mgechev/revive/lint github.com/mgechev/revive/rule # github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/go-homedir -# github.com/mitchellh/mapstructure v1.4.2 +# github.com/mitchellh/mapstructure v1.4.3 github.com/mitchellh/mapstructure -# github.com/moby/moby v20.10.10+incompatible +# github.com/moby/moby v20.10.12+incompatible ## explicit github.com/moby/moby/client 
github.com/moby/moby/pkg/jsonmessage @@ -413,7 +431,7 @@ github.com/nbutton23/zxcvbn-go/match github.com/nbutton23/zxcvbn-go/matching github.com/nbutton23/zxcvbn-go/scoring github.com/nbutton23/zxcvbn-go/utils/math -# github.com/nishanths/exhaustive v0.2.3 +# github.com/nishanths/exhaustive v0.7.11 github.com/nishanths/exhaustive # github.com/nishanths/predeclared v0.2.1 github.com/nishanths/predeclared/passes/predeclared @@ -433,9 +451,9 @@ github.com/phayes/checkstyle github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.0 github.com/pmezard/go-difflib/difflib -# github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349 +# github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b github.com/polyfloyd/go-errorlint/errorlint -# github.com/prometheus/client_golang v1.12.0 +# github.com/prometheus/client_golang v1.12.1 ## explicit github.com/prometheus/client_golang/prometheus github.com/prometheus/client_golang/prometheus/internal @@ -452,21 +470,24 @@ github.com/prometheus/common/model github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util -# github.com/quasilyte/go-ruleguard v0.3.13 +# github.com/quasilyte/go-ruleguard v0.3.15 github.com/quasilyte/go-ruleguard/internal/goenv -github.com/quasilyte/go-ruleguard/internal/gogrep github.com/quasilyte/go-ruleguard/internal/golist github.com/quasilyte/go-ruleguard/internal/stdinfo github.com/quasilyte/go-ruleguard/internal/xsrcimporter github.com/quasilyte/go-ruleguard/internal/xtypes -github.com/quasilyte/go-ruleguard/nodetag github.com/quasilyte/go-ruleguard/ruleguard github.com/quasilyte/go-ruleguard/ruleguard/goutil github.com/quasilyte/go-ruleguard/ruleguard/ir github.com/quasilyte/go-ruleguard/ruleguard/irconv +github.com/quasilyte/go-ruleguard/ruleguard/profiling github.com/quasilyte/go-ruleguard/ruleguard/quasigo github.com/quasilyte/go-ruleguard/ruleguard/textmatch github.com/quasilyte/go-ruleguard/ruleguard/typematch +# github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3 +github.com/quasilyte/gogrep +github.com/quasilyte/gogrep/internal/stdinfo +github.com/quasilyte/gogrep/nodetag # github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 github.com/quasilyte/regex/syntax # github.com/rs/zerolog v1.26.1 @@ -484,7 +505,7 @@ github.com/ryancurrah/gomodguard github.com/ryanrolds/sqlclosecheck/pkg/analyzer # github.com/sanposhiho/wastedassign/v2 v2.0.6 github.com/sanposhiho/wastedassign/v2 -# github.com/securego/gosec/v2 v2.9.1 +# github.com/securego/gosec/v2 v2.9.6 github.com/securego/gosec/v2 github.com/securego/gosec/v2/cwe github.com/securego/gosec/v2/rules @@ -492,6 +513,8 @@ github.com/securego/gosec/v2/rules github.com/shazow/go-diff/difflib # github.com/sirupsen/logrus v1.8.1 github.com/sirupsen/logrus +# github.com/sivchari/containedctx v1.0.1 +github.com/sivchari/containedctx # github.com/sivchari/tenv v1.4.7 github.com/sivchari/tenv # github.com/sonatard/noctx v0.0.1 @@ -505,13 +528,13 @@ github.com/spf13/afero github.com/spf13/afero/mem # github.com/spf13/cast v1.4.1 github.com/spf13/cast -# github.com/spf13/cobra v1.2.1 +# github.com/spf13/cobra v1.3.0 github.com/spf13/cobra # github.com/spf13/jwalterweatherman v1.1.0 github.com/spf13/jwalterweatherman # github.com/spf13/pflag v1.0.5 github.com/spf13/pflag -# github.com/spf13/viper v1.9.0 +# github.com/spf13/viper v1.10.1 github.com/spf13/viper github.com/spf13/viper/internal/encoding github.com/spf13/viper/internal/encoding/hcl @@ -544,18 +567,18 @@ 
github.com/syndtr/goleveldb/leveldb/opt github.com/syndtr/goleveldb/leveldb/storage github.com/syndtr/goleveldb/leveldb/table github.com/syndtr/goleveldb/leveldb/util -# github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b +# github.com/tdakkota/asciicheck v0.1.1 github.com/tdakkota/asciicheck # github.com/tetafro/godot v1.4.11 github.com/tetafro/godot # github.com/tevino/abool v1.2.0 ## explicit github.com/tevino/abool -# github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94 +# github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144 github.com/timakin/bodyclose/passes/bodyclose # github.com/tomarrell/wrapcheck/v2 v2.4.0 github.com/tomarrell/wrapcheck/v2/wrapcheck -# github.com/tommy-muehle/go-mnd/v2 v2.4.0 +# github.com/tommy-muehle/go-mnd/v2 v2.5.0 github.com/tommy-muehle/go-mnd/v2 github.com/tommy-muehle/go-mnd/v2/checks github.com/tommy-muehle/go-mnd/v2/config @@ -565,7 +588,7 @@ github.com/tommy-muehle/go-mnd/v2/config github.com/ugorji/go/codec # github.com/ultraware/funlen v0.0.3 github.com/ultraware/funlen -# github.com/ultraware/whitespace v0.0.4 +# github.com/ultraware/whitespace v0.0.5 github.com/ultraware/whitespace # github.com/urfave/cli/v2 v2.3.0 ## explicit @@ -576,7 +599,7 @@ github.com/uudashr/gocognit ## explicit github.com/woodpecker-ci/expr github.com/woodpecker-ci/expr/parse -# github.com/xanzy/go-gitlab v0.52.2 +# github.com/xanzy/go-gitlab v0.55.1 ## explicit github.com/xanzy/go-gitlab # github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb @@ -587,9 +610,15 @@ github.com/xeipuuv/gojsonreference # github.com/xeipuuv/gojsonschema v1.2.0 ## explicit github.com/xeipuuv/gojsonschema -# github.com/yeya24/promlinter v0.1.0 +# github.com/yagipy/maintidx v1.0.0 +github.com/yagipy/maintidx +github.com/yagipy/maintidx/pkg/cyc +github.com/yagipy/maintidx/pkg/halstvol +# github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1 github.com/yeya24/promlinter -# golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed +# gitlab.com/bosi/decorder v0.2.1 +gitlab.com/bosi/decorder +# golang.org/x/crypto v0.0.0-20220214200702-86341886e292 ## explicit golang.org/x/crypto/acme golang.org/x/crypto/acme/autocert @@ -601,7 +630,7 @@ golang.org/x/crypto/openpgp/errors golang.org/x/crypto/openpgp/packet golang.org/x/crypto/openpgp/s2k golang.org/x/crypto/sha3 -# golang.org/x/mod v0.5.0 +# golang.org/x/mod v0.5.1 golang.org/x/mod/internal/lazyregexp golang.org/x/mod/modfile golang.org/x/mod/module @@ -618,7 +647,7 @@ golang.org/x/net/internal/socks golang.org/x/net/internal/timeseries golang.org/x/net/proxy golang.org/x/net/trace -# golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 +# golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b ## explicit golang.org/x/oauth2 golang.org/x/oauth2/bitbucket @@ -626,7 +655,7 @@ golang.org/x/oauth2/internal # golang.org/x/sync v0.0.0-20210220032951-036812b2e83c ## explicit golang.org/x/sync/errgroup -# golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27 +# golang.org/x/sys v0.0.0-20220224120231-95c6836cb0e7 ## explicit golang.org/x/sys/cpu golang.org/x/sys/execabs @@ -643,10 +672,10 @@ golang.org/x/text/transform golang.org/x/text/unicode/bidi golang.org/x/text/unicode/norm golang.org/x/text/width -# golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 +# golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 ## explicit golang.org/x/time/rate -# golang.org/x/tools v0.1.7 +# golang.org/x/tools v0.1.9 golang.org/x/tools/go/analysis golang.org/x/tools/go/analysis/passes/asmdecl 
golang.org/x/tools/go/analysis/passes/assign @@ -810,14 +839,14 @@ google.golang.org/protobuf/types/descriptorpb google.golang.org/protobuf/types/known/anypb google.golang.org/protobuf/types/known/durationpb google.golang.org/protobuf/types/known/timestamppb -# gopkg.in/ini.v1 v1.63.2 +# gopkg.in/ini.v1 v1.66.2 gopkg.in/ini.v1 # gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v2 # gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b ## explicit gopkg.in/yaml.v3 -# honnef.co/go/tools v0.2.1 +# honnef.co/go/tools v0.2.2 honnef.co/go/tools/analysis/code honnef.co/go/tools/analysis/edit honnef.co/go/tools/analysis/facts @@ -837,16 +866,20 @@ honnef.co/go/tools/pattern honnef.co/go/tools/printf honnef.co/go/tools/simple honnef.co/go/tools/staticcheck +honnef.co/go/tools/staticcheck/fakejson +honnef.co/go/tools/staticcheck/fakereflect +honnef.co/go/tools/staticcheck/fakexml honnef.co/go/tools/stylecheck honnef.co/go/tools/unused honnef.co/go/tools/unused/typemap -# mvdan.cc/gofumpt v0.1.1 +# mvdan.cc/gofumpt v0.2.1 mvdan.cc/gofumpt/format +mvdan.cc/gofumpt/internal/version # mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed mvdan.cc/interfacer/check # mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b mvdan.cc/lint -# mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7 +# mvdan.cc/unparam v0.0.0-20211214103731-d0ef000c54e5 mvdan.cc/unparam/check # xorm.io/builder v0.3.9 ## explicit diff --git a/vendor/mvdan.cc/gofumpt/format/format.go b/vendor/mvdan.cc/gofumpt/format/format.go index 808d8ce18..e4154d285 100644 --- a/vendor/mvdan.cc/gofumpt/format/format.go +++ b/vendor/mvdan.cc/gofumpt/format/format.go @@ -13,6 +13,7 @@ import ( "go/format" "go/parser" "go/token" + "os" "reflect" "regexp" "sort" @@ -24,6 +25,8 @@ import ( "github.com/google/go-cmp/cmp" "golang.org/x/mod/semver" "golang.org/x/tools/go/ast/astutil" + + "mvdan.cc/gofumpt/internal/version" ) type Options struct { @@ -47,6 +50,11 @@ type Options struct { // source file. func Source(src []byte, opts Options) ([]byte, error) { fset := token.NewFileSet() + + // Ensure our parsed files never start with base 1, + // to ensure that using token.NoPos+1 will panic. + fset.AddFile("gofumpt_base.go", 1, 10) + file, err := parser.ParseFile(fset, "", src, parser.ParseComments) if err != nil { return nil, err @@ -61,10 +69,25 @@ func Source(src []byte, opts Options) ([]byte, error) { return buf.Bytes(), nil } +var rxCodeGenerated = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) + // File modifies a file and fset in place to follow gofumpt's format. The // changes might include manipulating adding or removing newlines in fset, // modifying the position of nodes, or modifying literal values. 
func File(fset *token.FileSet, file *ast.File, opts Options) { + simplify(file) + + for _, cg := range file.Comments { + if cg.Pos() > file.Package { + break + } + for _, line := range cg.List { + if rxCodeGenerated.MatchString(line.Text) { + return + } + } + } + if opts.LangVersion == "" { opts.LangVersion = "v1" } else if opts.LangVersion[0] != 'v' { @@ -78,16 +101,55 @@ func File(fset *token.FileSet, file *ast.File, opts Options) { fset: fset, astFile: file, Options: opts, + + minSplitFactor: 0.4, } + var topFuncType *ast.FuncType pre := func(c *astutil.Cursor) bool { f.applyPre(c) - if _, ok := c.Node().(*ast.BlockStmt); ok { + switch node := c.Node().(type) { + case *ast.FuncDecl: + topFuncType = node.Type + case *ast.FieldList: + ft, _ := c.Parent().(*ast.FuncType) + if ft == nil || ft != topFuncType { + break + } + + // For top-level function declaration parameters, + // require the line split to be longer. + // This avoids func lines which are a bit too short, + // and allows func lines which are a bit longer. + // + // We don't just increase longLineLimit, + // as we still want splits at around the same place. + if ft.Params == node { + f.minSplitFactor = 0.6 + } + + // Don't split result parameters into multiple lines, + // as that can be easily confused for input parameters. + // TODO: consider the same for single-line func calls in + // if statements. + // TODO: perhaps just use a higher factor, like 0.8. + if ft.Results == node { + f.minSplitFactor = 1000 + } + case *ast.BlockStmt: f.blockLevel++ } return true } post := func(c *astutil.Cursor) bool { - if _, ok := c.Node().(*ast.BlockStmt); ok { + f.applyPost(c) + + // Reset minSplitFactor and blockLevel. + switch node := c.Node().(type) { + case *ast.FuncType: + if node == topFuncType { + f.minSplitFactor = 0.4 + } + case *ast.BlockStmt: f.blockLevel-- } return true @@ -95,10 +157,14 @@ func File(fset *token.FileSet, file *ast.File, opts Options) { astutil.Apply(file, pre, post) } -// Multiline nodes which could fit on a single line under this many -// bytes may be collapsed onto a single line. +// Multiline nodes which could easily fit on a single line under this many bytes +// may be collapsed onto a single line. const shortLineLimit = 60 +// Single-line nodes which take over this many bytes, and could easily be split +// into two lines of at least its minSplitFactor factor, may be split. +const longLineLimit = 100 + var rxOctalInteger = regexp.MustCompile(`\A0[0-7_]+\z`) type fumpter struct { @@ -109,7 +175,12 @@ type fumpter struct { astFile *ast.File + // blockLevel is the number of indentation blocks we're currently under. + // It is used to approximate the levels of indentation a line will end + // up with. blockLevel int + + minSplitFactor float64 } func (f *fumpter) commentsBetween(p1, p2 token.Pos) []*ast.CommentGroup { @@ -210,50 +281,57 @@ func (f *fumpter) printLength(node ast.Node) int { return int(count) + (f.blockLevel * 8) } +func (f *fumpter) tabbedColumn(p token.Pos) int { + col := f.Position(p).Column + + // Like in printLength, add an approximation of the indentation level. + // Since any existing tabs were already counted as one column, multiply + // the level by 7. 
+ return col + (f.blockLevel * 7) +} + +func (f *fumpter) lineEnd(line int) token.Pos { + if line < 1 { + panic("illegal line number") + } + total := f.LineCount() + if line > total { + panic("illegal line number") + } + if line == total { + return f.astFile.End() + } + return f.LineStart(line+1) - 1 +} + // rxCommentDirective covers all common Go comment directives: // -// //go: | standard Go directives, like go:noinline -// //some-words: | similar to the syntax above, like lint:ignore or go-sumtype:decl -// //line | inserted line information for cmd/compile -// //export | to mark cgo funcs for exporting -// //extern | C function declarations for gccgo -// //sys(nb)? | syscall function wrapper prototypes -// //nolint | nolint directive for golangci +// //go: | standard Go directives, like go:noinline +// //some-words: | similar to the syntax above, like lint:ignore or go-sumtype:decl +// //line | inserted line information for cmd/compile +// //export | to mark cgo funcs for exporting +// //extern | C function declarations for gccgo +// //sys(nb)? | syscall function wrapper prototypes +// //nolint | nolint directive for golangci +// //noinspection | noinspection directive for GoLand and friends // // Note that the "some-words:" matching expects a letter afterward, such as // "go:generate", to prevent matching false positives like "https://site". -var rxCommentDirective = regexp.MustCompile(`^([a-z-]+:[a-z]+|line\b|export\b|extern\b|sys(nb)?\b|nolint\b)`) +var rxCommentDirective = regexp.MustCompile(`^([a-z-]+:[a-z]+|line\b|export\b|extern\b|sys(nb)?\b|no(lint|inspection)\b)`) -// visit takes either an ast.Node or a []ast.Stmt. func (f *fumpter) applyPre(c *astutil.Cursor) { + f.splitLongLine(c) + switch node := c.Node().(type) { case *ast.File: - var lastMulti bool - var lastEnd token.Pos - for _, decl := range node.Decls { - pos := decl.Pos() - comments := f.commentsBetween(lastEnd, pos) - if len(comments) > 0 { - pos = comments[0].Pos() - } - - // multiline top-level declarations should be separated - multi := f.Line(pos) < f.Line(decl.End()) - if multi && lastMulti && f.Line(lastEnd)+1 == f.Line(pos) { - f.addNewline(lastEnd) - } - - lastMulti = multi - lastEnd = decl.End() - } - - // Join contiguous lone var/const/import lines; abort if there - // are empty lines or comments in between. + // Join contiguous lone var/const/import lines. + // Abort if there are empty lines or comments in between, + // including a leading comment, which could be a directive. newDecls := make([]ast.Decl, 0, len(node.Decls)) for i := 0; i < len(node.Decls); { newDecls = append(newDecls, node.Decls[i]) start, ok := node.Decls[i].(*ast.GenDecl) - if !ok || isCgoImport(start) { + if !ok || isCgoImport(start) || start.Doc != nil { i++ continue } @@ -268,6 +346,10 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { if c := f.inlineComment(cont.End()); c != nil { // don't move an inline comment outside start.Rparen = c.End() + } else { + // so the code below treats the joined + // decl group as multi-line + start.Rparen = cont.End() } lastPos = cont.Pos() i++ @@ -275,10 +357,43 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { } node.Decls = newDecls + // Multiline top-level declarations should be separated by an + // empty line. + // Do this after the joining of lone declarations above, + // as joining single-line declarations makes then multi-line. 
+ var lastMulti bool + var lastEnd token.Pos + for _, decl := range node.Decls { + pos := decl.Pos() + comments := f.commentsBetween(lastEnd, pos) + if len(comments) > 0 { + pos = comments[0].Pos() + } + + multi := f.Line(pos) < f.Line(decl.End()) + if multi && lastMulti && f.Line(lastEnd)+1 == f.Line(pos) { + f.addNewline(lastEnd) + } + + lastMulti = multi + lastEnd = decl.End() + } + // Comments aren't nodes, so they're not walked by default. groupLoop: for _, group := range node.Comments { for _, comment := range group.List { + if comment.Text == "//gofumpt:diagnose" || strings.HasPrefix(comment.Text, "//gofumpt:diagnose ") { + slc := []string{ + "//gofumpt:diagnose", + version.String(), + "-lang=" + f.LangVersion, + } + if f.ExtraRules { + slc = append(slc, "-extra") + } + comment.Text = strings.Join(slc, " ") + } body := strings.TrimPrefix(comment.Text, "//") if body == comment.Text { // /*-style comment @@ -300,7 +415,7 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { body := strings.TrimPrefix(comment.Text, "//") r, _ := utf8.DecodeRuneInString(body) if !unicode.IsSpace(r) { - comment.Text = "// " + strings.TrimPrefix(comment.Text, "//") + comment.Text = "// " + body } } } @@ -355,6 +470,31 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { node.Rparen = token.NoPos } + case *ast.InterfaceType: + var prev *ast.Field + for _, method := range node.Methods.List { + switch { + case prev == nil: + removeToPos := method.Pos() + if comments := f.commentsBetween(node.Interface, method.Pos()); len(comments) > 0 { + // only remove leading line upto the first comment + removeToPos = comments[0].Pos() + } + // remove leading lines if they exist + f.removeLines(f.Line(node.Interface)+1, f.Line(removeToPos)) + + case len(f.commentsBetween(prev.End(), method.Pos())) > 0: + // comments in between; leave newlines alone + case len(prev.Names) != len(method.Names): + // don't group type unions with methods + case len(prev.Names) == 1 && token.IsExported(prev.Names[0].Name) != token.IsExported(method.Names[0].Name): + // don't group exported and unexported methods together + default: + f.removeLinesBetween(prev.End(), method.Pos()) + } + prev = method + } + case *ast.BlockStmt: f.stmts(node.List) comments := f.commentsBetween(node.Lbrace, node.Rbrace) @@ -404,22 +544,104 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { return } if sign != nil { - var lastParam *ast.Field - if l := sign.Results; l != nil && len(l.List) > 0 { - lastParam = l.List[len(l.List)-1] - } else if l := sign.Params; l != nil && len(l.List) > 0 { - lastParam = l.List[len(l.List)-1] - } endLine := f.Line(sign.End()) - if lastParam != nil && f.Line(sign.Pos()) != endLine && f.Line(lastParam.Pos()) == endLine { + + paramClosingIsFirstCharOnEndLine := sign.Params != nil && + f.Position(sign.Params.Closing).Column == 1 && + f.Line(sign.Params.Closing) == endLine + + resultClosingIsFirstCharOnEndLine := sign.Results != nil && + f.Position(sign.Results.Closing).Column == 1 && + f.Line(sign.Results.Closing) == endLine + + endLineIsIndented := !(paramClosingIsFirstCharOnEndLine || resultClosingIsFirstCharOnEndLine) + + if f.Line(sign.Pos()) != endLine && endLineIsIndented { + // is there an empty line? + isThereAnEmptyLine := endLine+1 != f.Line(bodyPos) + // The body is preceded by a multi-line function - // signature, and the empty line helps readability. - return + // signature, we move the `) {` to avoid the empty line. 
+ switch { + case isThereAnEmptyLine && sign.Results != nil && + !resultClosingIsFirstCharOnEndLine && + sign.Results.Closing.IsValid(): // there may be no ")" + sign.Results.Closing += 1 + f.addNewline(sign.Results.Closing) + + case isThereAnEmptyLine && sign.Params != nil && + !paramClosingIsFirstCharOnEndLine: + sign.Params.Closing += 1 + f.addNewline(sign.Params.Closing) + } } } f.removeLinesBetween(node.Lbrace, bodyPos) + case *ast.CaseClause: + f.stmts(node.Body) + openLine := f.Line(node.Case) + closeLine := f.Line(node.Colon) + if openLine == closeLine { + // nothing to do + break + } + if len(f.commentsBetween(node.Case, node.Colon)) > 0 { + // don't move comments + break + } + if f.printLength(node) > shortLineLimit { + // too long to collapse + break + } + f.removeLines(openLine, closeLine) + + case *ast.CommClause: + f.stmts(node.Body) + + case *ast.FieldList: + if node.NumFields() == 0 && len(f.commentsBetween(node.Pos(), node.End())) == 0 { + // Empty field lists should not contain a newline. + // Do not join the two lines if the first has an inline + // comment, as that can result in broken formatting. + openLine := f.Line(node.Pos()) + closeLine := f.Line(node.End()) + f.removeLines(openLine, closeLine) + } + + // Merging adjacent fields (e.g. parameters) is disabled by default. + if !f.ExtraRules { + break + } + switch c.Parent().(type) { + case *ast.FuncDecl, *ast.FuncType, *ast.InterfaceType: + node.List = f.mergeAdjacentFields(node.List) + c.Replace(node) + case *ast.StructType: + // Do not merge adjacent fields in structs. + } + + case *ast.BasicLit: + // Octal number literals were introduced in 1.13. + if semver.Compare(f.LangVersion, "v1.13") >= 0 { + if node.Kind == token.INT && rxOctalInteger.MatchString(node.Value) { + node.Value = "0o" + node.Value[1:] + c.Replace(node) + } + } + + case *ast.AssignStmt: + // Only remove lines between the assignment token and the first right-hand side expression + f.removeLines(f.Line(node.TokPos), f.Line(node.Rhs[0].Pos())) + } +} + +func (f *fumpter) applyPost(c *astutil.Cursor) { + switch node := c.Node().(type) { + // Adding newlines to composite literals happens as a "post" step, so + // that we can take into account whether "pre" steps added any newlines + // that would affect us here. 
case *ast.CompositeLit: if len(node.Elts) == 0 { // doesn't have elements @@ -434,16 +656,26 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { newlineAroundElems := false newlineBetweenElems := false + lastEnd := node.Lbrace lastLine := openLine for i, elem := range node.Elts { - if f.Line(elem.Pos()) > lastLine { + pos := elem.Pos() + comments := f.commentsBetween(lastEnd, pos) + if len(comments) > 0 { + pos = comments[0].Pos() + } + if curLine := f.Line(pos); curLine > lastLine { if i == 0 { newlineAroundElems = true + + // remove leading lines if they exist + f.removeLines(openLine+1, curLine) } else { newlineBetweenElems = true } } - lastLine = f.Line(elem.End()) + lastEnd = elem.End() + lastLine = f.Line(lastEnd) } if closeLine > lastLine { newlineAroundElems = true @@ -484,58 +716,100 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { f.addNewline(elem1.End()) } } + } +} - case *ast.CaseClause: - f.stmts(node.Body) - openLine := f.Line(node.Case) - closeLine := f.Line(node.Colon) - if openLine == closeLine { - // nothing to do - break - } - if len(f.commentsBetween(node.Case, node.Colon)) > 0 { - // don't move comments - break - } - if f.printLength(node) > shortLineLimit { - // too long to collapse - break - } - f.removeLines(openLine, closeLine) +func (f *fumpter) splitLongLine(c *astutil.Cursor) { + if os.Getenv("GOFUMPT_SPLIT_LONG_LINES") != "on" { + // By default, this feature is turned off. + // Turn it on by setting GOFUMPT_SPLIT_LONG_LINES=on. + return + } + node := c.Node() + if node == nil { + return + } - case *ast.CommClause: - f.stmts(node.Body) + newlinePos := node.Pos() + start := f.Position(node.Pos()) + end := f.Position(node.End()) - case *ast.FieldList: - if node.NumFields() == 0 && f.inlineComment(node.Pos()) == nil { - // Empty field lists should not contain a newline. - // Do not join the two lines if the first has an inline - // comment, as that can result in broken formatting. - openLine := f.Line(node.Pos()) - closeLine := f.Line(node.End()) - f.removeLines(openLine, closeLine) - } + // If the node is already split in multiple lines, there's nothing to do. + if start.Line != end.Line { + return + } - // Merging adjacent fields (e.g. parameters) is disabled by default. - if !f.ExtraRules { - break - } - switch c.Parent().(type) { - case *ast.FuncDecl, *ast.FuncType, *ast.InterfaceType: - node.List = f.mergeAdjacentFields(node.List) - c.Replace(node) - case *ast.StructType: - // Do not merge adjacent fields in structs. - } + // Only split at the start of the current node if it's part of a list. + if _, ok := c.Parent().(*ast.BinaryExpr); ok { + // Chains of binary expressions are considered lists, too. + } else if c.Index() >= 0 { + // For the rest of the nodes, we're in a list if c.Index() >= 0. + } else { + return + } - case *ast.BasicLit: - // Octal number literals were introduced in 1.13. - if semver.Compare(f.LangVersion, "v1.13") >= 0 { - if node.Kind == token.INT && rxOctalInteger.MatchString(node.Value) { - node.Value = "0o" + node.Value[1:] - c.Replace(node) - } - } + // Like in printLength, add an approximation of the indentation level. + // Since any existing tabs were already counted as one column, multiply + // the level by 7. + startCol := start.Column + f.blockLevel*7 + endCol := end.Column + f.blockLevel*7 + + // If this is a composite literal, + // and we were going to insert a newline before the entire literal, + // insert the newline before the first element instead. 
+ // Since we'll add a newline after the last element too, + // this format is generally going to be nicer. + if comp := isComposite(node); comp != nil && len(comp.Elts) > 0 { + newlinePos = comp.Elts[0].Pos() + } + + // If this is a function call, + // and we were to add a newline before the first argument, + // prefer adding the newline before the entire call. + // End-of-line parentheses aren't very nice, as we don't put their + // counterparts at the start of a line too. + // We do this by using the average of the two starting positions. + if call, _ := node.(*ast.CallExpr); call != nil && len(call.Args) > 0 { + first := f.Position(call.Args[0].Pos()) + startCol += (first.Column - start.Column) / 2 + } + + // If the start position is too short, we definitely won't split the line. + if startCol <= shortLineLimit { + return + } + + lineEnd := f.Position(f.lineEnd(start.Line)) + + // firstLength and secondLength are the split line lengths, excluding + // indentation. + firstLength := start.Column - f.blockLevel + if firstLength < 0 { + panic("negative length") + } + secondLength := lineEnd.Column - start.Column + if secondLength < 0 { + panic("negative length") + } + + // If the line ends past the long line limit, + // and both splits are estimated to take at least minSplitFactor of the limit, + // then split the line. + minSplitLength := int(f.minSplitFactor * longLineLimit) + if endCol > longLineLimit && + firstLength >= minSplitLength && secondLength >= minSplitLength { + f.addNewline(newlinePos) + } +} + +func isComposite(node ast.Node) *ast.CompositeLit { + switch node := node.(type) { + case *ast.CompositeLit: + return node + case *ast.UnaryExpr: + return isComposite(node.X) // e.g. &T{} + default: + return nil } } @@ -571,13 +845,21 @@ func identEqual(expr ast.Expr, name string) bool { // // import "C" // +// or the equivalent: +// +// import `C` +// // Note that parentheses do not affect the result. func isCgoImport(decl *ast.GenDecl) bool { if decl.Tok != token.IMPORT || len(decl.Specs) != 1 { return false } spec := decl.Specs[0].(*ast.ImportSpec) - return spec.Path.Value == `"C"` + v, err := strconv.Unquote(spec.Path.Value) + if err != nil { + panic(err) // should never error + } + return v == "C" } // joinStdImports ensures that all standard library imports are together and at @@ -653,7 +935,7 @@ func (f *fumpter) mergeAdjacentFields(fields []*ast.Field) []*ast.Field { // Otherwise, iterate over adjacent pairs of fields, merging if possible, // and mutating fields. Elements of fields may be mutated (if merged with - // following fields), discarded (if merged with a preceeding field), or left + // following fields), discarded (if merged with a preceding field), or left // unchanged. i := 0 for j := 1; j < len(fields); j++ { diff --git a/vendor/mvdan.cc/gofumpt/format/rewrite.go b/vendor/mvdan.cc/gofumpt/format/rewrite.go new file mode 100644 index 000000000..9e5d4579f --- /dev/null +++ b/vendor/mvdan.cc/gofumpt/format/rewrite.go @@ -0,0 +1,113 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package format + +import ( + "go/ast" + "go/token" + "reflect" + "unicode" + "unicode/utf8" +) + +// Values/types for special cases. 
+var ( + identType = reflect.TypeOf((*ast.Ident)(nil)) + objectPtrType = reflect.TypeOf((*ast.Object)(nil)) + positionType = reflect.TypeOf(token.NoPos) + callExprType = reflect.TypeOf((*ast.CallExpr)(nil)) +) + +func isWildcard(s string) bool { + rune, size := utf8.DecodeRuneInString(s) + return size == len(s) && unicode.IsLower(rune) +} + +// match reports whether pattern matches val, +// recording wildcard submatches in m. +// If m == nil, match checks whether pattern == val. +func match(m map[string]reflect.Value, pattern, val reflect.Value) bool { + // Wildcard matches any expression. If it appears multiple + // times in the pattern, it must match the same expression + // each time. + if m != nil && pattern.IsValid() && pattern.Type() == identType { + name := pattern.Interface().(*ast.Ident).Name + if isWildcard(name) && val.IsValid() { + // wildcards only match valid (non-nil) expressions. + if _, ok := val.Interface().(ast.Expr); ok && !val.IsNil() { + if old, ok := m[name]; ok { + return match(nil, old, val) + } + m[name] = val + return true + } + } + } + + // Otherwise, pattern and val must match recursively. + if !pattern.IsValid() || !val.IsValid() { + return !pattern.IsValid() && !val.IsValid() + } + if pattern.Type() != val.Type() { + return false + } + + // Special cases. + switch pattern.Type() { + case identType: + // For identifiers, only the names need to match + // (and none of the other *ast.Object information). + // This is a common case, handle it all here instead + // of recursing down any further via reflection. + p := pattern.Interface().(*ast.Ident) + v := val.Interface().(*ast.Ident) + return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name + case objectPtrType, positionType: + // object pointers and token positions always match + return true + case callExprType: + // For calls, the Ellipsis fields (token.Position) must + // match since that is how f(x) and f(x...) are different. + // Check them here but fall through for the remaining fields. + p := pattern.Interface().(*ast.CallExpr) + v := val.Interface().(*ast.CallExpr) + if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() { + return false + } + } + + p := reflect.Indirect(pattern) + v := reflect.Indirect(val) + if !p.IsValid() || !v.IsValid() { + return !p.IsValid() && !v.IsValid() + } + + switch p.Kind() { + case reflect.Slice: + if p.Len() != v.Len() { + return false + } + for i := 0; i < p.Len(); i++ { + if !match(m, p.Index(i), v.Index(i)) { + return false + } + } + return true + + case reflect.Struct: + for i := 0; i < p.NumField(); i++ { + if !match(m, p.Field(i), v.Field(i)) { + return false + } + } + return true + + case reflect.Interface: + return match(m, p.Elem(), v.Elem()) + } + + // Handle token integers, etc. + return p.Interface() == v.Interface() +} diff --git a/vendor/mvdan.cc/gofumpt/format/simplify.go b/vendor/mvdan.cc/gofumpt/format/simplify.go new file mode 100644 index 000000000..f2fd4d6bd --- /dev/null +++ b/vendor/mvdan.cc/gofumpt/format/simplify.go @@ -0,0 +1,165 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package format + +import ( + "go/ast" + "go/token" + "reflect" +) + +type simplifier struct{} + +func (s simplifier) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.CompositeLit: + // array, slice, and map composite literals may be simplified + outer := n + var keyType, eltType ast.Expr + switch typ := outer.Type.(type) { + case *ast.ArrayType: + eltType = typ.Elt + case *ast.MapType: + keyType = typ.Key + eltType = typ.Value + } + + if eltType != nil { + var ktyp reflect.Value + if keyType != nil { + ktyp = reflect.ValueOf(keyType) + } + typ := reflect.ValueOf(eltType) + for i, x := range outer.Elts { + px := &outer.Elts[i] + // look at value of indexed/named elements + if t, ok := x.(*ast.KeyValueExpr); ok { + if keyType != nil { + s.simplifyLiteral(ktyp, keyType, t.Key, &t.Key) + } + x = t.Value + px = &t.Value + } + s.simplifyLiteral(typ, eltType, x, px) + } + // node was simplified - stop walk (there are no subnodes to simplify) + return nil + } + + case *ast.SliceExpr: + // a slice expression of the form: s[a:len(s)] + // can be simplified to: s[a:] + // if s is "simple enough" (for now we only accept identifiers) + // + // Note: This may not be correct because len may have been redeclared in another + // file belonging to the same package. However, this is extremely unlikely + // and so far (April 2016, after years of supporting this rewrite feature) + // has never come up, so let's keep it working as is (see also #15153). + if n.Max != nil { + // - 3-index slices always require the 2nd and 3rd index + break + } + if s, _ := n.X.(*ast.Ident); s != nil && s.Obj != nil { + // the array/slice object is a single, resolved identifier + if call, _ := n.High.(*ast.CallExpr); call != nil && len(call.Args) == 1 && !call.Ellipsis.IsValid() { + // the high expression is a function call with a single argument + if fun, _ := call.Fun.(*ast.Ident); fun != nil && fun.Name == "len" && fun.Obj == nil { + // the function called is "len" and it is not locally defined; and + // because we don't have dot imports, it must be the predefined len() + if arg, _ := call.Args[0].(*ast.Ident); arg != nil && arg.Obj == s.Obj { + // the len argument is the array/slice object + n.High = nil + } + } + } + } + // Note: We could also simplify slice expressions of the form s[0:b] to s[:b] + // but we leave them as is since sometimes we want to be very explicit + // about the lower bound. + // An example where the 0 helps: + // x, y, z := b[0:2], b[2:4], b[4:6] + // An example where it does not: + // x, y := b[:n], b[n:] + + case *ast.RangeStmt: + // - a range of the form: for x, _ = range v {...} + // can be simplified to: for x = range v {...} + // - a range of the form: for _ = range v {...} + // can be simplified to: for range v {...} + if isBlank(n.Value) { + n.Value = nil + } + if isBlank(n.Key) && n.Value == nil { + n.Key = nil + } + } + + return s +} + +func (s simplifier) simplifyLiteral(typ reflect.Value, astType, x ast.Expr, px *ast.Expr) { + ast.Walk(s, x) // simplify x + + // if the element is a composite literal and its literal type + // matches the outer literal's element type exactly, the inner + // literal type may be omitted + if inner, ok := x.(*ast.CompositeLit); ok { + if match(nil, typ, reflect.ValueOf(inner.Type)) { + inner.Type = nil + } + } + // if the outer literal's element type is a pointer type *T + // and the element is & of a composite literal of type T, + // the inner &T may be omitted. 
+ if ptr, ok := astType.(*ast.StarExpr); ok { + if addr, ok := x.(*ast.UnaryExpr); ok && addr.Op == token.AND { + if inner, ok := addr.X.(*ast.CompositeLit); ok { + if match(nil, reflect.ValueOf(ptr.X), reflect.ValueOf(inner.Type)) { + inner.Type = nil // drop T + *px = inner // drop & + } + } + } + } +} + +func isBlank(x ast.Expr) bool { + ident, ok := x.(*ast.Ident) + return ok && ident.Name == "_" +} + +func simplify(f *ast.File) { + // remove empty declarations such as "const ()", etc + removeEmptyDeclGroups(f) + + var s simplifier + ast.Walk(s, f) +} + +func removeEmptyDeclGroups(f *ast.File) { + i := 0 + for _, d := range f.Decls { + if g, ok := d.(*ast.GenDecl); !ok || !isEmpty(f, g) { + f.Decls[i] = d + i++ + } + } + f.Decls = f.Decls[:i] +} + +func isEmpty(f *ast.File, g *ast.GenDecl) bool { + if g.Doc != nil || g.Specs != nil { + return false + } + + for _, c := range f.Comments { + // if there is a comment in the declaration, it is not considered empty + if g.Pos() <= c.Pos() && c.End() <= g.End() { + return false + } + } + + return true +} diff --git a/vendor/mvdan.cc/gofumpt/internal/version/version.go b/vendor/mvdan.cc/gofumpt/internal/version/version.go new file mode 100644 index 000000000..3ec8830de --- /dev/null +++ b/vendor/mvdan.cc/gofumpt/internal/version/version.go @@ -0,0 +1,31 @@ +// Copyright (c) 2020, Daniel Martí +// See LICENSE for licensing information + +package version + +import ( + "fmt" + "os" + "runtime/debug" +) + +var version = "(devel)" // to match the default from runtime/debug + +func String() string { + if testVersion := os.Getenv("GOFUMPT_VERSION_TEST"); testVersion != "" { + return testVersion + } + // don't overwrite the version if it was set by -ldflags=-X + if info, ok := debug.ReadBuildInfo(); ok && version == "(devel)" { + mod := &info.Main + if mod.Replace != nil { + mod = mod.Replace + } + version = mod.Version + } + return version +} + +func Print() { + fmt.Println(String()) +}
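
Illustrative sketch (an assumption added for review context, not part of the vendored diff): with the honnef.co/go/tools v0.2.2 changes above, the unsupported-marshal check builds the offending field path via fakejson/fakexml instead of flattening struct fields, so its diagnostic should resemble "trying to marshal unsupported type chan bool, via x.Done" for code like the following hypothetical program:

package main

import "encoding/json"

// payload contains a channel field, which encoding/json cannot marshal.
type payload struct {
	Done chan bool `json:"done"`
}

func main() {
	// staticcheck's unsupported-marshal check flags this call; per the updated
	// checkUnsupportedMarshalJSON above, the message includes the field path,
	// e.g. "trying to marshal unsupported type chan bool, via x.Done".
	// At runtime this simply returns an *json.UnsupportedTypeError.
	_, _ = json.Marshal(payload{})
}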