go mod tidy

go mod vendor
Jacob Woffenden 2022-10-19 22:04:21 +00:00 committed by GitHub
parent d5cd4e592c
commit 1fa911fdec
89 changed files with 4979 additions and 1395 deletions

7
go.mod

@ -37,7 +37,7 @@ require (
go.uber.org/automaxprocs v1.4.0
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa
golang.org/x/net v0.0.0-20220909164309-bea034e7d591
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4
golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
google.golang.org/protobuf v1.28.0
@ -89,11 +89,10 @@ require (
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
golang.org/x/mod v0.4.2 // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094 // indirect
golang.org/x/text v0.3.8 // indirect
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2 // indirect
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
golang.org/x/tools v0.1.12 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90 // indirect
google.golang.org/grpc v1.47.0 // indirect

13
go.sum

@ -681,8 +681,9 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -780,8 +781,9 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f h1:Ax0t5p6N38Ga0dThY21weqDEyz2oklo4IvDkpigvkD8=
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -879,8 +881,9 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -945,15 +948,15 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2 h1:BonxutuHCTL0rBDnZlKjpGIQFTjyUVTexFOdWkB6Fg0=
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=

78
vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go generated vendored Normal file

@ -0,0 +1,78 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package lazyregexp is a thin wrapper over regexp, allowing the use of global
// regexp variables without forcing them to be compiled at init.
package lazyregexp
import (
"os"
"regexp"
"strings"
"sync"
)
// Regexp is a wrapper around regexp.Regexp, where the underlying regexp will be
// compiled the first time it is needed.
type Regexp struct {
str string
once sync.Once
rx *regexp.Regexp
}
func (r *Regexp) re() *regexp.Regexp {
r.once.Do(r.build)
return r.rx
}
func (r *Regexp) build() {
r.rx = regexp.MustCompile(r.str)
r.str = ""
}
func (r *Regexp) FindSubmatch(s []byte) [][]byte {
return r.re().FindSubmatch(s)
}
func (r *Regexp) FindStringSubmatch(s string) []string {
return r.re().FindStringSubmatch(s)
}
func (r *Regexp) FindStringSubmatchIndex(s string) []int {
return r.re().FindStringSubmatchIndex(s)
}
func (r *Regexp) ReplaceAllString(src, repl string) string {
return r.re().ReplaceAllString(src, repl)
}
func (r *Regexp) FindString(s string) string {
return r.re().FindString(s)
}
func (r *Regexp) FindAllString(s string, n int) []string {
return r.re().FindAllString(s, n)
}
func (r *Regexp) MatchString(s string) bool {
return r.re().MatchString(s)
}
func (r *Regexp) SubexpNames() []string {
return r.re().SubexpNames()
}
var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test")
// New creates a new lazy regexp, delaying the compiling work until it is first
// needed. If the code is being run as part of tests, the regexp compiling will
// happen immediately.
func New(str string) *Regexp {
lr := &Regexp{str: str}
if inTest {
// In tests, always compile the regexps early.
lr.re()
}
return lr
}

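For context, a minimal hedged sketch of the lazy-compilation pattern this vendored helper provides. lazyregexp is an internal package of golang.org/x/mod, so the import below only compiles from inside that module; the variable and function names are illustrative.

package example

import "golang.org/x/mod/internal/lazyregexp"

// hexRevRE is not compiled until first use (or eagerly when running under `go test`).
var hexRevRE = lazyregexp.New(`^[0-9a-f]{12}$`)

func looksLikeRevision(s string) bool {
	// The first call compiles the pattern exactly once via sync.Once.
	return hexRevRE.MatchString(s)
}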
View File

@ -15,7 +15,7 @@
// but additional checking functions, most notably Check, verify that
// a particular path, version pair is valid.
//
// Escaped Paths
// # Escaped Paths
//
// Module paths appear as substrings of file system paths
// (in the download cache) and of web server URLs in the proxy protocol.
@ -55,7 +55,7 @@
// Import paths have never allowed exclamation marks, so there is no
// need to define how to escape a literal !.
//
// Unicode Restrictions
// # Unicode Restrictions
//
// Today, paths are disallowed from using Unicode.
//
@ -102,9 +102,9 @@ import (
"strings"
"unicode"
"unicode/utf8"
"errors"
"golang.org/x/mod/semver"
errors "golang.org/x/xerrors"
)
// A Version (for clients, a module.Version) is defined by a module path and version pair.
@ -192,6 +192,21 @@ func (e *InvalidVersionError) Error() string {
func (e *InvalidVersionError) Unwrap() error { return e.Err }
// An InvalidPathError indicates a module, import, or file path doesn't
// satisfy all naming constraints. See CheckPath, CheckImportPath,
// and CheckFilePath for specific restrictions.
type InvalidPathError struct {
Kind string // "module", "import", or "file"
Path string
Err error
}
func (e *InvalidPathError) Error() string {
return fmt.Sprintf("malformed %s path %q: %v", e.Kind, e.Path, e.Err)
}
func (e *InvalidPathError) Unwrap() error { return e.Err }
// Check checks that a given module path, version pair is valid.
// In addition to the path being a valid module path
// and the version being a valid semantic version,
@ -271,12 +286,7 @@ func fileNameOK(r rune) bool {
if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' {
return true
}
for i := 0; i < len(allowed); i++ {
if rune(allowed[i]) == r {
return true
}
}
return false
return strings.ContainsRune(allowed, r)
}
// It may be OK to add more ASCII punctuation here, but only carefully.
// For example Windows disallows < > \, and macOS disallows :, so we must not allow those.
@ -296,30 +306,36 @@ func fileNameOK(r rune) bool {
// this second requirement is replaced by a requirement that the path
// follow the gopkg.in server's conventions.
// Third, no path element may begin with a dot.
func CheckPath(path string) error {
func CheckPath(path string) (err error) {
defer func() {
if err != nil {
err = &InvalidPathError{Kind: "module", Path: path, Err: err}
}
}()
if err := checkPath(path, modulePath); err != nil {
return fmt.Errorf("malformed module path %q: %v", path, err)
return err
}
i := strings.Index(path, "/")
if i < 0 {
i = len(path)
}
if i == 0 {
return fmt.Errorf("malformed module path %q: leading slash", path)
return fmt.Errorf("leading slash")
}
if !strings.Contains(path[:i], ".") {
return fmt.Errorf("malformed module path %q: missing dot in first path element", path)
return fmt.Errorf("missing dot in first path element")
}
if path[0] == '-' {
return fmt.Errorf("malformed module path %q: leading dash in first path element", path)
return fmt.Errorf("leading dash in first path element")
}
for _, r := range path[:i] {
if !firstPathOK(r) {
return fmt.Errorf("malformed module path %q: invalid char %q in first path element", path, r)
return fmt.Errorf("invalid char %q in first path element", r)
}
}
if _, _, ok := SplitPathVersion(path); !ok {
return fmt.Errorf("malformed module path %q: invalid version", path)
return fmt.Errorf("invalid version")
}
return nil
}
@ -343,7 +359,7 @@ func CheckPath(path string) error {
// subtleties of Unicode.
func CheckImportPath(path string) error {
if err := checkPath(path, importPath); err != nil {
return fmt.Errorf("malformed import path %q: %v", path, err)
return &InvalidPathError{Kind: "import", Path: path, Err: err}
}
return nil
}
@ -358,12 +374,13 @@ const (
filePath
)
// checkPath checks that a general path is valid.
// It returns an error describing why but not mentioning path.
// Because these checks apply to both module paths and import paths,
// the caller is expected to add the "malformed ___ path %q: " prefix.
// fileName indicates whether the final element of the path is a file name
// (as opposed to a directory name).
// checkPath checks that a general path is valid. kind indicates what
// specific constraints should be applied.
//
// checkPath returns an error describing why the path is not valid.
// Because these checks apply to module, import, and file paths,
// and because other checks may be applied, the caller is expected to wrap
// this error with InvalidPathError.
func checkPath(path string, kind pathKind) error {
if !utf8.ValidString(path) {
return fmt.Errorf("invalid UTF-8")
@ -371,7 +388,7 @@ func checkPath(path string, kind pathKind) error {
if path == "" {
return fmt.Errorf("empty string")
}
if path[0] == '-' {
if path[0] == '-' && kind != filePath {
return fmt.Errorf("leading dash")
}
if strings.Contains(path, "//") {
@ -477,7 +494,7 @@ func checkElem(elem string, kind pathKind) error {
// subtleties of Unicode.
func CheckFilePath(path string) error {
if err := checkPath(path, filePath); err != nil {
return fmt.Errorf("malformed file path %q: %v", path, err)
return &InvalidPathError{Kind: "file", Path: path, Err: err}
}
return nil
}
@ -781,6 +798,7 @@ func unescapeString(escaped string) (string, bool) {
// GOPRIVATE environment variable, as described by 'go help module-private'.
//
// It ignores any empty or malformed patterns in the list.
// Trailing slashes on patterns are ignored.
func MatchPrefixPatterns(globs, target string) bool {
for globs != "" {
// Extract next non-empty glob in comma-separated list.
@ -790,6 +808,7 @@ func MatchPrefixPatterns(globs, target string) bool {
} else {
glob, globs = globs, ""
}
glob = strings.TrimSuffix(glob, "/")
if glob == "" {
continue
}

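The net effect of the module.go hunks above is that validation failures from CheckPath, CheckImportPath, and CheckFilePath now unwrap to the new *InvalidPathError instead of a flat formatted string. A minimal hedged sketch of how a caller of the bumped golang.org/x/mod can inspect that (the example path is made up):

package main

import (
	"errors"
	"fmt"

	"golang.org/x/mod/module"
)

func main() {
	err := module.CheckPath("-example.com/foo") // leading dash is invalid
	var perr *module.InvalidPathError
	if errors.As(err, &perr) {
		// Kind is "module"; Err carries the underlying reason.
		fmt.Printf("bad %s path %q: %v\n", perr.Kind, perr.Path, perr.Err)
	}
}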
250
vendor/golang.org/x/mod/module/pseudo.go generated vendored Normal file

@ -0,0 +1,250 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Pseudo-versions
//
// Code authors are expected to tag the revisions they want users to use,
// including prereleases. However, not all authors tag versions at all,
// and not all commits a user might want to try will have tags.
// A pseudo-version is a version with a special form that allows us to
// address an untagged commit and order that version with respect to
// other versions we might encounter.
//
// A pseudo-version takes one of the general forms:
//
// (1) vX.0.0-yyyymmddhhmmss-abcdef123456
// (2) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456
// (3) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible
// (4) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456
// (5) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible
//
// If there is no recently tagged version with the right major version vX,
// then form (1) is used, creating a space of pseudo-versions at the bottom
// of the vX version range, less than any tagged version, including the unlikely v0.0.0.
//
// If the most recent tagged version before the target commit is vX.Y.Z or vX.Y.Z+incompatible,
// then the pseudo-version uses form (2) or (3), making it a prerelease for the next
// possible semantic version after vX.Y.Z. The leading 0 segment in the prerelease string
// ensures that the pseudo-version compares less than possible future explicit prereleases
// like vX.Y.(Z+1)-rc1 or vX.Y.(Z+1)-1.
//
// If the most recent tagged version before the target commit is vX.Y.Z-pre or vX.Y.Z-pre+incompatible,
// then the pseudo-version uses form (4) or (5), making it a slightly later prerelease.
package module
import (
"errors"
"fmt"
"strings"
"time"
"golang.org/x/mod/internal/lazyregexp"
"golang.org/x/mod/semver"
)
var pseudoVersionRE = lazyregexp.New(`^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)\d{14}-[A-Za-z0-9]+(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$`)
const PseudoVersionTimestampFormat = "20060102150405"
// PseudoVersion returns a pseudo-version for the given major version ("v1")
// preexisting older tagged version ("" or "v1.2.3" or "v1.2.3-pre"), revision time,
// and revision identifier (usually a 12-byte commit hash prefix).
func PseudoVersion(major, older string, t time.Time, rev string) string {
if major == "" {
major = "v0"
}
segment := fmt.Sprintf("%s-%s", t.UTC().Format(PseudoVersionTimestampFormat), rev)
build := semver.Build(older)
older = semver.Canonical(older)
if older == "" {
return major + ".0.0-" + segment // form (1)
}
if semver.Prerelease(older) != "" {
return older + ".0." + segment + build // form (4), (5)
}
// Form (2), (3).
// Extract patch from vMAJOR.MINOR.PATCH
i := strings.LastIndex(older, ".") + 1
v, patch := older[:i], older[i:]
// Reassemble.
return v + incDecimal(patch) + "-0." + segment + build
}
// ZeroPseudoVersion returns a pseudo-version with a zero timestamp and
// revision, which may be used as a placeholder.
func ZeroPseudoVersion(major string) string {
return PseudoVersion(major, "", time.Time{}, "000000000000")
}
// incDecimal returns the decimal string incremented by 1.
func incDecimal(decimal string) string {
// Scan right to left turning 9s to 0s until you find a digit to increment.
digits := []byte(decimal)
i := len(digits) - 1
for ; i >= 0 && digits[i] == '9'; i-- {
digits[i] = '0'
}
if i >= 0 {
digits[i]++
} else {
// digits is all zeros
digits[0] = '1'
digits = append(digits, '0')
}
return string(digits)
}
// decDecimal returns the decimal string decremented by 1, or the empty string
// if the decimal is all zeroes.
func decDecimal(decimal string) string {
// Scan right to left turning 0s to 9s until you find a digit to decrement.
digits := []byte(decimal)
i := len(digits) - 1
for ; i >= 0 && digits[i] == '0'; i-- {
digits[i] = '9'
}
if i < 0 {
// decimal is all zeros
return ""
}
if i == 0 && digits[i] == '1' && len(digits) > 1 {
digits = digits[1:]
} else {
digits[i]--
}
return string(digits)
}
// IsPseudoVersion reports whether v is a pseudo-version.
func IsPseudoVersion(v string) bool {
return strings.Count(v, "-") >= 2 && semver.IsValid(v) && pseudoVersionRE.MatchString(v)
}
// IsZeroPseudoVersion returns whether v is a pseudo-version with a zero base,
// timestamp, and revision, as returned by ZeroPseudoVersion.
func IsZeroPseudoVersion(v string) bool {
return v == ZeroPseudoVersion(semver.Major(v))
}
// PseudoVersionTime returns the time stamp of the pseudo-version v.
// It returns an error if v is not a pseudo-version or if the time stamp
// embedded in the pseudo-version is not a valid time.
func PseudoVersionTime(v string) (time.Time, error) {
_, timestamp, _, _, err := parsePseudoVersion(v)
if err != nil {
return time.Time{}, err
}
t, err := time.Parse("20060102150405", timestamp)
if err != nil {
return time.Time{}, &InvalidVersionError{
Version: v,
Pseudo: true,
Err: fmt.Errorf("malformed time %q", timestamp),
}
}
return t, nil
}
// PseudoVersionRev returns the revision identifier of the pseudo-version v.
// It returns an error if v is not a pseudo-version.
func PseudoVersionRev(v string) (rev string, err error) {
_, _, rev, _, err = parsePseudoVersion(v)
return
}
// PseudoVersionBase returns the canonical parent version, if any, upon which
// the pseudo-version v is based.
//
// If v has no parent version (that is, if it is "vX.0.0-[…]"),
// PseudoVersionBase returns the empty string and a nil error.
func PseudoVersionBase(v string) (string, error) {
base, _, _, build, err := parsePseudoVersion(v)
if err != nil {
return "", err
}
switch pre := semver.Prerelease(base); pre {
case "":
// vX.0.0-yyyymmddhhmmss-abcdef123456 → ""
if build != "" {
// Pseudo-versions of the form vX.0.0-yyyymmddhhmmss-abcdef123456+incompatible
// are nonsensical: the "vX.0.0-" prefix implies that there is no parent tag,
// but the "+incompatible" suffix implies that the major version of
// the parent tag is not compatible with the module's import path.
//
// There are a few such entries in the index generated by proxy.golang.org,
// but we believe those entries were generated by the proxy itself.
return "", &InvalidVersionError{
Version: v,
Pseudo: true,
Err: fmt.Errorf("lacks base version, but has build metadata %q", build),
}
}
return "", nil
case "-0":
// vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456 → vX.Y.Z
// vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible → vX.Y.Z+incompatible
base = strings.TrimSuffix(base, pre)
i := strings.LastIndexByte(base, '.')
if i < 0 {
panic("base from parsePseudoVersion missing patch number: " + base)
}
patch := decDecimal(base[i+1:])
if patch == "" {
// vX.0.0-0 is invalid, but has been observed in the wild in the index
// generated by requests to proxy.golang.org.
//
// NOTE(bcmills): I cannot find a historical bug that accounts for
// pseudo-versions of this form, nor have I seen such versions in any
// actual go.mod files. If we find actual examples of this form and a
// reasonable theory of how they came into existence, it seems fine to
// treat them as equivalent to vX.0.0 (especially since the invalid
// pseudo-versions have lower precedence than the real ones). For now, we
// reject them.
return "", &InvalidVersionError{
Version: v,
Pseudo: true,
Err: fmt.Errorf("version before %s would have negative patch number", base),
}
}
return base[:i+1] + patch + build, nil
default:
// vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456 → vX.Y.Z-pre
// vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible → vX.Y.Z-pre+incompatible
if !strings.HasSuffix(base, ".0") {
panic(`base from parsePseudoVersion missing ".0" before date: ` + base)
}
return strings.TrimSuffix(base, ".0") + build, nil
}
}
var errPseudoSyntax = errors.New("syntax error")
func parsePseudoVersion(v string) (base, timestamp, rev, build string, err error) {
if !IsPseudoVersion(v) {
return "", "", "", "", &InvalidVersionError{
Version: v,
Pseudo: true,
Err: errPseudoSyntax,
}
}
build = semver.Build(v)
v = strings.TrimSuffix(v, build)
j := strings.LastIndex(v, "-")
v, rev = v[:j], v[j+1:]
i := strings.LastIndex(v, "-")
if j := strings.LastIndex(v, "."); j > i {
base = v[:j] // "vX.Y.Z-pre.0" or "vX.Y.(Z+1)-0"
timestamp = v[j+1:]
} else {
base = v[:i] // "vX.0.0"
timestamp = v[i+1:]
}
return base, timestamp, rev, build, nil
}

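To make the pseudo-version forms described in the file comment concrete, a short hedged sketch using the exported helpers from golang.org/x/mod/module (the timestamp and revision are placeholders):

package main

import (
	"fmt"
	"time"

	"golang.org/x/mod/module"
)

func main() {
	when := time.Date(2022, 10, 19, 22, 4, 21, 0, time.UTC)

	// Form (1): no tagged parent version below the target commit.
	fmt.Println(module.PseudoVersion("v0", "", when, "abcdef123456"))
	// v0.0.0-20221019220421-abcdef123456

	// Form (2): parent tag v1.2.3, so the patch is bumped and a "-0" prerelease added.
	v := module.PseudoVersion("v1", "v1.2.3", when, "abcdef123456")
	fmt.Println(v)                         // v1.2.4-0.20221019220421-abcdef123456
	fmt.Println(module.IsPseudoVersion(v)) // true
}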
View File

@ -22,6 +22,8 @@
// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0.
package semver
import "sort"
// parsed returns the parsed form of a semantic version string.
type parsed struct {
major string
@ -30,7 +32,6 @@ type parsed struct {
short string
prerelease string
build string
err string
}
// IsValid reports whether v is a valid semantic version string.
@ -150,14 +151,30 @@ func Max(v, w string) string {
return w
}
// ByVersion implements sort.Interface for sorting semantic version strings.
type ByVersion []string
func (vs ByVersion) Len() int { return len(vs) }
func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] }
func (vs ByVersion) Less(i, j int) bool {
cmp := Compare(vs[i], vs[j])
if cmp != 0 {
return cmp < 0
}
return vs[i] < vs[j]
}
// Sort sorts a list of semantic version strings using ByVersion.
func Sort(list []string) {
sort.Sort(ByVersion(list))
}
func parse(v string) (p parsed, ok bool) {
if v == "" || v[0] != 'v' {
p.err = "missing v prefix"
return
}
p.major, v, ok = parseInt(v[1:])
if !ok {
p.err = "bad major version"
return
}
if v == "" {
@ -167,13 +184,11 @@ func parse(v string) (p parsed, ok bool) {
return
}
if v[0] != '.' {
p.err = "bad minor prefix"
ok = false
return
}
p.minor, v, ok = parseInt(v[1:])
if !ok {
p.err = "bad minor version"
return
}
if v == "" {
@ -182,31 +197,26 @@ func parse(v string) (p parsed, ok bool) {
return
}
if v[0] != '.' {
p.err = "bad patch prefix"
ok = false
return
}
p.patch, v, ok = parseInt(v[1:])
if !ok {
p.err = "bad patch version"
return
}
if len(v) > 0 && v[0] == '-' {
p.prerelease, v, ok = parsePrerelease(v)
if !ok {
p.err = "bad prerelease"
return
}
}
if len(v) > 0 && v[0] == '+' {
p.build, v, ok = parseBuild(v)
if !ok {
p.err = "bad build"
return
}
}
if v != "" {
p.err = "junk on end"
ok = false
return
}

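The ByVersion/Sort additions above give x/mod/semver a ready-made semantic ordering for version strings; a minimal hedged sketch (the version list is arbitrary):

package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

func main() {
	vs := []string{"v1.10.0", "v1.2.0", "v1.2.0-rc.1", "not-semver"}
	// Invalid strings sort first; prereleases sort before their release.
	semver.Sort(vs)
	fmt.Println(vs) // [not-semver v1.2.0-rc.1 v1.2.0 v1.10.0]
}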
3
vendor/golang.org/x/sync/AUTHORS generated vendored

@ -1,3 +0,0 @@
# This source code refers to The Go Authors for copyright purposes.
# The master list of authors is in the main Go distribution,
# visible at http://tip.golang.org/AUTHORS.

View File

@ -1,3 +0,0 @@
# This source code was written by the Go contributors.
# The master list of contributors is in the main Go distribution,
# visible at http://tip.golang.org/CONTRIBUTORS.

3
vendor/golang.org/x/text/AUTHORS generated vendored

@ -1,3 +0,0 @@
# This source code refers to The Go Authors for copyright purposes.
# The master list of authors is in the main Go distribution,
# visible at http://tip.golang.org/AUTHORS.

View File

@ -1,3 +0,0 @@
# This source code was written by the Go contributors.
# The master list of contributors is in the main Go distribution,
# visible at http://tip.golang.org/CONTRIBUTORS.

View File

@ -193,14 +193,14 @@ func (p *paragraph) run() {
//
// At the end of this function:
//
// - The member variable matchingPDI is set to point to the index of the
// matching PDI character for each isolate initiator character. If there is
// no matching PDI, it is set to the length of the input text. For other
// characters, it is set to -1.
// - The member variable matchingIsolateInitiator is set to point to the
// index of the matching isolate initiator character for each PDI character.
// If there is no matching isolate initiator, or the character is not a PDI,
// it is set to -1.
// - The member variable matchingPDI is set to point to the index of the
// matching PDI character for each isolate initiator character. If there is
// no matching PDI, it is set to the length of the input text. For other
// characters, it is set to -1.
// - The member variable matchingIsolateInitiator is set to point to the
// index of the matching isolate initiator character for each PDI character.
// If there is no matching isolate initiator, or the character is not a PDI,
// it is set to -1.
func (p *paragraph) determineMatchingIsolates() {
p.matchingPDI = make([]int, p.Len())
p.matchingIsolateInitiator = make([]int, p.Len())
@ -435,7 +435,7 @@ func maxLevel(a, b level) level {
}
// Rule X10, second bullet: Determine the start-of-sequence (sos) and end-of-sequence (eos) types,
// either L or R, for each isolating run sequence.
// either L or R, for each isolating run sequence.
func (p *paragraph) isolatingRunSequence(indexes []int) *isolatingRunSequence {
length := len(indexes)
types := make([]Class, length)
@ -495,9 +495,9 @@ func (s *isolatingRunSequence) resolveWeakTypes() {
if t == NSM {
s.types[i] = precedingCharacterType
} else {
if t.in(LRI, RLI, FSI, PDI) {
precedingCharacterType = ON
}
// if t.in(LRI, RLI, FSI, PDI) {
// precedingCharacterType = ON
// }
precedingCharacterType = t
}
}
@ -905,7 +905,7 @@ func (p *paragraph) getLevels(linebreaks []int) []level {
// Lines are concatenated from left to right. So for example, the fifth
// character from the left on the third line is
//
// getReordering(linebreaks)[linebreaks[1] + 4]
// getReordering(linebreaks)[linebreaks[1] + 4]
//
// (linebreaks[1] is the position after the last character of the second
// line, which is also the index of the first character on the third line,

View File

@ -110,10 +110,11 @@ func (p Properties) BoundaryAfter() bool {
}
// We pack quick check data in 4 bits:
// 5: Combines forward (0 == false, 1 == true)
// 4..3: NFC_QC Yes(00), No (10), or Maybe (11)
// 2: NFD_QC Yes (0) or No (1). No also means there is a decomposition.
// 1..0: Number of trailing non-starters.
//
// 5: Combines forward (0 == false, 1 == true)
// 4..3: NFC_QC Yes(00), No (10), or Maybe (11)
// 2: NFD_QC Yes (0) or No (1). No also means there is a decomposition.
// 1..0: Number of trailing non-starters.
//
// When all 4 bits are zero, the character is inert, meaning it is never
// influenced by normalization.

View File

@ -18,16 +18,17 @@ import (
// A Form denotes a canonical representation of Unicode code points.
// The Unicode-defined normalization and equivalence forms are:
//
// NFC Unicode Normalization Form C
// NFD Unicode Normalization Form D
// NFKC Unicode Normalization Form KC
// NFKD Unicode Normalization Form KD
// NFC Unicode Normalization Form C
// NFD Unicode Normalization Form D
// NFKC Unicode Normalization Form KC
// NFKD Unicode Normalization Form KD
//
// For a Form f, this documentation uses the notation f(x) to mean
// the bytes or string x converted to the given form.
// A position n in x is called a boundary if conversion to the form can
// proceed independently on both sides:
// f(x) == append(f(x[0:n]), f(x[n:])...)
//
// f(x) == append(f(x[0:n]), f(x[n:])...)
//
// References: https://unicode.org/reports/tr15/ and
// https://unicode.org/notes/tn5/.

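As a concrete illustration of the normalization forms documented above, a small hedged sketch using golang.org/x/text/unicode/norm (the sample string is arbitrary):

package main

import (
	"fmt"

	"golang.org/x/text/unicode/norm"
)

func main() {
	decomposed := "e\u0301" // 'e' followed by a combining acute accent (NFD)
	composed := norm.NFC.String(decomposed)

	fmt.Println(composed == "\u00e9")                    // true: the single code point é
	fmt.Println(len(decomposed), len(composed))          // 3 2 (bytes)
	fmt.Println(norm.NFD.String(composed) == decomposed) // true: NFD round-trips
}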
View File

@ -7315,7 +7315,7 @@ const recompMapPacked = "" +
"\x00V\x03\x03\x00\x00\x1e|" + // 0x00560303: 0x00001E7C
"\x00v\x03\x03\x00\x00\x1e}" + // 0x00760303: 0x00001E7D
"\x00V\x03#\x00\x00\x1e~" + // 0x00560323: 0x00001E7E
"\x00v\x03#\x00\x00\x1e\u007f" + // 0x00760323: 0x00001E7F
"\x00v\x03#\x00\x00\x1e\x7f" + // 0x00760323: 0x00001E7F
"\x00W\x03\x00\x00\x00\x1e\x80" + // 0x00570300: 0x00001E80
"\x00w\x03\x00\x00\x00\x1e\x81" + // 0x00770300: 0x00001E81
"\x00W\x03\x01\x00\x00\x1e\x82" + // 0x00570301: 0x00001E82
@ -7342,7 +7342,7 @@ const recompMapPacked = "" +
"\x00t\x03\b\x00\x00\x1e\x97" + // 0x00740308: 0x00001E97
"\x00w\x03\n\x00\x00\x1e\x98" + // 0x0077030A: 0x00001E98
"\x00y\x03\n\x00\x00\x1e\x99" + // 0x0079030A: 0x00001E99
"\x01\u007f\x03\a\x00\x00\x1e\x9b" + // 0x017F0307: 0x00001E9B
"\x01\x7f\x03\a\x00\x00\x1e\x9b" + // 0x017F0307: 0x00001E9B
"\x00A\x03#\x00\x00\x1e\xa0" + // 0x00410323: 0x00001EA0
"\x00a\x03#\x00\x00\x1e\xa1" + // 0x00610323: 0x00001EA1
"\x00A\x03\t\x00\x00\x1e\xa2" + // 0x00410309: 0x00001EA2

3
vendor/golang.org/x/tools/AUTHORS generated vendored

@ -1,3 +0,0 @@
# This source code refers to The Go Authors for copyright purposes.
# The master list of authors is in the main Go distribution,
# visible at http://tip.golang.org/AUTHORS.

View File

@ -1,3 +0,0 @@
# This source code was written by the Go contributors.
# The master list of contributors is in the main Go distribution,
# visible at http://tip.golang.org/CONTRIBUTORS.

View File

@ -11,6 +11,8 @@ import (
"go/ast"
"go/token"
"sort"
"golang.org/x/tools/internal/typeparams"
)
// PathEnclosingInterval returns the node that encloses the source
@ -20,9 +22,9 @@ import (
// additional whitespace abutting a node to be enclosed by it.
// In this example:
//
// z := x + y // add them
// <-A->
// <----B----->
// z := x + y // add them
// <-A->
// <----B----->
//
// the ast.BinaryExpr(+) node is considered to enclose interval B
// even though its [Pos()..End()) is actually only interval A.
@ -41,10 +43,10 @@ import (
// interior whitespace of path[0].
// In this example:
//
// z := x + y // add them
// <--C--> <---E-->
// ^
// D
// z := x + y // add them
// <--C--> <---E-->
// ^
// D
//
// intervals C, D and E are inexact. C is contained by the
// z-assignment statement, because it spans three of its children (:=,
@ -52,12 +54,11 @@ import (
// interior whitespace of the assignment. E is considered interior
// whitespace of the BlockStmt containing the assignment.
//
// Precondition: [start, end) both lie within the same file as root.
// TODO(adonovan): return (nil, false) in this case and remove precond.
// Requires FileSet; see loader.tokenFileContainsPos.
//
// Postcondition: path is never nil; it always contains at least 'root'.
//
// The resulting path is never empty; it always contains at least the
// 'root' *ast.File. Ideally PathEnclosingInterval would reject
// intervals that lie wholly or partially outside the range of the
// file, but unfortunately ast.File records only the token.Pos of
// the 'package' keyword, but not of the start of the file itself.
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
@ -133,6 +134,7 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
return false // inexact: overlaps multiple children
}
// Ensure [start,end) is nondecreasing.
if start > end {
start, end = end, start
}
@ -160,7 +162,6 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
// tokenNode is a dummy implementation of ast.Node for a single token.
// They are used transiently by PathEnclosingInterval but never escape
// this package.
//
type tokenNode struct {
pos token.Pos
end token.Pos
@ -181,7 +182,6 @@ func tok(pos token.Pos, len int) ast.Node {
// childrenOf returns the direct non-nil children of ast.Node n.
// It may include fake ast.Node implementations for bare tokens.
// it is not safe to call (e.g.) ast.Walk on such nodes.
//
func childrenOf(n ast.Node) []ast.Node {
var children []ast.Node
@ -294,8 +294,8 @@ func childrenOf(n ast.Node) []ast.Node {
case *ast.FieldList:
children = append(children,
tok(n.Opening, len("(")),
tok(n.Closing, len(")")))
tok(n.Opening, len("(")), // or len("[")
tok(n.Closing, len(")"))) // or len("]")
case *ast.File:
// TODO test: Doc
@ -322,6 +322,9 @@ func childrenOf(n ast.Node) []ast.Node {
children = append(children, n.Recv)
}
children = append(children, n.Name)
if tparams := typeparams.ForFuncType(n.Type); tparams != nil {
children = append(children, tparams)
}
if n.Type.Params != nil {
children = append(children, n.Type.Params)
}
@ -371,8 +374,13 @@ func childrenOf(n ast.Node) []ast.Node {
case *ast.IndexExpr:
children = append(children,
tok(n.Lbrack, len("{")),
tok(n.Rbrack, len("}")))
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *typeparams.IndexListExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.InterfaceType:
children = append(children,
@ -478,7 +486,6 @@ func (sl byPos) Swap(i, j int) {
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root. Perhaps we should do that.
//
func NodeDescription(n ast.Node) string {
switch n := n.(type) {
case *ast.ArrayType:
@ -581,6 +588,8 @@ func NodeDescription(n ast.Node) string {
return "decrement statement"
case *ast.IndexExpr:
return "index expression"
case *typeparams.IndexListExpr:
return "index list expression"
case *ast.InterfaceType:
return "interface type"
case *ast.KeyValueExpr:

View File

@ -22,8 +22,11 @@ func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
// If name is not empty, it is used to rename the import.
//
// For example, calling
//
// AddNamedImport(fset, f, "pathpkg", "path")
//
// adds
//
// import pathpkg "path"
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
if imports(f, name, path) {
@ -270,8 +273,8 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
}
if j > 0 {
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
line := fset.Position(impspec.Path.ValuePos).Line
lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
line := fset.PositionFor(impspec.Path.ValuePos, false).Line
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
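For reference, the AddNamedImport behaviour documented above can be exercised directly; a minimal hedged sketch (the file name and source text are made up):

package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", "package demo\n", parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// Adds: import pathpkg "path"
	astutil.AddNamedImport(fset, f, "pathpkg", "path")
	printer.Fprint(os.Stdout, fset, f)
}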

View File

@ -41,7 +41,6 @@ type ApplyFunc func(*Cursor) bool
// Children are traversed in the order in which they appear in the
// respective node's struct definition. A package's files are
// traversed in the filenames' alphabetical order.
//
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
parent := &struct{ ast.Node }{root}
defer func() {
@ -65,8 +64,8 @@ var abort = new(int) // singleton, to signal termination of Apply
// c.Parent(), and f is the field identifier with name c.Name(),
// the following invariants hold:
//
// p.f == c.Node() if c.Index() < 0
// p.f[c.Index()] == c.Node() if c.Index() >= 0
// p.f == c.Node() if c.Index() < 0
// p.f[c.Index()] == c.Node() if c.Index() >= 0
//
// The methods Replace, Delete, InsertBefore, and InsertAfter
// can be used to change the AST without disrupting Apply.
@ -253,6 +252,10 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
a.apply(n, "X", nil, n.X)
a.apply(n, "Index", nil, n.Index)
case *typeparams.IndexListExpr:
a.apply(n, "X", nil, n.X)
a.applyList(n, "Indices")
case *ast.SliceExpr:
a.apply(n, "X", nil, n.X)
a.apply(n, "Low", nil, n.Low)
@ -290,6 +293,9 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
a.apply(n, "Fields", nil, n.Fields)
case *ast.FuncType:
if tparams := typeparams.ForFuncType(n); tparams != nil {
a.apply(n, "TypeParams", nil, tparams)
}
a.apply(n, "Params", nil, n.Params)
a.apply(n, "Results", nil, n.Results)
@ -402,6 +408,9 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
case *ast.TypeSpec:
a.apply(n, "Doc", nil, n.Doc)
a.apply(n, "Name", nil, n.Name)
if tparams := typeparams.ForTypeSpec(n); tparams != nil {
a.apply(n, "TypeParams", nil, tparams)
}
a.apply(n, "Type", nil, n.Type)
a.apply(n, "Comment", nil, n.Comment)
@ -439,11 +448,7 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
}
default:
if typeparams.IsListExpr(n) {
a.applyList(n, "ElemList")
} else {
panic(fmt.Sprintf("Apply: unexpected node type %T", n))
}
panic(fmt.Sprintf("Apply: unexpected node type %T", n))
}
if a.post != nil && !a.post(&a.cursor) {

View File

@ -9,7 +9,11 @@ package inspector
// The initial map-based implementation was too slow;
// see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196
import "go/ast"
import (
"go/ast"
"golang.org/x/tools/internal/typeparams"
)
const (
nArrayType = iota
@ -47,6 +51,7 @@ const (
nImportSpec
nIncDecStmt
nIndexExpr
nIndexListExpr
nInterfaceType
nKeyValueExpr
nLabeledStmt
@ -72,12 +77,14 @@ const (
// typeOf returns a distinct single-bit value that represents the type of n.
//
// Various implementations were benchmarked with BenchmarkNewInspector:
// GOGC=off
// - type switch 4.9-5.5ms 2.1ms
// - binary search over a sorted list of types 5.5-5.9ms 2.5ms
// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms
// - linear scan, unordered list 6.4ms 2.7ms
// - hash table 6.5ms 3.1ms
//
// GOGC=off
// - type switch 4.9-5.5ms 2.1ms
// - binary search over a sorted list of types 5.5-5.9ms 2.5ms
// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms
// - linear scan, unordered list 6.4ms 2.7ms
// - hash table 6.5ms 3.1ms
//
// A perfect hash seemed like overkill.
//
// The compiler's switch statement is the clear winner
@ -85,7 +92,6 @@ const (
// with constant conditions and good branch prediction.
// (Sadly it is the most verbose in source code.)
// Binary search suffered from poor branch prediction.
//
func typeOf(n ast.Node) uint64 {
// Fast path: nearly half of all nodes are identifiers.
if _, ok := n.(*ast.Ident); ok {
@ -164,6 +170,8 @@ func typeOf(n ast.Node) uint64 {
return 1 << nIncDecStmt
case *ast.IndexExpr:
return 1 << nIndexExpr
case *typeparams.IndexListExpr:
return 1 << nIndexListExpr
case *ast.InterfaceType:
return 1 << nInterfaceType
case *ast.KeyValueExpr:

View File

@ -17,32 +17,47 @@
// developer tools, which will then be able to consume both Go 1.7 and
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://golang.org/issue/15651.)
//
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"go/token"
"go/types"
"io"
"io/ioutil"
"os/exec"
"golang.org/x/tools/go/internal/gcimporter"
)
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
// using the workspace layout conventions of go/build.
// using the go command.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
//
// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
// which is more efficient.
func Find(importPath, srcDir string) (filename, path string) {
return gcimporter.FindPkg(importPath, srcDir)
cmd := exec.Command("go", "list", "-json", "-export", "--", importPath)
cmd.Dir = srcDir
out, err := cmd.CombinedOutput()
if err != nil {
return "", ""
}
var data struct {
ImportPath string
Export string
}
json.Unmarshal(out, &data)
return data.Export, data.ImportPath
}
// NewReader returns a reader for the export data section of an object
@ -50,11 +65,24 @@ func Find(importPath, srcDir string) (filename, path string) {
// additional trailing data beyond the end of the export data.
func NewReader(r io.Reader) (io.Reader, error) {
buf := bufio.NewReader(r)
_, err := gcimporter.FindExportData(buf)
// If we ever switch to a zip-like archive format with the ToC
// at the end, we can return the correct portion of export data,
// but for now we must return the entire rest of the file.
return buf, err
_, size, err := gcimporter.FindExportData(buf)
if err != nil {
return nil, err
}
if size >= 0 {
// We were given an archive and found the __.PKGDEF in it.
// This tells us the size of the export data, and we don't
// need to return the entire file.
return &io.LimitedReader{
R: buf,
N: size,
}, nil
} else {
// We were given an object file. As such, we don't know how large
// the export data is and must return the entire file.
return buf, nil
}
}
// Read reads export data from in, decodes it, and returns type
@ -88,13 +116,29 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
if len(data) > 0 && data[0] == 'i' {
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
return pkg, err
}
if len(data) > 0 {
switch data[0] {
case 'i':
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
return pkg, err
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
return pkg, err
case 'v', 'c', 'd':
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
return pkg, err
case 'u':
_, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
return pkg, err
default:
l := len(data)
if l > 10 {
l = 10
}
return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path)
}
}
return nil, fmt.Errorf("empty export data for %s", path)
}
// Write writes encoded type information for the specified package to out.
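Putting the updated Find, NewReader, and Read together, a hedged end-to-end sketch of loading export data for a standard-library package ("fmt" is just an example import path; error handling is kept minimal):

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Find now shells out to `go list -json -export` instead of walking GOPATH.
	filename, path := gcexportdata.Find("fmt", ".")
	if filename == "" {
		log.Fatal("no export data found for fmt")
	}

	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// NewReader now limits the reader to the export data when given an archive.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}

	fset := token.NewFileSet()
	pkg, err := gcexportdata.Read(r, fset, make(map[string]*types.Package), path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "exports", pkg.Scope().Len(), "top-level objects")
}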

View File

@ -23,6 +23,8 @@ import (
// or to control the FileSet or access the imports map populated during
// package loading.
//
// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
// which is more efficient.
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
return importer{fset, imports}
}

View File

@ -34,20 +34,19 @@ import (
// (suspected) format errors, and whenever a change is made to the format.
const debugFormat = false // default: false
// If trace is set, debugging output is printed to std out.
const trace = false // default: false
// Current export format version. Increase with each format change.
//
// Note: The latest binary (non-indexed) export format is at version 6.
// This exporter is still at level 4, but it doesn't matter since
// the binary importer can handle older versions just fine.
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE
// 4: type name objects support type aliases, uses aliasTag
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
// 2: removed unused bool in ODCL export (compiler only)
// 1: header format change (more regular), export package for _ struct fields
// 0: Go1.7 encoding
// This exporter is still at level 4, but it doesn't matter since
// the binary importer can handle older versions just fine.
//
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
// 4: type name objects support type aliases, uses aliasTag
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
// 2: removed unused bool in ODCL export (compiler only)
// 1: header format change (more regular), export package for _ struct fields
// 0: Go1.7 encoding
const exportVersion = 4
// trackAllTypes enables cycle tracking for all types, not just named
@ -92,16 +91,18 @@ func internalErrorf(format string, args ...interface{}) error {
// BExportData returns binary export data for pkg.
// If no file set is provided, position info will be missing.
func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
if !debug {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
}
// Not an internal error; panic again.
panic(e)
}
// Not an internal error; panic again.
panic(e)
}
}()
}()
}
p := exporter{
fset: fset,

View File

@ -74,9 +74,10 @@ func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []
pathList: []string{""}, // empty string is mapped to 0
fake: fakeFileSet{
fset: fset,
files: make(map[string]*token.File),
files: make(map[string]*fileInfo),
},
}
defer p.fake.setLines() // set lines for files in fset
// read version info
var versionstr string
@ -338,37 +339,49 @@ func (p *importer) pos() token.Pos {
// Synthesize a token.Pos
type fakeFileSet struct {
fset *token.FileSet
files map[string]*token.File
files map[string]*fileInfo
}
type fileInfo struct {
file *token.File
lastline int
}
const maxlines = 64 * 1024
func (s *fakeFileSet) pos(file string, line, column int) token.Pos {
// TODO(mdempsky): Make use of column.
// Since we don't know the set of needed file positions, we
// reserve maxlines positions per file.
const maxlines = 64 * 1024
// Since we don't know the set of needed file positions, we reserve maxlines
// positions per file. We delay calling token.File.SetLines until all
// positions have been calculated (by way of fakeFileSet.setLines), so that
// we can avoid setting unnecessary lines. See also golang/go#46586.
f := s.files[file]
if f == nil {
f = s.fset.AddFile(file, -1, maxlines)
f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)}
s.files[file] = f
// Allocate the fake linebreak indices on first use.
// TODO(adonovan): opt: save ~512KB using a more complex scheme?
fakeLinesOnce.Do(func() {
fakeLines = make([]int, maxlines)
for i := range fakeLines {
fakeLines[i] = i
}
})
f.SetLines(fakeLines)
}
if line > maxlines {
line = 1
}
if line > f.lastline {
f.lastline = line
}
// Treat the file as if it contained only newlines
// and column=1: use the line number as the offset.
return f.Pos(line - 1)
// Return a fake position assuming that f.file consists only of newlines.
return token.Pos(f.file.Base() + line - 1)
}
func (s *fakeFileSet) setLines() {
fakeLinesOnce.Do(func() {
fakeLines = make([]int, maxlines)
for i := range fakeLines {
fakeLines[i] = i
}
})
for _, f := range s.files {
f.file.SetLines(fakeLines[:f.lastline])
}
}
var (
@ -1029,6 +1042,7 @@ func predeclared() []types.Type {
// used internally by gc; never used by this package or in .a files
anyType{},
}
predecl = append(predecl, additionalPredeclared()...)
})
return predecl
}

View File

@ -16,7 +16,7 @@ import (
"strings"
)
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
@ -28,7 +28,8 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
size, err = strconv.Atoi(s)
length, err := strconv.Atoi(s)
size = int64(length)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = fmt.Errorf("invalid archive header")
return
@ -42,8 +43,8 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function. The hdr result
// is the string before the export data, either "$$" or "$$B".
//
func FindExportData(r *bufio.Reader) (hdr string, err error) {
// The size result is the length of the export data in bytes, or -1 if not known.
func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) {
// Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n')
if err != nil {
@ -54,7 +55,7 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) {
if string(line) == "!<arch>\n" {
// Archive file. Scan to __.PKGDEF.
var name string
if name, _, err = readGopackHeader(r); err != nil {
if name, size, err = readGopackHeader(r); err != nil {
return
}
@ -70,6 +71,7 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
size -= int64(len(line))
}
// Now at __.PKGDEF in archive or still at beginning of file.
@ -86,8 +88,12 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
size -= int64(len(line))
}
hdr = string(line)
if size < 0 {
size = -1
}
return
}

View File

@ -29,8 +29,14 @@ import (
"text/scanner"
)
// debugging/development support
const debug = false
const (
// Enable debug during development: it adds some additional checks, and
// prevents errors from being recovered.
debug = false
// If trace is set, debugging output is printed to std out.
trace = false
)
var pkgExts = [...]string{".a", ".o"}
@ -39,7 +45,6 @@ var pkgExts = [...]string{".a", ".o"}
// the build.Default build.Context). A relative srcDir is interpreted
// relative to the current working directory.
// If no file was found, an empty filename is returned.
//
func FindPkg(path, srcDir string) (filename, id string) {
if path == "" {
return
@ -103,7 +108,6 @@ func FindPkg(path, srcDir string) (filename, id string) {
// If packages[id] contains the completely imported package, that package
// can be used directly, and there is no need to call this function (but
// there is also no harm but for extra time used).
//
func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
// support for parser error handling
defer func() {
@ -127,7 +131,6 @@ func ImportData(packages map[string]*types.Package, filename, id string, data io
// Import imports a gc-generated package given its import path and srcDir, adds
// the corresponding package object to the packages map, and returns the object.
// The packages map must contain all packages already imported.
//
func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
var rc io.ReadCloser
var filename, id string
@ -178,8 +181,9 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
defer rc.Close()
var hdr string
var size int64
buf := bufio.NewReader(rc)
if hdr, err = FindExportData(buf); err != nil {
if hdr, size, err = FindExportData(buf); err != nil {
return
}
@ -207,10 +211,27 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
if len(data) > 0 && data[0] == 'i' {
_, pkg, err = IImportData(fset, packages, data[1:], id)
} else {
_, pkg, err = BImportData(fset, packages, data, id)
if len(data) > 0 {
switch data[0] {
case 'i':
_, pkg, err := IImportData(fset, packages, data[1:], id)
return pkg, err
case 'v', 'c', 'd':
_, pkg, err := BImportData(fset, packages, data, id)
return pkg, err
case 'u':
_, pkg, err := UImportData(fset, packages, data[1:size], id)
return pkg, err
default:
l := len(data)
if l > 10 {
l = 10
}
return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
}
}
default:
@ -342,8 +363,9 @@ func (p *parser) expectKeyword(keyword string) {
// ----------------------------------------------------------------------------
// Qualified and unqualified names
// PackageId = string_lit .
// parsePackageID parses a PackageId:
//
// PackageId = string_lit .
func (p *parser) parsePackageID() string {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
@ -357,13 +379,16 @@ func (p *parser) parsePackageID() string {
return id
}
// PackageName = ident .
// parsePackageName parse a PackageName:
//
// PackageName = ident .
func (p *parser) parsePackageName() string {
return p.expect(scanner.Ident)
}
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
// parseDotIdent parses a dotIdentifier:
//
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
func (p *parser) parseDotIdent() string {
ident := ""
if p.tok != scanner.Int {
@ -380,8 +405,9 @@ func (p *parser) parseDotIdent() string {
return ident
}
// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
// parseQualifiedName parses a QualifiedName:
//
// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
func (p *parser) parseQualifiedName() (id, name string) {
p.expect('@')
id = p.parsePackageID()
@ -404,7 +430,6 @@ func (p *parser) parseQualifiedName() (id, name string) {
// id identifies a package, usually by a canonical package path like
// "encoding/json" but possibly by a non-canonical import path like
// "./json".
//
func (p *parser) getPkg(id, name string) *types.Package {
// package unsafe is not in the packages maps - handle explicitly
if id == "unsafe" {
@ -440,7 +465,6 @@ func (p *parser) getPkg(id, name string) *types.Package {
// parseExportedName is like parseQualifiedName, but
// the package id is resolved to an imported *types.Package.
//
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
id, name := p.parseQualifiedName()
pkg = p.getPkg(id, "")
@ -450,8 +474,9 @@ func (p *parser) parseExportedName() (pkg *types.Package, name string) {
// ----------------------------------------------------------------------------
// Types
// BasicType = identifier .
// parseBasicType parses a BasicType:
//
// BasicType = identifier .
func (p *parser) parseBasicType() types.Type {
id := p.expect(scanner.Ident)
obj := types.Universe.Lookup(id)
@ -462,8 +487,9 @@ func (p *parser) parseBasicType() types.Type {
return nil
}
// ArrayType = "[" int_lit "]" Type .
// parseArrayType parses an ArrayType:
//
// ArrayType = "[" int_lit "]" Type .
func (p *parser) parseArrayType(parent *types.Package) types.Type {
// "[" already consumed and lookahead known not to be "]"
lit := p.expect(scanner.Int)
@ -476,8 +502,9 @@ func (p *parser) parseArrayType(parent *types.Package) types.Type {
return types.NewArray(elem, n)
}
// MapType = "map" "[" Type "]" Type .
// parseMapType parses a MapType:
//
// MapType = "map" "[" Type "]" Type .
func (p *parser) parseMapType(parent *types.Package) types.Type {
p.expectKeyword("map")
p.expect('[')
@ -487,7 +514,9 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
return types.NewMap(key, elem)
}
// Name = identifier | "?" | QualifiedName .
// parseName parses a Name:
//
// Name = identifier | "?" | QualifiedName .
//
// For unqualified and anonymous names, the returned package is the parent
// package unless parent == nil, in which case the returned package is the
@ -499,7 +528,6 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
// it doesn't exist yet) unless materializePkg is set (which creates an
// unnamed package with valid package path). In the latter case, a
// subsequent import clause is expected to provide a name for the package.
//
func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
pkg = parent
if pkg == nil {
@ -533,8 +561,9 @@ func deref(typ types.Type) types.Type {
return typ
}
// Field = Name Type [ string_lit ] .
// parseField parses a Field:
//
// Field = Name Type [ string_lit ] .
func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
pkg, name := p.parseName(parent, true)
@ -577,9 +606,10 @@ func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
}
// StructType = "struct" "{" [ FieldList ] "}" .
// FieldList = Field { ";" Field } .
// parseStructType parses a StructType:
//
// StructType = "struct" "{" [ FieldList ] "}" .
// FieldList = Field { ";" Field } .
func (p *parser) parseStructType(parent *types.Package) types.Type {
var fields []*types.Var
var tags []string
@ -604,8 +634,9 @@ func (p *parser) parseStructType(parent *types.Package) types.Type {
return types.NewStruct(fields, tags)
}
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
// parseParameter parses a Parameter:
//
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
_, name := p.parseName(nil, false)
// remove gc-specific parameter numbering
@ -629,9 +660,10 @@ func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
return
}
// Parameters = "(" [ ParameterList ] ")" .
// ParameterList = { Parameter "," } Parameter .
// parseParameters parses a Parameters:
//
// Parameters = "(" [ ParameterList ] ")" .
// ParameterList = { Parameter "," } Parameter .
func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
p.expect('(')
for p.tok != ')' && p.tok != scanner.EOF {
@ -652,9 +684,10 @@ func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
return
}
// Signature = Parameters [ Result ] .
// Result = Type | Parameters .
// parseSignature parses a Signature:
//
// Signature = Parameters [ Result ] .
// Result = Type | Parameters .
func (p *parser) parseSignature(recv *types.Var) *types.Signature {
params, isVariadic := p.parseParameters()
@ -671,14 +704,15 @@ func (p *parser) parseSignature(recv *types.Var) *types.Signature {
return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
}
// InterfaceType = "interface" "{" [ MethodList ] "}" .
// MethodList = Method { ";" Method } .
// Method = Name Signature .
// parseInterfaceType parses an InterfaceType:
//
// InterfaceType = "interface" "{" [ MethodList ] "}" .
// MethodList = Method { ";" Method } .
// Method = Name Signature .
//
// The methods of embedded interfaces are always "inlined"
// by the compiler and thus embedded interfaces are never
// visible in the export data.
//
func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
var methods []*types.Func
@ -699,8 +733,9 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
return newInterface(methods, nil).Complete()
}
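// Illustrative note (not part of the vendored source): because the compiler
// inlines embedded interfaces, export data for
//
//	type A interface{ M() }
//	type B interface{ A; N() }
//
// describes B as an interface declaring both M and N directly, with no
// trace of the embedding of A.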
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
// parseChanType parses a ChanType:
//
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
func (p *parser) parseChanType(parent *types.Package) types.Type {
dir := types.SendRecv
if p.tok == scanner.Ident {
@ -718,17 +753,18 @@ func (p *parser) parseChanType(parent *types.Package) types.Type {
return types.NewChan(dir, elem)
}
// Type =
// BasicType | TypeName | ArrayType | SliceType | StructType |
// PointerType | FuncType | InterfaceType | MapType | ChanType |
// "(" Type ")" .
// parseType parses a Type:
//
// BasicType = ident .
// TypeName = ExportedName .
// SliceType = "[" "]" Type .
// PointerType = "*" Type .
// FuncType = "func" Signature .
// Type =
// BasicType | TypeName | ArrayType | SliceType | StructType |
// PointerType | FuncType | InterfaceType | MapType | ChanType |
// "(" Type ")" .
//
// BasicType = ident .
// TypeName = ExportedName .
// SliceType = "[" "]" Type .
// PointerType = "*" Type .
// FuncType = "func" Signature .
func (p *parser) parseType(parent *types.Package) types.Type {
switch p.tok {
case scanner.Ident:
@ -780,16 +816,18 @@ func (p *parser) parseType(parent *types.Package) types.Type {
// ----------------------------------------------------------------------------
// Declarations
// ImportDecl = "import" PackageName PackageId .
// parseImportDecl parses an ImportDecl:
//
// ImportDecl = "import" PackageName PackageId .
func (p *parser) parseImportDecl() {
p.expectKeyword("import")
name := p.parsePackageName()
p.getPkg(p.parsePackageID(), name)
}
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
// parseInt parses an int_lit:
//
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
func (p *parser) parseInt() string {
s := ""
switch p.tok {
@ -802,8 +840,9 @@ func (p *parser) parseInt() string {
return s + p.expect(scanner.Int)
}
// number = int_lit [ "p" int_lit ] .
// parseNumber parses a number:
//
// number = int_lit [ "p" int_lit ] .
func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
// mantissa
mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
@ -838,13 +877,14 @@ func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
return
}
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
// bool_lit = "true" | "false" .
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
// rune_lit = "(" int_lit "+" int_lit ")" .
// string_lit = `"` { unicode_char } `"` .
// parseConstDecl parses a ConstDecl:
//
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
// bool_lit = "true" | "false" .
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
// rune_lit = "(" int_lit "+" int_lit ")" .
// string_lit = `"` { unicode_char } `"` .
func (p *parser) parseConstDecl() {
p.expectKeyword("const")
pkg, name := p.parseExportedName()
@ -914,8 +954,9 @@ func (p *parser) parseConstDecl() {
pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
}
// TypeDecl = "type" ExportedName Type .
// parseTypeDecl parses a TypeDecl:
//
// TypeDecl = "type" ExportedName Type .
func (p *parser) parseTypeDecl() {
p.expectKeyword("type")
pkg, name := p.parseExportedName()
@ -933,8 +974,9 @@ func (p *parser) parseTypeDecl() {
}
}
// VarDecl = "var" ExportedName Type .
// parseVarDecl parses a VarDecl:
//
// VarDecl = "var" ExportedName Type .
func (p *parser) parseVarDecl() {
p.expectKeyword("var")
pkg, name := p.parseExportedName()
@ -942,9 +984,10 @@ func (p *parser) parseVarDecl() {
pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
}
// Func = Signature [ Body ] .
// Body = "{" ... "}" .
// parseFunc parses a Func:
//
// Func = Signature [ Body ] .
// Body = "{" ... "}" .
func (p *parser) parseFunc(recv *types.Var) *types.Signature {
sig := p.parseSignature(recv)
if p.tok == '{' {
@ -961,9 +1004,10 @@ func (p *parser) parseFunc(recv *types.Var) *types.Signature {
return sig
}
// MethodDecl = "func" Receiver Name Func .
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
// parseMethodDecl parses a MethodDecl:
//
// MethodDecl = "func" Receiver Name Func .
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
func (p *parser) parseMethodDecl() {
// "func" already consumed
p.expect('(')
@ -986,8 +1030,9 @@ func (p *parser) parseMethodDecl() {
base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
}
// FuncDecl = "func" ExportedName Func .
// parseFuncDecl parses a FuncDecl:
//
// FuncDecl = "func" ExportedName Func .
func (p *parser) parseFuncDecl() {
// "func" already consumed
pkg, name := p.parseExportedName()
@ -995,8 +1040,9 @@ func (p *parser) parseFuncDecl() {
pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
}
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
// parseDecl parses a Decl:
//
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
func (p *parser) parseDecl() {
if p.tok == scanner.Ident {
switch p.lit {
@ -1023,9 +1069,10 @@ func (p *parser) parseDecl() {
// ----------------------------------------------------------------------------
// Export
// Export = "PackageClause { Decl } "$$" .
// PackageClause = "package" PackageName [ "safe" ] "\n" .
// parseExport parses an Export:
//
// Export = "PackageClause { Decl } "$$" .
// PackageClause = "package" PackageName [ "safe" ] "\n" .
func (p *parser) parseExport() *types.Package {
p.expectKeyword("package")
name := p.parsePackageName()

View File

@ -11,6 +11,7 @@ package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/ast"
"go/constant"
"go/token"
@ -19,11 +20,11 @@ import (
"math/big"
"reflect"
"sort"
)
"strconv"
"strings"
// Current indexed export format version. Increase with each format change.
// 0: Go1.11 encoding
const iexportVersion = 0
"golang.org/x/tools/internal/typeparams"
)
// Current bundled export format version. Increase with each format change.
// 0: initial implementation
@ -35,31 +36,35 @@ const bundleVersion = 0
// The package path of the top-level package will not be recorded,
// so that calls to IImportData can override with a provided package path.
func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
return iexportCommon(out, fset, false, []*types.Package{pkg})
return iexportCommon(out, fset, false, iexportVersion, []*types.Package{pkg})
}
// IExportBundle writes an indexed export bundle for pkgs to out.
func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
return iexportCommon(out, fset, true, pkgs)
return iexportCommon(out, fset, true, iexportVersion, pkgs)
}
func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*types.Package) (err error) {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int, pkgs []*types.Package) (err error) {
if !debug {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
}
// Not an internal error; panic again.
panic(e)
}
// Not an internal error; panic again.
panic(e)
}
}()
}()
}
p := iexporter{
fset: fset,
version: version,
allPkgs: map[*types.Package]bool{},
stringIndex: map[string]uint64{},
declIndex: map[types.Object]uint64{},
tparamNames: map[types.Object]string{},
typIndex: map[types.Type]uint64{},
}
if !bundle {
@ -119,7 +124,7 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type
if bundle {
hdr.uint64(bundleVersion)
}
hdr.uint64(iexportVersion)
hdr.uint64(uint64(p.version))
hdr.uint64(uint64(p.strings.Len()))
hdr.uint64(dataLen)
@ -136,8 +141,12 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type
// non-compiler tools and includes a complete package description
// (i.e., name and height).
func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
type pkgObj struct {
obj types.Object
name string // qualified name; differs from obj.Name for type params
}
// Build a map from packages to objects from that package.
pkgObjs := map[*types.Package][]types.Object{}
pkgObjs := map[*types.Package][]pkgObj{}
// For the main index, make sure to include every package that
// we reference, even if we're not exporting (or reexporting)
@ -150,7 +159,8 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
}
for obj := range index {
pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj)
name := w.p.exportName(obj)
pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name})
}
var pkgs []*types.Package
@ -158,7 +168,7 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
pkgs = append(pkgs, pkg)
sort.Slice(objs, func(i, j int) bool {
return objs[i].Name() < objs[j].Name()
return objs[i].name < objs[j].name
})
}
@ -175,15 +185,25 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
objs := pkgObjs[pkg]
w.uint64(uint64(len(objs)))
for _, obj := range objs {
w.string(obj.Name())
w.uint64(index[obj])
w.string(obj.name)
w.uint64(index[obj.obj])
}
}
}
// exportName returns the 'exported' name of an object. It differs from
// obj.Name() only for type parameters (see tparamExportName for details).
func (p *iexporter) exportName(obj types.Object) (res string) {
if name := p.tparamNames[obj]; name != "" {
return name
}
return obj.Name()
}
type iexporter struct {
fset *token.FileSet
out *bytes.Buffer
fset *token.FileSet
out *bytes.Buffer
version int
localpkg *types.Package
@ -197,9 +217,21 @@ type iexporter struct {
strings intWriter
stringIndex map[string]uint64
data0 intWriter
declIndex map[types.Object]uint64
typIndex map[types.Type]uint64
data0 intWriter
declIndex map[types.Object]uint64
tparamNames map[types.Object]string // typeparam->exported name
typIndex map[types.Type]uint64
indent int // for tracing support
}
func (p *iexporter) trace(format string, args ...interface{}) {
if !trace {
// Call sites should also be guarded, but having this check here allows
// easily enabling/disabling debug trace statements.
return
}
fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
}
// stringOff returns the offset of s within the string section.
@ -219,13 +251,16 @@ func (p *iexporter) stringOff(s string) uint64 {
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(obj types.Object) {
// Package unsafe is known to the compiler and predeclared.
assert(obj.Pkg() != types.Unsafe)
// Caller should not ask us to export it.
if obj.Pkg() == types.Unsafe {
panic("cannot export package unsafe")
}
if _, ok := p.declIndex[obj]; ok {
return
}
p.declIndex[obj] = ^uint64(0) // mark n present in work queue
p.declIndex[obj] = ^uint64(0) // mark obj present in work queue
p.declTodo.pushTail(obj)
}
@ -233,10 +268,11 @@ func (p *iexporter) pushDecl(obj types.Object) {
type exportWriter struct {
p *iexporter
data intWriter
currPkg *types.Package
prevFile string
prevLine int64
data intWriter
currPkg *types.Package
prevFile string
prevLine int64
prevColumn int64
}
func (w *exportWriter) exportPath(pkg *types.Package) string {
@ -247,6 +283,14 @@ func (w *exportWriter) exportPath(pkg *types.Package) string {
}
func (p *iexporter) doDecl(obj types.Object) {
if trace {
p.trace("exporting decl %v (%T)", obj, obj)
p.indent++
defer func() {
p.indent--
p.trace("=> %s", obj)
}()
}
w := p.newWriter()
w.setPkg(obj.Pkg(), false)
@ -261,8 +305,24 @@ func (p *iexporter) doDecl(obj types.Object) {
if sig.Recv() != nil {
panic(internalErrorf("unexpected method: %v", sig))
}
w.tag('F')
// Function.
if typeparams.ForSignature(sig).Len() == 0 {
w.tag('F')
} else {
w.tag('G')
}
w.pos(obj.Pos())
// The tparam list of the function type is the declaration of the type
// params. So, write out the type params right now. Then those type params
// will be referenced via their type offset (via typOff) in all other
// places in the signature and function where they are used.
//
// While exporting the type parameters, tparamList computes and records
// their export name, so that it can later be used when writing the index.
if tparams := typeparams.ForSignature(sig); tparams.Len() > 0 {
w.tparamList(obj.Name(), tparams, obj.Pkg())
}
w.signature(sig)
case *types.Const:
@ -271,30 +331,56 @@ func (p *iexporter) doDecl(obj types.Object) {
w.value(obj.Type(), obj.Val())
case *types.TypeName:
t := obj.Type()
if tparam, ok := t.(*typeparams.TypeParam); ok {
w.tag('P')
w.pos(obj.Pos())
constraint := tparam.Constraint()
if p.version >= iexportVersionGo1_18 {
implicit := false
if iface, _ := constraint.(*types.Interface); iface != nil {
implicit = typeparams.IsImplicit(iface)
}
w.bool(implicit)
}
w.typ(constraint, obj.Pkg())
break
}
if obj.IsAlias() {
w.tag('A')
w.pos(obj.Pos())
w.typ(obj.Type(), obj.Pkg())
w.typ(t, obj.Pkg())
break
}
// Defined type.
w.tag('T')
named, ok := t.(*types.Named)
if !ok {
panic(internalErrorf("%s is not a defined type", t))
}
if typeparams.ForNamed(named).Len() == 0 {
w.tag('T')
} else {
w.tag('U')
}
w.pos(obj.Pos())
if typeparams.ForNamed(named).Len() > 0 {
// While exporting the type parameters, tparamList computes and records
// their export name, so that it can later be used when writing the index.
w.tparamList(obj.Name(), typeparams.ForNamed(named), obj.Pkg())
}
underlying := obj.Type().Underlying()
w.typ(underlying, obj.Pkg())
t := obj.Type()
if types.IsInterface(t) {
break
}
named, ok := t.(*types.Named)
if !ok {
panic(internalErrorf("%s is not a defined type", t))
}
n := named.NumMethods()
w.uint64(uint64(n))
for i := 0; i < n; i++ {
@ -302,6 +388,17 @@ func (p *iexporter) doDecl(obj types.Object) {
w.pos(m.Pos())
w.string(m.Name())
sig, _ := m.Type().(*types.Signature)
// Receiver type parameters are type arguments of the receiver type, so
// their name must be qualified before exporting recv.
if rparams := typeparams.RecvTypeParams(sig); rparams.Len() > 0 {
prefix := obj.Name() + "." + m.Name()
for i := 0; i < rparams.Len(); i++ {
rparam := rparams.At(i)
name := tparamExportName(prefix, rparam)
w.p.tparamNames[rparam.Obj()] = name
}
}
w.param(sig.Recv())
w.signature(sig)
}
@ -318,6 +415,48 @@ func (w *exportWriter) tag(tag byte) {
}
func (w *exportWriter) pos(pos token.Pos) {
if w.p.version >= iexportVersionPosCol {
w.posV1(pos)
} else {
w.posV0(pos)
}
}
func (w *exportWriter) posV1(pos token.Pos) {
if w.p.fset == nil {
w.int64(0)
return
}
p := w.p.fset.Position(pos)
file := p.Filename
line := int64(p.Line)
column := int64(p.Column)
deltaColumn := (column - w.prevColumn) << 1
deltaLine := (line - w.prevLine) << 1
if file != w.prevFile {
deltaLine |= 1
}
if deltaLine != 0 {
deltaColumn |= 1
}
w.int64(deltaColumn)
if deltaColumn&1 != 0 {
w.int64(deltaLine)
if deltaLine&1 != 0 {
w.string(file)
}
}
w.prevFile = file
w.prevLine = line
w.prevColumn = column
}
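// Illustrative summary (not part of the vendored source) of the delta
// encoding above: if only the column changed, a single even varint (the
// column delta) is written; if the line changed too, the column delta has
// its low bit set and the line delta follows; if the file changed as well,
// the line delta has its low bit set and the file name string follows.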
func (w *exportWriter) posV0(pos token.Pos) {
if w.p.fset == nil {
w.int64(0)
return
@ -359,10 +498,11 @@ func (w *exportWriter) pkg(pkg *types.Package) {
}
func (w *exportWriter) qualifiedIdent(obj types.Object) {
name := w.p.exportName(obj)
// Ensure any referenced declarations are written out too.
w.p.pushDecl(obj)
w.string(obj.Name())
w.string(name)
w.pkg(obj.Pkg())
}
@ -396,11 +536,32 @@ func (w *exportWriter) startType(k itag) {
}
func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
if trace {
w.p.trace("exporting type %s (%T)", t, t)
w.p.indent++
defer func() {
w.p.indent--
w.p.trace("=> %s", t)
}()
}
switch t := t.(type) {
case *types.Named:
if targs := typeparams.NamedTypeArgs(t); targs.Len() > 0 {
w.startType(instanceType)
// TODO(rfindley): investigate if this position is correct, and if it
// matters.
w.pos(t.Obj().Pos())
w.typeList(targs, pkg)
w.typ(typeparams.NamedTypeOrigin(t), pkg)
return
}
w.startType(definedType)
w.qualifiedIdent(t.Obj())
case *typeparams.TypeParam:
w.startType(typeParamType)
w.qualifiedIdent(t.Obj())
case *types.Pointer:
w.startType(pointerType)
w.typ(t.Elem(), pkg)
@ -461,9 +622,14 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
n := t.NumEmbeddeds()
w.uint64(uint64(n))
for i := 0; i < n; i++ {
f := t.Embedded(i)
w.pos(f.Obj().Pos())
w.typ(f.Obj().Type(), f.Obj().Pkg())
ft := t.EmbeddedType(i)
tPkg := pkg
if named, _ := ft.(*types.Named); named != nil {
w.pos(named.Obj().Pos())
} else {
w.pos(token.NoPos)
}
w.typ(ft, tPkg)
}
n = t.NumExplicitMethods()
@ -476,6 +642,16 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
w.signature(sig)
}
case *typeparams.Union:
w.startType(unionType)
nt := t.Len()
w.uint64(uint64(nt))
for i := 0; i < nt; i++ {
term := t.Term(i)
w.bool(term.Tilde())
w.typ(term.Type(), pkg)
}
default:
panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
}
@ -497,6 +673,56 @@ func (w *exportWriter) signature(sig *types.Signature) {
}
}
func (w *exportWriter) typeList(ts *typeparams.TypeList, pkg *types.Package) {
w.uint64(uint64(ts.Len()))
for i := 0; i < ts.Len(); i++ {
w.typ(ts.At(i), pkg)
}
}
func (w *exportWriter) tparamList(prefix string, list *typeparams.TypeParamList, pkg *types.Package) {
ll := uint64(list.Len())
w.uint64(ll)
for i := 0; i < list.Len(); i++ {
tparam := list.At(i)
// Set the type parameter exportName before exporting its type.
exportName := tparamExportName(prefix, tparam)
w.p.tparamNames[tparam.Obj()] = exportName
w.typ(list.At(i), pkg)
}
}
const blankMarker = "$"
// tparamExportName returns the 'exported' name of a type parameter, which
// differs from its actual object name: it is prefixed with a qualifier, and
// blank type parameter names are disambiguated by their index in the type
// parameter list.
func tparamExportName(prefix string, tparam *typeparams.TypeParam) string {
assert(prefix != "")
name := tparam.Obj().Name()
if name == "_" {
name = blankMarker + strconv.Itoa(tparam.Index())
}
return prefix + "." + name
}
// tparamName returns the real name of a type parameter, after stripping its
// qualifying prefix and reverting blank-name encoding. See tparamExportName
// for details.
func tparamName(exportName string) string {
// Remove the "path" from the type param name that makes it unique.
ix := strings.LastIndex(exportName, ".")
if ix < 0 {
errorf("malformed type parameter export name %s: missing prefix", exportName)
}
name := exportName[ix+1:]
if strings.HasPrefix(name, blankMarker) {
return "_"
}
return name
}
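// Illustrative example (not part of the vendored source), assuming a generic
// function Map[K comparable, _ any] in the exported package; <K> and <_>
// stand for the corresponding *TypeParam values, and for methods the prefix
// would be "Type.Method" rather than "Map":
//
//	tparamExportName("Map", <K>)  == "Map.K"
//	tparamExportName("Map", <_>)  == "Map.$1" // blank name, index 1
//	tparamName("Map.K")           == "K"
//	tparamName("Map.$1")          == "_"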
func (w *exportWriter) paramList(tup *types.Tuple) {
n := tup.Len()
w.uint64(uint64(n))
@ -513,6 +739,9 @@ func (w *exportWriter) param(obj types.Object) {
func (w *exportWriter) value(typ types.Type, v constant.Value) {
w.typ(typ, nil)
if w.p.version >= iexportVersionGo1_18 {
w.int64(int64(v.Kind()))
}
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
case types.IsBoolean:

View File

@ -17,7 +17,11 @@ import (
"go/token"
"go/types"
"io"
"math/big"
"sort"
"strings"
"golang.org/x/tools/internal/typeparams"
)
type intReader struct {
@ -41,6 +45,19 @@ func (r *intReader) uint64() uint64 {
return i
}
// Keep this in sync with constants in iexport.go.
const (
iexportVersionGo1_11 = 0
iexportVersionPosCol = 1
iexportVersionGo1_18 = 2
iexportVersionGenerics = 2
)
type ident struct {
pkg *types.Package
name string
}
const predeclReserved = 32
type itag uint64
@ -56,6 +73,9 @@ const (
signatureType
structType
interfaceType
typeParamType
instanceType
unionType
)
// IImportData imports a package from the serialized package data
@ -78,15 +98,19 @@ func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data
func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string) (pkgs []*types.Package, err error) {
const currentVersion = 1
version := int64(-1)
defer func() {
if e := recover(); e != nil {
if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
if !debug {
defer func() {
if e := recover(); e != nil {
if bundle {
err = fmt.Errorf("%v", e)
} else if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
}
}
}
}()
}()
}
r := &intReader{bytes.NewReader(data), path}
@ -101,9 +125,13 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
version = int64(r.uint64())
switch version {
case currentVersion, 0:
case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11:
default:
errorf("unknown iexport format version %d", version)
if version > iexportVersionGo1_18 {
errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
} else {
errorf("unknown iexport format version %d", version)
}
}
sLen := int64(r.uint64())
@ -115,8 +143,8 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
r.Seek(sLen+dLen, io.SeekCurrent)
p := iimporter{
ipath: path,
version: int(version),
ipath: path,
stringData: stringData,
stringCache: make(map[uint64]string),
@ -125,12 +153,16 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
declData: declData,
pkgIndex: make(map[*types.Package]map[string]uint64),
typCache: make(map[uint64]types.Type),
// Separate map for typeparams, keyed by their package and unique
// name.
tparamIndex: make(map[ident]types.Type),
fake: fakeFileSet{
fset: fset,
files: make(map[string]*token.File),
files: make(map[string]*fileInfo),
},
}
defer p.fake.setLines() // set lines for files in fset
for i, pt := range predeclared() {
p.typCache[uint64(i)] = pt
@ -208,6 +240,15 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
pkg.MarkComplete()
}
// SetConstraint can't be called if the constraint type is not yet complete.
// When type params are created in the 'P' case of (*importReader).obj(),
// the associated constraint type may not be complete due to recursion.
// Therefore, we defer calling SetConstraint there, and call it here instead
// after all types are complete.
for _, d := range p.later {
typeparams.SetTypeParamConstraint(d.t, d.constraint)
}
for _, typ := range p.interfaceList {
typ.Complete()
}
@ -215,23 +256,51 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
return pkgs, nil
}
type setConstraintArgs struct {
t *typeparams.TypeParam
constraint types.Type
}
type iimporter struct {
ipath string
version int
ipath string
stringData []byte
stringCache map[uint64]string
pkgCache map[uint64]*types.Package
declData []byte
pkgIndex map[*types.Package]map[string]uint64
typCache map[uint64]types.Type
declData []byte
pkgIndex map[*types.Package]map[string]uint64
typCache map[uint64]types.Type
tparamIndex map[ident]types.Type
fake fakeFileSet
interfaceList []*types.Interface
// Arguments for calls to SetConstraint that are deferred due to recursive types
later []setConstraintArgs
indent int // for tracing support
}
func (p *iimporter) trace(format string, args ...interface{}) {
if !trace {
// Call sites should also be guarded, but having this check here allows
// easily enabling/disabling debug trace statements.
return
}
fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
}
func (p *iimporter) doDecl(pkg *types.Package, name string) {
if debug {
p.trace("import decl %s", name)
p.indent++
defer func() {
p.indent--
p.trace("=> %s", name)
}()
}
// See if we've already imported this declaration.
if obj := pkg.Scope().Lookup(name); obj != nil {
return
@ -273,7 +342,7 @@ func (p *iimporter) pkgAt(off uint64) *types.Package {
}
func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
if t, ok := p.typCache[off]; ok && canReuse(base, t) {
return t
}
@ -285,12 +354,30 @@ func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
r.declReader.Reset(p.declData[off-predeclReserved:])
t := r.doType(base)
if base == nil || !isInterface(t) {
if canReuse(base, t) {
p.typCache[off] = t
}
return t
}
// canReuse reports whether the type rhs on the RHS of the declaration for def
// may be re-used.
//
// Specifically, if def is non-nil and rhs is an interface type with methods, it
// may not be re-used because we have a convention of setting the receiver type
// for interface methods to def.
func canReuse(def *types.Named, rhs types.Type) bool {
if def == nil {
return true
}
iface, _ := rhs.(*types.Interface)
if iface == nil {
return true
}
// Don't use iface.Empty() here as iface may not be complete.
return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0
}
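// Illustrative example (not part of the vendored source): when importing
//
//	type I interface{ M() int }
//
// the *types.Interface built for the RHS has M's receiver type set to I, so
// that instance cannot be cached and reused for a different declaration;
// canReuse(def, rhs) therefore reports false whenever def is non-nil and rhs
// is an interface with methods or embeddeds.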
type importReader struct {
p *iimporter
declReader bytes.Reader
@ -315,17 +402,26 @@ func (r *importReader) obj(name string) {
r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
case 'F':
sig := r.signature(nil)
case 'F', 'G':
var tparams []*typeparams.TypeParam
if tag == 'G' {
tparams = r.tparamList()
}
sig := r.signature(nil, nil, tparams)
r.declare(types.NewFunc(pos, r.currPkg, name, sig))
case 'T':
case 'T', 'U':
// Types can be recursive. We need to setup a stub
// declaration before recursing.
obj := types.NewTypeName(pos, r.currPkg, name, nil)
named := types.NewNamed(obj, nil, nil)
// Declare obj before calling r.tparamList, so the new type name is recognized
// if used in the constraint of one of its own typeparams (see #48280).
r.declare(obj)
if tag == 'U' {
tparams := r.tparamList()
typeparams.SetForNamed(named, tparams)
}
underlying := r.p.typAt(r.uint64(), named).Underlying()
named.SetUnderlying(underlying)
@ -335,12 +431,59 @@ func (r *importReader) obj(name string) {
mpos := r.pos()
mname := r.ident()
recv := r.param()
msig := r.signature(recv)
// If the receiver has any targs, set those as the
// rparams of the method (since those are the
// typeparams being used in the method sig/body).
base := baseType(recv.Type())
assert(base != nil)
targs := typeparams.NamedTypeArgs(base)
var rparams []*typeparams.TypeParam
if targs.Len() > 0 {
rparams = make([]*typeparams.TypeParam, targs.Len())
for i := range rparams {
rparams[i] = targs.At(i).(*typeparams.TypeParam)
}
}
msig := r.signature(recv, rparams, nil)
named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
}
}
case 'P':
// We need to "declare" a typeparam in order to have a name that
// can be referenced recursively (if needed) in the type param's
// bound.
if r.p.version < iexportVersionGenerics {
errorf("unexpected type param type")
}
name0 := tparamName(name)
tn := types.NewTypeName(pos, r.currPkg, name0, nil)
t := typeparams.NewTypeParam(tn, nil)
// To handle recursive references to the typeparam within its
// bound, save the partial type in tparamIndex before reading the bounds.
id := ident{r.currPkg, name}
r.p.tparamIndex[id] = t
var implicit bool
if r.p.version >= iexportVersionGo1_18 {
implicit = r.bool()
}
constraint := r.typ()
if implicit {
iface, _ := constraint.(*types.Interface)
if iface == nil {
errorf("non-interface constraint marked implicit")
}
typeparams.MarkImplicit(iface)
}
// The constraint type may not be complete, if we
// are in the middle of a type recursion involving type
// constraints. So, we defer SetConstraint until we have
// completely set up all types in ImportData.
r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
case 'V':
typ := r.typ()
@ -357,6 +500,10 @@ func (r *importReader) declare(obj types.Object) {
func (r *importReader) value() (typ types.Type, val constant.Value) {
typ = r.typ()
if r.p.version >= iexportVersionGo1_18 {
// TODO: add support for using the kind.
_ = constant.Kind(r.int64())
}
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
case types.IsBoolean:
@ -366,7 +513,9 @@ func (r *importReader) value() (typ types.Type, val constant.Value) {
val = constant.MakeString(r.string())
case types.IsInteger:
val = r.mpint(b)
var x big.Int
r.mpint(&x, b)
val = constant.Make(&x)
case types.IsFloat:
val = r.mpfloat(b)
@ -415,8 +564,8 @@ func intSize(b *types.Basic) (signed bool, maxBytes uint) {
return
}
func (r *importReader) mpint(b *types.Basic) constant.Value {
signed, maxBytes := intSize(b)
func (r *importReader) mpint(x *big.Int, typ *types.Basic) {
signed, maxBytes := intSize(typ)
maxSmall := 256 - maxBytes
if signed {
@ -435,7 +584,8 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
v = ^v
}
}
return constant.MakeInt64(v)
x.SetInt64(v)
return
}
v := -n
@ -445,47 +595,23 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
if v < 1 || uint(v) > maxBytes {
errorf("weird decoding: %v, %v => %v", n, signed, v)
}
buf := make([]byte, v)
io.ReadFull(&r.declReader, buf)
// convert to little endian
// TODO(gri) go/constant should have a more direct conversion function
// (e.g., once it supports a big.Float based implementation)
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
buf[i], buf[j] = buf[j], buf[i]
}
x := constant.MakeFromBytes(buf)
b := make([]byte, v)
io.ReadFull(&r.declReader, b)
x.SetBytes(b)
if signed && n&1 != 0 {
x = constant.UnaryOp(token.SUB, x, 0)
x.Neg(x)
}
return x
}
func (r *importReader) mpfloat(b *types.Basic) constant.Value {
x := r.mpint(b)
if constant.Sign(x) == 0 {
return x
func (r *importReader) mpfloat(typ *types.Basic) constant.Value {
var mant big.Int
r.mpint(&mant, typ)
var f big.Float
f.SetInt(&mant)
if f.Sign() != 0 {
f.SetMantExp(&f, int(r.int64()))
}
exp := r.int64()
switch {
case exp > 0:
x = constant.Shift(x, token.SHL, uint(exp))
// Ensure that the imported Kind is Float, else this constant may run into
// bitsize limits on overlarge integers. Eventually we can instead adopt
// the approach of CL 288632, but that CL relies on go/constant APIs that
// were introduced in go1.13.
//
// TODO(rFindley): sync the logic here with tip Go once we no longer
// support go1.12.
x = constant.ToFloat(x)
case exp < 0:
d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
x = constant.BinaryOp(x, token.QUO, d)
}
return x
return constant.Make(&f)
}
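// Illustrative note (not part of the vendored source): a float constant is
// decoded as an integer mantissa followed, when the mantissa is nonzero, by a
// base-2 exponent, giving value = mant * 2**exp. For example, 1.5 can be
// transmitted as the pair (mant=3, exp=-1), and 0 as a zero mantissa with no
// exponent at all.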
func (r *importReader) ident() string {
@ -499,7 +625,7 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) {
}
func (r *importReader) pos() token.Pos {
if r.p.version >= 1 {
if r.p.version >= iexportVersionPosCol {
r.posv1()
} else {
r.posv0()
@ -547,8 +673,17 @@ func isInterface(t types.Type) bool {
func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
func (r *importReader) doType(base *types.Named) types.Type {
switch k := r.kind(); k {
func (r *importReader) doType(base *types.Named) (res types.Type) {
k := r.kind()
if debug {
r.p.trace("importing type %d (base: %s)", k, base)
r.p.indent++
defer func() {
r.p.indent--
r.p.trace("=> %s", res)
}()
}
switch k {
default:
errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
return nil
@ -571,7 +706,7 @@ func (r *importReader) doType(base *types.Named) types.Type {
return types.NewMap(r.typ(), r.typ())
case signatureType:
r.currPkg = r.pkg()
return r.signature(nil)
return r.signature(nil, nil, nil)
case structType:
r.currPkg = r.pkg()
@ -611,13 +746,56 @@ func (r *importReader) doType(base *types.Named) types.Type {
recv = types.NewVar(token.NoPos, r.currPkg, "", base)
}
msig := r.signature(recv)
msig := r.signature(recv, nil, nil)
methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
}
typ := newInterface(methods, embeddeds)
r.p.interfaceList = append(r.p.interfaceList, typ)
return typ
case typeParamType:
if r.p.version < iexportVersionGenerics {
errorf("unexpected type param type")
}
pkg, name := r.qualifiedIdent()
id := ident{pkg, name}
if t, ok := r.p.tparamIndex[id]; ok {
// We're already in the process of importing this typeparam.
return t
}
// Otherwise, import the definition of the typeparam now.
r.p.doDecl(pkg, name)
return r.p.tparamIndex[id]
case instanceType:
if r.p.version < iexportVersionGenerics {
errorf("unexpected instantiation type")
}
// pos does not matter for instances: they are positioned on the original
// type.
_ = r.pos()
len := r.uint64()
targs := make([]types.Type, len)
for i := range targs {
targs[i] = r.typ()
}
baseType := r.typ()
// The imported instantiated type doesn't include any methods, so
// we must always use the methods of the base (orig) type.
// TODO provide a non-nil *Environment
t, _ := typeparams.Instantiate(nil, baseType, targs, false)
return t
case unionType:
if r.p.version < iexportVersionGenerics {
errorf("unexpected instantiation type")
}
terms := make([]*typeparams.Term, r.uint64())
for i := range terms {
terms[i] = typeparams.NewTerm(r.bool(), r.typ())
}
return typeparams.NewUnion(terms)
}
}
@ -625,11 +803,25 @@ func (r *importReader) kind() itag {
return itag(r.uint64())
}
func (r *importReader) signature(recv *types.Var) *types.Signature {
func (r *importReader) signature(recv *types.Var, rparams []*typeparams.TypeParam, tparams []*typeparams.TypeParam) *types.Signature {
params := r.paramList()
results := r.paramList()
variadic := params.Len() > 0 && r.bool()
return types.NewSignature(recv, params, results, variadic)
return typeparams.NewSignatureType(recv, rparams, tparams, params, results, variadic)
}
func (r *importReader) tparamList() []*typeparams.TypeParam {
n := r.uint64()
if n == 0 {
return nil
}
xs := make([]*typeparams.TypeParam, n)
for i := range xs {
// Note: the standard library importer is tolerant of nil types here,
// though would panic in SetTypeParams.
xs[i] = r.typ().(*typeparams.TypeParam)
}
return xs
}
func (r *importReader) paramList() *types.Tuple {
@ -674,3 +866,13 @@ func (r *importReader) byte() byte {
}
return x
}
func baseType(typ types.Type) *types.Named {
// pointer receivers are never types.Named types
if p, _ := typ.(*types.Pointer); p != nil {
typ = p.Elem()
}
// receiver base types are always (possibly generic) types.Named types
n, _ := typ.(*types.Named)
return n
}

View File

@ -0,0 +1,16 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.18
// +build !go1.18
package gcimporter
import "go/types"
const iexportVersion = iexportVersionGo1_11
func additionalPredeclared() []types.Type {
return nil
}

View File

@ -0,0 +1,23 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.18
// +build go1.18
package gcimporter
import "go/types"
const iexportVersion = iexportVersionGenerics
// additionalPredeclared returns additional predeclared types in go.1.18.
func additionalPredeclared() []types.Type {
return []types.Type{
// comparable
types.Universe.Lookup("comparable").Type(),
// any
types.Universe.Lookup("any").Type(),
}
}

View File

@ -0,0 +1,10 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !(go1.18 && goexperiment.unified)
// +build !go1.18 !goexperiment.unified
package gcimporter
const unifiedIR = false

View File

@ -0,0 +1,10 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.18 && goexperiment.unified
// +build go1.18,goexperiment.unified
package gcimporter
const unifiedIR = true

View File

@ -0,0 +1,19 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.18
// +build !go1.18
package gcimporter
import (
"fmt"
"go/token"
"go/types"
)
func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
err = fmt.Errorf("go/tools compiled with a Go version earlier than 1.18 cannot read unified IR export data")
return
}

View File

@ -0,0 +1,612 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Derived from go/internal/gcimporter/ureader.go
//go:build go1.18
// +build go1.18
package gcimporter
import (
"go/token"
"go/types"
"strings"
"golang.org/x/tools/go/internal/pkgbits"
)
// A pkgReader holds the shared state for reading a unified IR package
// description.
type pkgReader struct {
pkgbits.PkgDecoder
fake fakeFileSet
ctxt *types.Context
imports map[string]*types.Package // previously imported packages, indexed by path
// lazily initialized arrays corresponding to the unified IR
// PosBase, Pkg, and Type sections, respectively.
posBases []string // position bases (i.e., file names)
pkgs []*types.Package
typs []types.Type
// laterFns holds functions that need to be invoked at the end of
// import reading.
laterFns []func()
}
// later adds a function to be invoked at the end of import reading.
func (pr *pkgReader) later(fn func()) {
pr.laterFns = append(pr.laterFns, fn)
}
// See cmd/compile/internal/noder.derivedInfo.
type derivedInfo struct {
idx pkgbits.Index
needed bool
}
// See cmd/compile/internal/noder.typeInfo.
type typeInfo struct {
idx pkgbits.Index
derived bool
}
func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
s := string(data)
s = s[:strings.LastIndex(s, "\n$$\n")]
input := pkgbits.NewPkgDecoder(path, s)
pkg = readUnifiedPackage(fset, nil, imports, input)
return
}
// readUnifiedPackage reads a package description from the given
// unified IR export data decoder.
func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package {
pr := pkgReader{
PkgDecoder: input,
fake: fakeFileSet{
fset: fset,
files: make(map[string]*fileInfo),
},
ctxt: ctxt,
imports: imports,
posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)),
pkgs: make([]*types.Package, input.NumElems(pkgbits.RelocPkg)),
typs: make([]types.Type, input.NumElems(pkgbits.RelocType)),
}
defer pr.fake.setLines()
r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
pkg := r.pkg()
r.Bool() // has init
for i, n := 0, r.Len(); i < n; i++ {
// As if r.obj(), but avoiding the Scope.Lookup call,
// to avoid eager loading of imports.
r.Sync(pkgbits.SyncObject)
assert(!r.Bool())
r.p.objIdx(r.Reloc(pkgbits.RelocObj))
assert(r.Len() == 0)
}
r.Sync(pkgbits.SyncEOF)
for _, fn := range pr.laterFns {
fn()
}
pkg.MarkComplete()
return pkg
}
// A reader holds the state for reading a single unified IR element
// within a package.
type reader struct {
pkgbits.Decoder
p *pkgReader
dict *readerDict
}
// A readerDict holds the state for type parameters that parameterize
// the current unified IR element.
type readerDict struct {
// bounds is a slice of typeInfos corresponding to the underlying
// bounds of the element's type parameters.
bounds []typeInfo
// tparams is a slice of the constructed TypeParams for the element.
tparams []*types.TypeParam
// derived is a slice of types derived from tparams, which may be
// instantiated while reading the current element.
derived []derivedInfo
derivedTypes []types.Type // lazily instantiated from derived
}
func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
return &reader{
Decoder: pr.NewDecoder(k, idx, marker),
p: pr,
}
}
// @@@ Positions
func (r *reader) pos() token.Pos {
r.Sync(pkgbits.SyncPos)
if !r.Bool() {
return token.NoPos
}
// TODO(mdempsky): Delta encoding.
posBase := r.posBase()
line := r.Uint()
col := r.Uint()
return r.p.fake.pos(posBase, int(line), int(col))
}
func (r *reader) posBase() string {
return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase))
}
func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
if b := pr.posBases[idx]; b != "" {
return b
}
r := pr.newReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
// Within types2, position bases have a lot more details (e.g.,
// keeping track of where //line directives appeared exactly).
//
// For go/types, we just track the file name.
filename := r.String()
if r.Bool() { // file base
// Was: "b = token.NewTrimmedFileBase(filename, true)"
} else { // line base
pos := r.pos()
line := r.Uint()
col := r.Uint()
// Was: "b = token.NewLineBase(pos, filename, true, line, col)"
_, _, _ = pos, line, col
}
b := filename
pr.posBases[idx] = b
return b
}
// @@@ Packages
func (r *reader) pkg() *types.Package {
r.Sync(pkgbits.SyncPkg)
return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg))
}
func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
// TODO(mdempsky): Consider using some non-nil pointer to indicate
// the universe scope, so we don't need to keep re-reading it.
if pkg := pr.pkgs[idx]; pkg != nil {
return pkg
}
pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg()
pr.pkgs[idx] = pkg
return pkg
}
func (r *reader) doPkg() *types.Package {
path := r.String()
switch path {
case "":
path = r.p.PkgPath()
case "builtin":
return nil // universe
case "unsafe":
return types.Unsafe
}
if pkg := r.p.imports[path]; pkg != nil {
return pkg
}
name := r.String()
pkg := types.NewPackage(path, name)
r.p.imports[path] = pkg
imports := make([]*types.Package, r.Len())
for i := range imports {
imports[i] = r.pkg()
}
pkg.SetImports(imports)
return pkg
}
// @@@ Types
func (r *reader) typ() types.Type {
return r.p.typIdx(r.typInfo(), r.dict)
}
func (r *reader) typInfo() typeInfo {
r.Sync(pkgbits.SyncType)
if r.Bool() {
return typeInfo{idx: pkgbits.Index(r.Len()), derived: true}
}
return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false}
}
func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
idx := info.idx
var where *types.Type
if info.derived {
where = &dict.derivedTypes[idx]
idx = dict.derived[idx].idx
} else {
where = &pr.typs[idx]
}
if typ := *where; typ != nil {
return typ
}
r := pr.newReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
r.dict = dict
typ := r.doTyp()
assert(typ != nil)
// See comment in pkgReader.typIdx explaining how this happens.
if prev := *where; prev != nil {
return prev
}
*where = typ
return typ
}
func (r *reader) doTyp() (res types.Type) {
switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag {
default:
errorf("unhandled type tag: %v", tag)
panic("unreachable")
case pkgbits.TypeBasic:
return types.Typ[r.Len()]
case pkgbits.TypeNamed:
obj, targs := r.obj()
name := obj.(*types.TypeName)
if len(targs) != 0 {
t, _ := types.Instantiate(r.p.ctxt, name.Type(), targs, false)
return t
}
return name.Type()
case pkgbits.TypeTypeParam:
return r.dict.tparams[r.Len()]
case pkgbits.TypeArray:
len := int64(r.Uint64())
return types.NewArray(r.typ(), len)
case pkgbits.TypeChan:
dir := types.ChanDir(r.Len())
return types.NewChan(dir, r.typ())
case pkgbits.TypeMap:
return types.NewMap(r.typ(), r.typ())
case pkgbits.TypePointer:
return types.NewPointer(r.typ())
case pkgbits.TypeSignature:
return r.signature(nil, nil, nil)
case pkgbits.TypeSlice:
return types.NewSlice(r.typ())
case pkgbits.TypeStruct:
return r.structType()
case pkgbits.TypeInterface:
return r.interfaceType()
case pkgbits.TypeUnion:
return r.unionType()
}
}
func (r *reader) structType() *types.Struct {
fields := make([]*types.Var, r.Len())
var tags []string
for i := range fields {
pos := r.pos()
pkg, name := r.selector()
ftyp := r.typ()
tag := r.String()
embedded := r.Bool()
fields[i] = types.NewField(pos, pkg, name, ftyp, embedded)
if tag != "" {
for len(tags) < i {
tags = append(tags, "")
}
tags = append(tags, tag)
}
}
return types.NewStruct(fields, tags)
}
func (r *reader) unionType() *types.Union {
terms := make([]*types.Term, r.Len())
for i := range terms {
terms[i] = types.NewTerm(r.Bool(), r.typ())
}
return types.NewUnion(terms)
}
func (r *reader) interfaceType() *types.Interface {
methods := make([]*types.Func, r.Len())
embeddeds := make([]types.Type, r.Len())
implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool()
for i := range methods {
pos := r.pos()
pkg, name := r.selector()
mtyp := r.signature(nil, nil, nil)
methods[i] = types.NewFunc(pos, pkg, name, mtyp)
}
for i := range embeddeds {
embeddeds[i] = r.typ()
}
iface := types.NewInterfaceType(methods, embeddeds)
if implicit {
iface.MarkImplicit()
}
return iface
}
func (r *reader) signature(recv *types.Var, rtparams, tparams []*types.TypeParam) *types.Signature {
r.Sync(pkgbits.SyncSignature)
params := r.params()
results := r.params()
variadic := r.Bool()
return types.NewSignatureType(recv, rtparams, tparams, params, results, variadic)
}
func (r *reader) params() *types.Tuple {
r.Sync(pkgbits.SyncParams)
params := make([]*types.Var, r.Len())
for i := range params {
params[i] = r.param()
}
return types.NewTuple(params...)
}
func (r *reader) param() *types.Var {
r.Sync(pkgbits.SyncParam)
pos := r.pos()
pkg, name := r.localIdent()
typ := r.typ()
return types.NewParam(pos, pkg, name, typ)
}
// @@@ Objects
func (r *reader) obj() (types.Object, []types.Type) {
r.Sync(pkgbits.SyncObject)
assert(!r.Bool())
pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj))
obj := pkgScope(pkg).Lookup(name)
targs := make([]types.Type, r.Len())
for i := range targs {
targs[i] = r.typ()
}
return obj, targs
}
func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
objPkg, objName := rname.qualifiedIdent()
assert(objName != "")
tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
if tag == pkgbits.ObjStub {
assert(objPkg == nil || objPkg == types.Unsafe)
return objPkg, objName
}
if objPkg.Scope().Lookup(objName) == nil {
dict := pr.objDictIdx(idx)
r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1)
r.dict = dict
declare := func(obj types.Object) {
objPkg.Scope().Insert(obj)
}
switch tag {
default:
panic("weird")
case pkgbits.ObjAlias:
pos := r.pos()
typ := r.typ()
declare(types.NewTypeName(pos, objPkg, objName, typ))
case pkgbits.ObjConst:
pos := r.pos()
typ := r.typ()
val := r.Value()
declare(types.NewConst(pos, objPkg, objName, typ, val))
case pkgbits.ObjFunc:
pos := r.pos()
tparams := r.typeParamNames()
sig := r.signature(nil, nil, tparams)
declare(types.NewFunc(pos, objPkg, objName, sig))
case pkgbits.ObjType:
pos := r.pos()
obj := types.NewTypeName(pos, objPkg, objName, nil)
named := types.NewNamed(obj, nil, nil)
declare(obj)
named.SetTypeParams(r.typeParamNames())
// TODO(mdempsky): Rewrite receiver types when the underlying type is an
// Interface? The go/types importer does this (I think because
// unit tests expected that), but cmd/compile doesn't care
// about it, so maybe we can avoid worrying about that here.
rhs := r.typ()
r.p.later(func() {
underlying := rhs.Underlying()
named.SetUnderlying(underlying)
})
for i, n := 0, r.Len(); i < n; i++ {
named.AddMethod(r.method())
}
case pkgbits.ObjVar:
pos := r.pos()
typ := r.typ()
declare(types.NewVar(pos, objPkg, objName, typ))
}
}
return objPkg, objName
}
func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
var dict readerDict
if implicits := r.Len(); implicits != 0 {
errorf("unexpected object with %v implicit type parameter(s)", implicits)
}
dict.bounds = make([]typeInfo, r.Len())
for i := range dict.bounds {
dict.bounds[i] = r.typInfo()
}
dict.derived = make([]derivedInfo, r.Len())
dict.derivedTypes = make([]types.Type, len(dict.derived))
for i := range dict.derived {
dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
}
// function references follow, but reader doesn't need those
return &dict
}
func (r *reader) typeParamNames() []*types.TypeParam {
r.Sync(pkgbits.SyncTypeParamNames)
// Note: This code assumes it only processes objects without
// implicit type parameters. This is currently fine, because
// reader is only used to read in exported declarations, which are
// always package scoped.
if len(r.dict.bounds) == 0 {
return nil
}
// Careful: Type parameter lists may have cycles. To allow for this,
// we construct the type parameter list in two passes: first we
// create all the TypeNames and TypeParams, then we construct and
// set the bound type.
r.dict.tparams = make([]*types.TypeParam, len(r.dict.bounds))
for i := range r.dict.bounds {
pos := r.pos()
pkg, name := r.localIdent()
tname := types.NewTypeName(pos, pkg, name, nil)
r.dict.tparams[i] = types.NewTypeParam(tname, nil)
}
typs := make([]types.Type, len(r.dict.bounds))
for i, bound := range r.dict.bounds {
typs[i] = r.p.typIdx(bound, r.dict)
}
// TODO(mdempsky): This is subtle, elaborate further.
//
// We have to save tparams outside of the closure, because
// typeParamNames() can be called multiple times with the same
// dictionary instance.
//
// Also, this needs to happen later to make sure SetUnderlying has
// been called.
//
// TODO(mdempsky): Is it safe to have a single "later" slice or do
// we need to have multiple passes? See comments on CL 386002 and
// go.dev/issue/52104.
tparams := r.dict.tparams
r.p.later(func() {
for i, typ := range typs {
tparams[i].SetConstraint(typ)
}
})
return r.dict.tparams
}
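// Illustrative example (not part of the vendored source) of why two passes
// are needed: in a declaration such as
//
//	func F[A interface{ ~[]B }, B any]() {}
//
// the bound of A mentions B, so every TypeParam must exist before any bound
// is resolved; the bounds themselves are attached later via SetConstraint in
// the function registered with r.p.later above.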
func (r *reader) method() *types.Func {
r.Sync(pkgbits.SyncMethod)
pos := r.pos()
pkg, name := r.selector()
rparams := r.typeParamNames()
sig := r.signature(r.param(), rparams, nil)
_ = r.pos() // TODO(mdempsky): Remove; this is a hack for linker.go.
return types.NewFunc(pos, pkg, name, sig)
}
func (r *reader) qualifiedIdent() (*types.Package, string) { return r.ident(pkgbits.SyncSym) }
func (r *reader) localIdent() (*types.Package, string) { return r.ident(pkgbits.SyncLocalIdent) }
func (r *reader) selector() (*types.Package, string) { return r.ident(pkgbits.SyncSelector) }
func (r *reader) ident(marker pkgbits.SyncMarker) (*types.Package, string) {
r.Sync(marker)
return r.pkg(), r.String()
}
// pkgScope returns pkg.Scope().
// If pkg is nil, it returns types.Universe instead.
//
// TODO(mdempsky): Remove after x/tools can depend on Go 1.19.
func pkgScope(pkg *types.Package) *types.Scope {
if pkg != nil {
return pkg.Scope()
}
return types.Universe
}

77
vendor/golang.org/x/tools/go/internal/pkgbits/codes.go generated vendored Normal file
View File

@ -0,0 +1,77 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
// A Code is an enum value that can be encoded into bitstreams.
//
// Code types are preferable for enum types, because they allow
// Decoder to detect desyncs.
type Code interface {
// Marker returns the SyncMarker for the Code's dynamic type.
Marker() SyncMarker
// Value returns the Code's ordinal value.
Value() int
}
// A CodeVal distinguishes among go/constant.Value encodings.
type CodeVal int
func (c CodeVal) Marker() SyncMarker { return SyncVal }
func (c CodeVal) Value() int { return int(c) }
// Note: These values are public and cannot be changed without
// updating the go/types importers.
const (
ValBool CodeVal = iota
ValString
ValInt64
ValBigInt
ValBigRat
ValBigFloat
)
// A CodeType distinguishes among go/types.Type encodings.
type CodeType int
func (c CodeType) Marker() SyncMarker { return SyncType }
func (c CodeType) Value() int { return int(c) }
// Note: These values are public and cannot be changed without
// updating the go/types importers.
const (
TypeBasic CodeType = iota
TypeNamed
TypePointer
TypeSlice
TypeArray
TypeChan
TypeMap
TypeSignature
TypeStruct
TypeInterface
TypeUnion
TypeTypeParam
)
// A CodeObj distinguishes among go/types.Object encodings.
type CodeObj int
func (c CodeObj) Marker() SyncMarker { return SyncCodeObj }
func (c CodeObj) Value() int { return int(c) }
// Note: These values are public and cannot be changed without
// updating the go/types importers.
const (
ObjAlias CodeObj = iota
ObjConst
ObjType
ObjFunc
ObjVar
ObjStub
)

View File

@ -0,0 +1,433 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
import (
"encoding/binary"
"fmt"
"go/constant"
"go/token"
"math/big"
"os"
"runtime"
"strings"
)
// A PkgDecoder provides methods for decoding a package's Unified IR
// export data.
type PkgDecoder struct {
// version is the file format version.
version uint32
// sync indicates whether the file uses sync markers.
sync bool
// pkgPath is the package path for the package to be decoded.
//
// TODO(mdempsky): Remove; unneeded since CL 391014.
pkgPath string
// elemData is the full data payload of the encoded package.
// Elements are densely and contiguously packed together.
//
// The last 8 bytes of elemData are the package fingerprint.
elemData string
// elemEnds stores the byte-offset end positions of element
// bitstreams within elemData.
//
// For example, element I's bitstream data starts at elemEnds[I-1]
// (or 0, if I==0) and ends at elemEnds[I].
//
// Note: elemEnds is indexed by absolute indices, not
// section-relative indices.
elemEnds []uint32
// elemEndsEnds stores the index-offset end positions of relocation
// sections within elemEnds.
//
// For example, section K's end positions start at elemEndsEnds[K-1]
// (or 0, if K==0) and end at elemEndsEnds[K].
elemEndsEnds [numRelocs]uint32
}
// PkgPath returns the package path for the package
//
// TODO(mdempsky): Remove; unneeded since CL 391014.
func (pr *PkgDecoder) PkgPath() string { return pr.pkgPath }
// SyncMarkers reports whether pr uses sync markers.
func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync }
// NewPkgDecoder returns a PkgDecoder initialized to read the Unified
// IR export data from input. pkgPath is the package path for the
// compilation unit that produced the export data.
//
// TODO(mdempsky): Remove pkgPath parameter; unneeded since CL 391014.
func NewPkgDecoder(pkgPath, input string) PkgDecoder {
pr := PkgDecoder{
pkgPath: pkgPath,
}
// TODO(mdempsky): Implement direct indexing of input string to
// avoid copying the position information.
r := strings.NewReader(input)
assert(binary.Read(r, binary.LittleEndian, &pr.version) == nil)
switch pr.version {
default:
panic(fmt.Errorf("unsupported version: %v", pr.version))
case 0:
// no flags
case 1:
var flags uint32
assert(binary.Read(r, binary.LittleEndian, &flags) == nil)
pr.sync = flags&flagSyncMarkers != 0
}
assert(binary.Read(r, binary.LittleEndian, pr.elemEndsEnds[:]) == nil)
pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1])
assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil)
pos, err := r.Seek(0, os.SEEK_CUR)
assert(err == nil)
pr.elemData = input[pos:]
assert(len(pr.elemData)-8 == int(pr.elemEnds[len(pr.elemEnds)-1]))
return pr
}
// NumElems returns the number of elements in section k.
func (pr *PkgDecoder) NumElems(k RelocKind) int {
count := int(pr.elemEndsEnds[k])
if k > 0 {
count -= int(pr.elemEndsEnds[k-1])
}
return count
}
// TotalElems returns the total number of elements across all sections.
func (pr *PkgDecoder) TotalElems() int {
return len(pr.elemEnds)
}
// Fingerprint returns the package fingerprint.
func (pr *PkgDecoder) Fingerprint() [8]byte {
var fp [8]byte
copy(fp[:], pr.elemData[len(pr.elemData)-8:])
return fp
}
// AbsIdx returns the absolute index for the given (section, index)
// pair.
func (pr *PkgDecoder) AbsIdx(k RelocKind, idx Index) int {
absIdx := int(idx)
if k > 0 {
absIdx += int(pr.elemEndsEnds[k-1])
}
if absIdx >= int(pr.elemEndsEnds[k]) {
errorf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds)
}
return absIdx
}
// DataIdx returns the raw element bitstream for the given (section,
// index) pair.
func (pr *PkgDecoder) DataIdx(k RelocKind, idx Index) string {
absIdx := pr.AbsIdx(k, idx)
var start uint32
if absIdx > 0 {
start = pr.elemEnds[absIdx-1]
}
end := pr.elemEnds[absIdx]
return pr.elemData[start:end]
}
// StringIdx returns the string value for the given string index.
func (pr *PkgDecoder) StringIdx(idx Index) string {
return pr.DataIdx(RelocString, idx)
}
// NewDecoder returns a Decoder for the given (section, index) pair,
// and decodes the given SyncMarker from the element bitstream.
func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
r := pr.NewDecoderRaw(k, idx)
r.Sync(marker)
return r
}
// NewDecoderRaw returns a Decoder for the given (section, index) pair.
//
// Most callers should use NewDecoder instead.
func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder {
r := Decoder{
common: pr,
k: k,
Idx: idx,
}
// TODO(mdempsky): r.data.Reset(...) after #44505 is resolved.
r.Data = *strings.NewReader(pr.DataIdx(k, idx))
r.Sync(SyncRelocs)
r.Relocs = make([]RelocEnt, r.Len())
for i := range r.Relocs {
r.Sync(SyncReloc)
r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
}
return r
}
// A Decoder provides methods for decoding an individual element's
// bitstream data.
type Decoder struct {
common *PkgDecoder
Relocs []RelocEnt
Data strings.Reader
k RelocKind
Idx Index
}
func (r *Decoder) checkErr(err error) {
if err != nil {
errorf("unexpected decoding error: %w", err)
}
}
func (r *Decoder) rawUvarint() uint64 {
x, err := binary.ReadUvarint(&r.Data)
r.checkErr(err)
return x
}
func (r *Decoder) rawVarint() int64 {
ux := r.rawUvarint()
// Zig-zag decode.
x := int64(ux >> 1)
if ux&1 != 0 {
x = ^x
}
return x
}
func (r *Decoder) rawReloc(k RelocKind, idx int) Index {
e := r.Relocs[idx]
assert(e.Kind == k)
return e.Idx
}
// Sync decodes a sync marker from the element bitstream and asserts
// that it matches the expected marker.
//
// If r.common.sync is false, then Sync is a no-op.
func (r *Decoder) Sync(mWant SyncMarker) {
if !r.common.sync {
return
}
pos, _ := r.Data.Seek(0, os.SEEK_CUR) // TODO(mdempsky): io.SeekCurrent after #44505 is resolved
mHave := SyncMarker(r.rawUvarint())
writerPCs := make([]int, r.rawUvarint())
for i := range writerPCs {
writerPCs[i] = int(r.rawUvarint())
}
if mHave == mWant {
return
}
// There's some tension here between printing:
//
// (1) full file paths that tools can recognize (e.g., so emacs
// hyperlinks the "file:line" text for easy navigation), or
//
// (2) short file paths that are easier for humans to read (e.g., by
// omitting redundant or irrelevant details, so it's easier to
// focus on the useful bits that remain).
//
// The current formatting favors the former, as it seems more
// helpful in practice. But perhaps the formatting could be improved
// to better address both concerns. For example, use relative file
// paths if they would be shorter, or rewrite file paths to contain
// "$GOROOT" (like objabi.AbsFile does) if tools can be taught how
// to reliably expand that again.
fmt.Printf("export data desync: package %q, section %v, index %v, offset %v\n", r.common.pkgPath, r.k, r.Idx, pos)
fmt.Printf("\nfound %v, written at:\n", mHave)
if len(writerPCs) == 0 {
fmt.Printf("\t[stack trace unavailable; recompile package %q with -d=syncframes]\n", r.common.pkgPath)
}
for _, pc := range writerPCs {
fmt.Printf("\t%s\n", r.common.StringIdx(r.rawReloc(RelocString, pc)))
}
fmt.Printf("\nexpected %v, reading at:\n", mWant)
var readerPCs [32]uintptr // TODO(mdempsky): Dynamically size?
n := runtime.Callers(2, readerPCs[:])
for _, pc := range fmtFrames(readerPCs[:n]...) {
fmt.Printf("\t%s\n", pc)
}
// We already printed a stack trace for the reader, so now we can
// simply exit. Printing a second one with panic or base.Fatalf
// would just be noise.
os.Exit(1)
}
// Bool decodes and returns a bool value from the element bitstream.
func (r *Decoder) Bool() bool {
r.Sync(SyncBool)
x, err := r.Data.ReadByte()
r.checkErr(err)
assert(x < 2)
return x != 0
}
// Int64 decodes and returns an int64 value from the element bitstream.
func (r *Decoder) Int64() int64 {
r.Sync(SyncInt64)
return r.rawVarint()
}
// Uint64 decodes and returns a uint64 value from the element bitstream.
func (r *Decoder) Uint64() uint64 {
r.Sync(SyncUint64)
return r.rawUvarint()
}
// Len decodes and returns a non-negative int value from the element bitstream.
func (r *Decoder) Len() int { x := r.Uint64(); v := int(x); assert(uint64(v) == x); return v }
// Int decodes and returns an int value from the element bitstream.
func (r *Decoder) Int() int { x := r.Int64(); v := int(x); assert(int64(v) == x); return v }
// Uint decodes and returns a uint value from the element bitstream.
func (r *Decoder) Uint() uint { x := r.Uint64(); v := uint(x); assert(uint64(v) == x); return v }
// Code decodes a Code value from the element bitstream and returns
// its ordinal value. It's the caller's responsibility to convert the
// result to an appropriate Code type.
//
// TODO(mdempsky): Ideally this method would have signature "Code[T
// Code] T" instead, but we don't allow generic methods and the
// compiler can't depend on generics yet anyway.
func (r *Decoder) Code(mark SyncMarker) int {
r.Sync(mark)
return r.Len()
}
// Reloc decodes a relocation of expected section k from the element
// bitstream and returns an index to the referenced element.
func (r *Decoder) Reloc(k RelocKind) Index {
r.Sync(SyncUseReloc)
return r.rawReloc(k, r.Len())
}
// String decodes and returns a string value from the element
// bitstream.
func (r *Decoder) String() string {
r.Sync(SyncString)
return r.common.StringIdx(r.Reloc(RelocString))
}
// Strings decodes and returns a variable-length slice of strings from
// the element bitstream.
func (r *Decoder) Strings() []string {
res := make([]string, r.Len())
for i := range res {
res[i] = r.String()
}
return res
}
// Value decodes and returns a constant.Value from the element
// bitstream.
func (r *Decoder) Value() constant.Value {
r.Sync(SyncValue)
isComplex := r.Bool()
val := r.scalar()
if isComplex {
val = constant.BinaryOp(val, token.ADD, constant.MakeImag(r.scalar()))
}
return val
}
func (r *Decoder) scalar() constant.Value {
switch tag := CodeVal(r.Code(SyncVal)); tag {
default:
panic(fmt.Errorf("unexpected scalar tag: %v", tag))
case ValBool:
return constant.MakeBool(r.Bool())
case ValString:
return constant.MakeString(r.String())
case ValInt64:
return constant.MakeInt64(r.Int64())
case ValBigInt:
return constant.Make(r.bigInt())
case ValBigRat:
num := r.bigInt()
denom := r.bigInt()
return constant.Make(new(big.Rat).SetFrac(num, denom))
case ValBigFloat:
return constant.Make(r.bigFloat())
}
}
func (r *Decoder) bigInt() *big.Int {
v := new(big.Int).SetBytes([]byte(r.String()))
if r.Bool() {
v.Neg(v)
}
return v
}
func (r *Decoder) bigFloat() *big.Float {
v := new(big.Float).SetPrec(512)
assert(v.UnmarshalText([]byte(r.String())) == nil)
return v
}
// @@@ Helpers
// TODO(mdempsky): These should probably be removed. I think they're a
// smell that the export data format is not yet quite right.
// PeekPkgPath returns the package path for the specified package
// index.
func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
r := pr.NewDecoder(RelocPkg, idx, SyncPkgDef)
path := r.String()
if path == "" {
path = pr.pkgPath
}
return path
}
// PeekObj returns the package path, object name, and CodeObj for the
// specified object index.
func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) {
r := pr.NewDecoder(RelocName, idx, SyncObject1)
r.Sync(SyncSym)
r.Sync(SyncPkg)
path := pr.PeekPkgPath(r.Reloc(RelocPkg))
name := r.String()
assert(name != "")
tag := CodeObj(r.Code(SyncCodeObj))
return path, name, tag
}

vendor/golang.org/x/tools/go/internal/pkgbits/doc.go generated vendored Normal file

@ -0,0 +1,32 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package pkgbits implements low-level coding abstractions for
// Unified IR's export data format.
//
// At a low level, a package is a collection of bitstream elements.
// Each element has a "kind" and a dense, non-negative index.
// Elements can be randomly accessed given their kind and index.
//
// Individual elements are sequences of variable-length values (e.g.,
// integers, booleans, strings, go/constant values, cross-references
// to other elements). Package pkgbits provides APIs for encoding and
// decoding these low-level values, but the details of mapping
// higher-level Go constructs into elements are left to higher-level
// abstractions.
//
// Elements may cross-reference each other with "relocations." For
// example, an element representing a pointer type has a relocation
// referring to the element type.
//
// Go constructs may be composed as a constellation of multiple
// elements. For example, a declared function may have one element to
// describe the object (e.g., its name, type, position), and a
// separate element to describe its function body. This allows readers
// some flexibility in efficiently seeking or re-reading data (e.g.,
// inlining requires re-reading the function body for each inlined
// call, without needing to re-read the object-level details).
//
// This is a copy of internal/pkgbits in the Go implementation.
package pkgbits
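Illustrative sketch (not part of the vendored tree; pkgbits is internal to x/tools, so this could only live inside the package itself): a minimal round trip of one string element through PkgEncoder and PkgDecoder with sync markers disabled, assuming "bytes" is imported.
func roundTripExample() string {
	pw := NewPkgEncoder(-1) // negative syncFrames: omit sync markers
	e := pw.NewEncoder(RelocMeta, SyncPublic)
	e.String("hello")
	idx := e.Flush()

	var buf bytes.Buffer
	pw.DumpTo(&buf) // fingerprint return value ignored here

	pr := NewPkgDecoder("example", buf.String())
	r := pr.NewDecoder(RelocMeta, idx, SyncPublic)
	return r.String() // "hello"
}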


@ -0,0 +1,379 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
import (
"bytes"
"crypto/md5"
"encoding/binary"
"go/constant"
"io"
"math/big"
"runtime"
)
// currentVersion is the current version number.
//
// - v0: initial prototype
//
// - v1: adds the flags uint32 word
const currentVersion uint32 = 1
// A PkgEncoder provides methods for encoding a package's Unified IR
// export data.
type PkgEncoder struct {
// elems holds the bitstream for previously encoded elements.
elems [numRelocs][]string
// stringsIdx maps previously encoded strings to their index within
// the RelocString section, to allow deduplication. That is,
// elems[RelocString][stringsIdx[s]] == s (if present).
stringsIdx map[string]Index
// syncFrames is the number of frames to write at each sync
// marker. A negative value means sync markers are omitted.
syncFrames int
}
// SyncMarkers reports whether pw uses sync markers.
func (pw *PkgEncoder) SyncMarkers() bool { return pw.syncFrames >= 0 }
// NewPkgEncoder returns an initialized PkgEncoder.
//
// syncFrames is the number of caller frames that should be serialized
// at Sync points. Serializing additional frames results in larger
// export data files, but can help diagnosing desync errors in
// higher-level Unified IR reader/writer code. If syncFrames is
// negative, then sync markers are omitted entirely.
func NewPkgEncoder(syncFrames int) PkgEncoder {
return PkgEncoder{
stringsIdx: make(map[string]Index),
syncFrames: syncFrames,
}
}
// DumpTo writes the package's encoded data to out0 and returns the
// package fingerprint.
func (pw *PkgEncoder) DumpTo(out0 io.Writer) (fingerprint [8]byte) {
h := md5.New()
out := io.MultiWriter(out0, h)
writeUint32 := func(x uint32) {
assert(binary.Write(out, binary.LittleEndian, x) == nil)
}
writeUint32(currentVersion)
var flags uint32
if pw.SyncMarkers() {
flags |= flagSyncMarkers
}
writeUint32(flags)
// Write elemEndsEnds.
var sum uint32
for _, elems := range &pw.elems {
sum += uint32(len(elems))
writeUint32(sum)
}
// Write elemEnds.
sum = 0
for _, elems := range &pw.elems {
for _, elem := range elems {
sum += uint32(len(elem))
writeUint32(sum)
}
}
// Write elemData.
for _, elems := range &pw.elems {
for _, elem := range elems {
_, err := io.WriteString(out, elem)
assert(err == nil)
}
}
// Write fingerprint.
copy(fingerprint[:], h.Sum(nil))
_, err := out0.Write(fingerprint[:])
assert(err == nil)
return
}
// StringIdx adds a string value to the strings section, if not
// already present, and returns its index.
func (pw *PkgEncoder) StringIdx(s string) Index {
if idx, ok := pw.stringsIdx[s]; ok {
assert(pw.elems[RelocString][idx] == s)
return idx
}
idx := Index(len(pw.elems[RelocString]))
pw.elems[RelocString] = append(pw.elems[RelocString], s)
pw.stringsIdx[s] = idx
return idx
}
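// Illustrative sketch (not from the upstream source): repeated strings are
// stored once and share an index, so
//
//	a := pw.StringIdx("shared")
//	b := pw.StringIdx("shared")
//
// leaves a == b and exactly one copy of "shared" in elems[RelocString].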
// NewEncoder returns an Encoder for a new element within the given
// section, and encodes the given SyncMarker as the start of the
// element bitstream.
func (pw *PkgEncoder) NewEncoder(k RelocKind, marker SyncMarker) Encoder {
e := pw.NewEncoderRaw(k)
e.Sync(marker)
return e
}
// NewEncoderRaw returns an Encoder for a new element within the given
// section.
//
// Most callers should use NewEncoder instead.
func (pw *PkgEncoder) NewEncoderRaw(k RelocKind) Encoder {
idx := Index(len(pw.elems[k]))
pw.elems[k] = append(pw.elems[k], "") // placeholder
return Encoder{
p: pw,
k: k,
Idx: idx,
}
}
// An Encoder provides methods for encoding an individual element's
// bitstream data.
type Encoder struct {
p *PkgEncoder
Relocs []RelocEnt
Data bytes.Buffer // accumulated element bitstream data
encodingRelocHeader bool
k RelocKind
Idx Index // index within relocation section
}
// Flush finalizes the element's bitstream and returns its Index.
func (w *Encoder) Flush() Index {
var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved
// Back up the data so we write the relocations at the front.
var tmp bytes.Buffer
io.Copy(&tmp, &w.Data)
// TODO(mdempsky): Consider writing these out separately so they're
// easier to strip, along with function bodies, so that we can prune
// down to just the data that's relevant to go/types.
if w.encodingRelocHeader {
panic("encodingRelocHeader already true; recursive flush?")
}
w.encodingRelocHeader = true
w.Sync(SyncRelocs)
w.Len(len(w.Relocs))
for _, rEnt := range w.Relocs {
w.Sync(SyncReloc)
w.Len(int(rEnt.Kind))
w.Len(int(rEnt.Idx))
}
io.Copy(&sb, &w.Data)
io.Copy(&sb, &tmp)
w.p.elems[w.k][w.Idx] = sb.String()
return w.Idx
}
func (w *Encoder) checkErr(err error) {
if err != nil {
errorf("unexpected encoding error: %v", err)
}
}
func (w *Encoder) rawUvarint(x uint64) {
var buf [binary.MaxVarintLen64]byte
n := binary.PutUvarint(buf[:], x)
_, err := w.Data.Write(buf[:n])
w.checkErr(err)
}
func (w *Encoder) rawVarint(x int64) {
// Zig-zag encode.
ux := uint64(x) << 1
if x < 0 {
ux = ^ux
}
w.rawUvarint(ux)
}
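// Illustrative sketch (not from the upstream source): zig-zag maps signed
// values of small magnitude to small unsigned values, so they take few varint
// bytes: 0->0, -1->1, 1->2, -2->3, 2->4, and so on. Decoder.rawVarint inverts
// the mapping exactly, as the helper below demonstrates.
func zigzagRoundTrips(x int64) bool {
	ux := uint64(x) << 1 // encode
	if x < 0 {
		ux = ^ux
	}
	y := int64(ux >> 1) // decode
	if ux&1 != 0 {
		y = ^y
	}
	return y == x // always true
}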
func (w *Encoder) rawReloc(r RelocKind, idx Index) int {
// TODO(mdempsky): Use map for lookup; this takes quadratic time.
for i, rEnt := range w.Relocs {
if rEnt.Kind == r && rEnt.Idx == idx {
return i
}
}
i := len(w.Relocs)
w.Relocs = append(w.Relocs, RelocEnt{r, idx})
return i
}
func (w *Encoder) Sync(m SyncMarker) {
if !w.p.SyncMarkers() {
return
}
// Writing out stack frame string references requires working
// relocations, but writing out the relocations themselves involves
// sync markers. To prevent infinite recursion, we simply trim the
// stack frame for sync markers within the relocation header.
var frames []string
if !w.encodingRelocHeader && w.p.syncFrames > 0 {
pcs := make([]uintptr, w.p.syncFrames)
n := runtime.Callers(2, pcs)
frames = fmtFrames(pcs[:n]...)
}
// TODO(mdempsky): Save space by writing out stack frames as a
// linked list so we can share common stack frames.
w.rawUvarint(uint64(m))
w.rawUvarint(uint64(len(frames)))
for _, frame := range frames {
w.rawUvarint(uint64(w.rawReloc(RelocString, w.p.StringIdx(frame))))
}
}
// Bool encodes and writes a bool value into the element bitstream,
// and then returns the bool value.
//
// For simple, 2-alternative encodings, the idiomatic way to call Bool
// is something like:
//
// if w.Bool(x != 0) {
// // alternative #1
// } else {
// // alternative #2
// }
//
// For multi-alternative encodings, use Code instead.
func (w *Encoder) Bool(b bool) bool {
w.Sync(SyncBool)
var x byte
if b {
x = 1
}
err := w.Data.WriteByte(x)
w.checkErr(err)
return b
}
// Int64 encodes and writes an int64 value into the element bitstream.
func (w *Encoder) Int64(x int64) {
w.Sync(SyncInt64)
w.rawVarint(x)
}
// Uint64 encodes and writes a uint64 value into the element bitstream.
func (w *Encoder) Uint64(x uint64) {
w.Sync(SyncUint64)
w.rawUvarint(x)
}
// Len encodes and writes a non-negative int value into the element bitstream.
func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) }
// Int encodes and writes an int value into the element bitstream.
func (w *Encoder) Int(x int) { w.Int64(int64(x)) }
// Uint encodes and writes a uint value into the element bitstream.
func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) }
// Reloc encodes and writes a relocation for the given (section,
// index) pair into the element bitstream.
//
// Note: Only the index is formally written into the element
// bitstream, so bitstream decoders must know from context which
// section an encoded relocation refers to.
func (w *Encoder) Reloc(r RelocKind, idx Index) {
w.Sync(SyncUseReloc)
w.Len(w.rawReloc(r, idx))
}
// Code encodes and writes a Code value into the element bitstream.
func (w *Encoder) Code(c Code) {
w.Sync(c.Marker())
w.Len(c.Value())
}
// String encodes and writes a string value into the element
// bitstream.
//
// Internally, strings are deduplicated by adding them to the strings
// section (if not already present), and then writing a relocation
// into the element bitstream.
func (w *Encoder) String(s string) {
w.Sync(SyncString)
w.Reloc(RelocString, w.p.StringIdx(s))
}
// Strings encodes and writes a variable-length slice of strings into
// the element bitstream.
func (w *Encoder) Strings(ss []string) {
w.Len(len(ss))
for _, s := range ss {
w.String(s)
}
}
// Value encodes and writes a constant.Value into the element
// bitstream.
func (w *Encoder) Value(val constant.Value) {
w.Sync(SyncValue)
if w.Bool(val.Kind() == constant.Complex) {
w.scalar(constant.Real(val))
w.scalar(constant.Imag(val))
} else {
w.scalar(val)
}
}
func (w *Encoder) scalar(val constant.Value) {
switch v := constant.Val(val).(type) {
default:
errorf("unhandled %v (%v)", val, val.Kind())
case bool:
w.Code(ValBool)
w.Bool(v)
case string:
w.Code(ValString)
w.String(v)
case int64:
w.Code(ValInt64)
w.Int64(v)
case *big.Int:
w.Code(ValBigInt)
w.bigInt(v)
case *big.Rat:
w.Code(ValBigRat)
w.bigInt(v.Num())
w.bigInt(v.Denom())
case *big.Float:
w.Code(ValBigFloat)
w.bigFloat(v)
}
}
func (w *Encoder) bigInt(v *big.Int) {
b := v.Bytes()
w.String(string(b)) // TODO: More efficient encoding.
w.Bool(v.Sign() < 0)
}
func (w *Encoder) bigFloat(v *big.Float) {
b := v.Append(nil, 'p', -1)
w.String(string(b)) // TODO: More efficient encoding.
}


@ -0,0 +1,9 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
const (
flagSyncMarkers = 1 << iota // file format contains sync markers
)
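// Illustrative note (not from the upstream source): this is currently the
// only flag defined for the version-1 header word. PkgEncoder.DumpTo sets it
// when sync markers are enabled, and PkgDecoder reads it back as
//
//	pr.sync = flags&flagSyncMarkers != 0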


@ -0,0 +1,21 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.7
// +build !go1.7
// TODO(mdempsky): Remove after #44505 is resolved
package pkgbits
import "runtime"
func walkFrames(pcs []uintptr, visit frameVisitor) {
for _, pc := range pcs {
fn := runtime.FuncForPC(pc)
file, line := fn.FileLine(pc)
visit(file, line, fn.Name(), pc-fn.Entry())
}
}


@ -0,0 +1,28 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.7
// +build go1.7
package pkgbits
import "runtime"
// walkFrames calls visit for each call frame represented by pcs.
//
// pcs should be a slice of PCs, as returned by runtime.Callers.
func walkFrames(pcs []uintptr, visit frameVisitor) {
if len(pcs) == 0 {
return
}
frames := runtime.CallersFrames(pcs)
for {
frame, more := frames.Next()
visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
if !more {
return
}
}
}

vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go generated vendored Normal file

@ -0,0 +1,42 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
// A RelocKind indicates a particular section within a unified IR export.
type RelocKind int
// An Index represents a bitstream element index within a particular
// section.
type Index int
// A relocEnt (relocation entry) is an entry in an element's local
// reference table.
//
// TODO(mdempsky): Rename this too.
type RelocEnt struct {
Kind RelocKind
Idx Index
}
// Reserved indices within the meta relocation section.
const (
PublicRootIdx Index = 0
PrivateRootIdx Index = 1
)
const (
RelocString RelocKind = iota
RelocMeta
RelocPosBase
RelocPkg
RelocName
RelocType
RelocObj
RelocObjExt
RelocObjDict
RelocBody
numRelocs = iota
)
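// Illustrative note (not from the upstream source): because numRelocs is
// declared with iota in the same const block, it always equals the number of
// RelocKind values above it (currently 10), which keeps array sizes such as
// [numRelocs]uint32 in PkgDecoder and [numRelocs][]string in PkgEncoder in
// step with the section list.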


@ -0,0 +1,17 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
import "fmt"
func assert(b bool) {
if !b {
panic("assertion failed")
}
}
func errorf(format string, args ...interface{}) {
panic(fmt.Errorf(format, args...))
}

vendor/golang.org/x/tools/go/internal/pkgbits/sync.go generated vendored Normal file

@ -0,0 +1,113 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package pkgbits
import (
"fmt"
"strings"
)
// fmtFrames formats a backtrace for reporting reader/writer desyncs.
func fmtFrames(pcs ...uintptr) []string {
res := make([]string, 0, len(pcs))
walkFrames(pcs, func(file string, line int, name string, offset uintptr) {
// Trim package from function name. It's just redundant noise.
name = strings.TrimPrefix(name, "cmd/compile/internal/noder.")
res = append(res, fmt.Sprintf("%s:%v: %s +0x%v", file, line, name, offset))
})
return res
}
type frameVisitor func(file string, line int, name string, offset uintptr)
// SyncMarker is an enum type that represents markers that may be
// written to export data to ensure the reader and writer stay
// synchronized.
type SyncMarker int
//go:generate stringer -type=SyncMarker -trimprefix=Sync
const (
_ SyncMarker = iota
// Public markers (known to go/types importers).
// Low-level coding markers.
SyncEOF
SyncBool
SyncInt64
SyncUint64
SyncString
SyncValue
SyncVal
SyncRelocs
SyncReloc
SyncUseReloc
// Higher-level object and type markers.
SyncPublic
SyncPos
SyncPosBase
SyncObject
SyncObject1
SyncPkg
SyncPkgDef
SyncMethod
SyncType
SyncTypeIdx
SyncTypeParamNames
SyncSignature
SyncParams
SyncParam
SyncCodeObj
SyncSym
SyncLocalIdent
SyncSelector
// Private markers (only known to cmd/compile).
SyncPrivate
SyncFuncExt
SyncVarExt
SyncTypeExt
SyncPragma
SyncExprList
SyncExprs
SyncExpr
SyncExprType
SyncAssign
SyncOp
SyncFuncLit
SyncCompLit
SyncDecl
SyncFuncBody
SyncOpenScope
SyncCloseScope
SyncCloseAnotherScope
SyncDeclNames
SyncDeclName
SyncStmts
SyncBlockStmt
SyncIfStmt
SyncForStmt
SyncSwitchStmt
SyncRangeStmt
SyncCaseClause
SyncCommClause
SyncSelectStmt
SyncDecls
SyncLabeledStmt
SyncUseObjLocal
SyncAddLocal
SyncLinkname
SyncStmt1
SyncStmtsEnd
SyncLabel
SyncOptLabel
)


@ -0,0 +1,89 @@
// Code generated by "stringer -type=SyncMarker -trimprefix=Sync"; DO NOT EDIT.
package pkgbits
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[SyncEOF-1]
_ = x[SyncBool-2]
_ = x[SyncInt64-3]
_ = x[SyncUint64-4]
_ = x[SyncString-5]
_ = x[SyncValue-6]
_ = x[SyncVal-7]
_ = x[SyncRelocs-8]
_ = x[SyncReloc-9]
_ = x[SyncUseReloc-10]
_ = x[SyncPublic-11]
_ = x[SyncPos-12]
_ = x[SyncPosBase-13]
_ = x[SyncObject-14]
_ = x[SyncObject1-15]
_ = x[SyncPkg-16]
_ = x[SyncPkgDef-17]
_ = x[SyncMethod-18]
_ = x[SyncType-19]
_ = x[SyncTypeIdx-20]
_ = x[SyncTypeParamNames-21]
_ = x[SyncSignature-22]
_ = x[SyncParams-23]
_ = x[SyncParam-24]
_ = x[SyncCodeObj-25]
_ = x[SyncSym-26]
_ = x[SyncLocalIdent-27]
_ = x[SyncSelector-28]
_ = x[SyncPrivate-29]
_ = x[SyncFuncExt-30]
_ = x[SyncVarExt-31]
_ = x[SyncTypeExt-32]
_ = x[SyncPragma-33]
_ = x[SyncExprList-34]
_ = x[SyncExprs-35]
_ = x[SyncExpr-36]
_ = x[SyncExprType-37]
_ = x[SyncAssign-38]
_ = x[SyncOp-39]
_ = x[SyncFuncLit-40]
_ = x[SyncCompLit-41]
_ = x[SyncDecl-42]
_ = x[SyncFuncBody-43]
_ = x[SyncOpenScope-44]
_ = x[SyncCloseScope-45]
_ = x[SyncCloseAnotherScope-46]
_ = x[SyncDeclNames-47]
_ = x[SyncDeclName-48]
_ = x[SyncStmts-49]
_ = x[SyncBlockStmt-50]
_ = x[SyncIfStmt-51]
_ = x[SyncForStmt-52]
_ = x[SyncSwitchStmt-53]
_ = x[SyncRangeStmt-54]
_ = x[SyncCaseClause-55]
_ = x[SyncCommClause-56]
_ = x[SyncSelectStmt-57]
_ = x[SyncDecls-58]
_ = x[SyncLabeledStmt-59]
_ = x[SyncUseObjLocal-60]
_ = x[SyncAddLocal-61]
_ = x[SyncLinkname-62]
_ = x[SyncStmt1-63]
_ = x[SyncStmtsEnd-64]
_ = x[SyncLabel-65]
_ = x[SyncOptLabel-66]
}
const _SyncMarker_name = "EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabel"
var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458}
func (i SyncMarker) String() string {
i -= 1
if i < 0 || i >= SyncMarker(len(_SyncMarker_index)-1) {
return "SyncMarker(" + strconv.FormatInt(int64(i+1), 10) + ")"
}
return _SyncMarker_name[_SyncMarker_index[i]:_SyncMarker_index[i+1]]
}


@ -67,7 +67,6 @@ Most tools should pass their command-line arguments (after any flags)
uninterpreted to the loader, so that the loader can interpret them
according to the conventions of the underlying build system.
See the Example function for typical usage.
*/
package packages // import "golang.org/x/tools/go/packages"


@ -26,7 +26,6 @@ import (
"golang.org/x/tools/go/internal/packagesdriver"
"golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/packagesinternal"
"golang.org/x/xerrors"
)
// debug controls verbose logging.
@ -303,11 +302,12 @@ func (state *golistState) runContainsQueries(response *responseDeduper, queries
}
dirResponse, err := state.createDriverResponse(pattern)
// If there was an error loading the package, or the package is returned
// with errors, try to load the file as an ad-hoc package.
// If there was an error loading the package, or no packages are returned,
// or the package is returned with errors, try to load the file as an
// ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're
// in module mode and the ad-hoc is located outside a module.
if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
if err != nil || len(dirResponse.Packages) == 0 || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
@ -393,6 +393,8 @@ type jsonPackage struct {
CompiledGoFiles []string
IgnoredGoFiles []string
IgnoredOtherFiles []string
EmbedPatterns []string
EmbedFiles []string
CFiles []string
CgoFiles []string
CXXFiles []string
@ -444,7 +446,11 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
// Run "go list" for complete
// information on the specified packages.
buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
goVersion, err := state.getGoVersion()
if err != nil {
return nil, err
}
buf, err := state.invokeGo("list", golistargs(state.cfg, words, goVersion)...)
if err != nil {
return nil, err
}
@ -565,6 +571,8 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
EmbedFiles: absJoin(p.Dir, p.EmbedFiles),
EmbedPatterns: absJoin(p.Dir, p.EmbedPatterns),
IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles),
forTest: p.ForTest,
depsErrors: p.DepsErrors,
@ -805,17 +813,83 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
return res
}
func golistargs(cfg *Config, words []string) []string {
func jsonFlag(cfg *Config, goVersion int) string {
if goVersion < 19 {
return "-json"
}
var fields []string
added := make(map[string]bool)
addFields := func(fs ...string) {
for _, f := range fs {
if !added[f] {
added[f] = true
fields = append(fields, f)
}
}
}
addFields("Name", "ImportPath", "Error") // These fields are always needed
if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 {
addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles",
"CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles",
"SwigFiles", "SwigCXXFiles", "SysoFiles")
if cfg.Tests {
addFields("TestGoFiles", "XTestGoFiles")
}
}
if cfg.Mode&NeedTypes != 0 {
// CompiledGoFiles seems to be required for the test case TestCgoNoSyntax,
// even when -compiled isn't passed in.
// TODO(#52435): Should we make the test ask for -compiled, or automatically
// request CompiledGoFiles in certain circumstances?
addFields("Dir", "CompiledGoFiles")
}
if cfg.Mode&NeedCompiledGoFiles != 0 {
addFields("Dir", "CompiledGoFiles", "Export")
}
if cfg.Mode&NeedImports != 0 {
// When imports are requested, DepOnly is used to distinguish between packages
// explicitly requested and transitive imports of those packages.
addFields("DepOnly", "Imports", "ImportMap")
if cfg.Tests {
addFields("TestImports", "XTestImports")
}
}
if cfg.Mode&NeedDeps != 0 {
addFields("DepOnly")
}
if usesExportData(cfg) {
// Request Dir in the unlikely case Export is not absolute.
addFields("Dir", "Export")
}
if cfg.Mode&needInternalForTest != 0 {
addFields("ForTest")
}
if cfg.Mode&needInternalDepsErrors != 0 {
addFields("DepsErrors")
}
if cfg.Mode&NeedModule != 0 {
addFields("Module")
}
if cfg.Mode&NeedEmbedFiles != 0 {
addFields("EmbedFiles")
}
if cfg.Mode&NeedEmbedPatterns != 0 {
addFields("EmbedPatterns")
}
return "-json=" + strings.Join(fields, ",")
}
func golistargs(cfg *Config, words []string, goVersion int) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{
"-e", "-json",
"-e", jsonFlag(cfg, goVersion),
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
// go list doesn't let you pass -test and -find together,
// probably because you'd just get the TestMain.
fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0),
fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)),
}
fullargs = append(fullargs, cfg.BuildFlags...)
fullargs = append(fullargs, "--")
@ -879,7 +953,7 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer,
if !ok {
// Catastrophic error:
// - context cancellation
return nil, xerrors.Errorf("couldn't run 'go': %w", err)
return nil, fmt.Errorf("couldn't run 'go': %w", err)
}
// Old go version?


@ -15,7 +15,7 @@ var allModes = []LoadMode{
NeedCompiledGoFiles,
NeedImports,
NeedDeps,
NeedExportsFile,
NeedExportFile,
NeedTypes,
NeedSyntax,
NeedTypesInfo,
@ -28,7 +28,7 @@ var modeStrings = []string{
"NeedCompiledGoFiles",
"NeedImports",
"NeedDeps",
"NeedExportsFile",
"NeedExportFile",
"NeedTypes",
"NeedSyntax",
"NeedTypesInfo",


@ -26,6 +26,7 @@ import (
"golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/packagesinternal"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal"
)
@ -38,9 +39,6 @@ import (
// Load may return more information than requested.
type LoadMode int
// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
// NeedExportFile to make it consistent with the Package field it's adding.
const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
@ -58,8 +56,8 @@ const (
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
NeedDeps
// NeedExportsFile adds ExportFile.
NeedExportsFile
// NeedExportFile adds ExportFile.
NeedExportFile
// NeedTypes adds Types, Fset, and IllTyped.
NeedTypes
@ -73,12 +71,25 @@ const (
// NeedTypesSizes adds TypesSizes.
NeedTypesSizes
// needInternalDepsErrors adds the internal deps errors field for use by gopls.
needInternalDepsErrors
// needInternalForTest adds the internal forTest field.
// Tests must also be set on the context for this field to be populated.
needInternalForTest
// typecheckCgo enables full support for type checking cgo. Requires Go 1.15+.
// Modifies CompiledGoFiles and Types, and has no effect on its own.
typecheckCgo
// NeedModule adds Module.
NeedModule
// NeedEmbedFiles adds EmbedFiles.
NeedEmbedFiles
// NeedEmbedPatterns adds EmbedPatterns.
NeedEmbedPatterns
)
const (
@ -101,6 +112,9 @@ const (
// Deprecated: LoadAllSyntax exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadAllSyntax = LoadSyntax | NeedDeps
// Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile.
NeedExportsFile = NeedExportFile
)
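// Illustrative caller-side sketch (not part of this change): the new embed
// bits compose with the existing flags like any other LoadMode value, e.g.
//
//	cfg := &packages.Config{
//		Mode: packages.NeedName | packages.NeedFiles |
//			packages.NeedEmbedFiles | packages.NeedEmbedPatterns,
//	}
//	pkgs, err := packages.Load(cfg, "./...")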
// A Config specifies details about how packages should be loaded.
@ -295,6 +309,14 @@ type Package struct {
// including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
OtherFiles []string
// EmbedFiles lists the absolute file paths of the package's files
// embedded with go:embed.
EmbedFiles []string
// EmbedPatterns lists the absolute file patterns of the package's
// files embedded with go:embed.
EmbedPatterns []string
// IgnoredFiles lists source files that are not part of the package
// using the current build configuration but that might be part of
// the package using other build configurations.
@ -327,6 +349,9 @@ type Package struct {
// The NeedSyntax LoadMode bit populates this field for packages matching the patterns.
// If NeedDeps and NeedImports are also set, this field will also be populated
// for dependencies.
//
// Syntax is kept in the same order as CompiledGoFiles, with the caveat that nils are
// removed. If parsing returned nil, Syntax may be shorter than CompiledGoFiles.
Syntax []*ast.File
// TypesInfo provides type information about the package's syntax trees.
@ -385,6 +410,8 @@ func init() {
config.(*Config).modFlag = value
}
packagesinternal.TypecheckCgo = int(typecheckCgo)
packagesinternal.DepsErrors = int(needInternalDepsErrors)
packagesinternal.ForTest = int(needInternalForTest)
}
// An Error describes a problem with a package's metadata, syntax, or types.
@ -427,6 +454,8 @@ type flatPackage struct {
GoFiles []string `json:",omitempty"`
CompiledGoFiles []string `json:",omitempty"`
OtherFiles []string `json:",omitempty"`
EmbedFiles []string `json:",omitempty"`
EmbedPatterns []string `json:",omitempty"`
IgnoredFiles []string `json:",omitempty"`
ExportFile string `json:",omitempty"`
Imports map[string]string `json:",omitempty"`
@ -450,6 +479,8 @@ func (p *Package) MarshalJSON() ([]byte, error) {
GoFiles: p.GoFiles,
CompiledGoFiles: p.CompiledGoFiles,
OtherFiles: p.OtherFiles,
EmbedFiles: p.EmbedFiles,
EmbedPatterns: p.EmbedPatterns,
IgnoredFiles: p.IgnoredFiles,
ExportFile: p.ExportFile,
}
@ -477,6 +508,8 @@ func (p *Package) UnmarshalJSON(b []byte) error {
GoFiles: flat.GoFiles,
CompiledGoFiles: flat.CompiledGoFiles,
OtherFiles: flat.OtherFiles,
EmbedFiles: flat.EmbedFiles,
EmbedPatterns: flat.EmbedPatterns,
ExportFile: flat.ExportFile,
}
if len(flat.Imports) > 0 {
@ -610,7 +643,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
// ... or if we need types and the exportData is invalid. We fall back to (incompletely)
// typechecking packages from source if they fail to compile.
(ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
(ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
lpkg := &loaderPackage{
Package: pkg,
needtypes: needtypes,
@ -748,13 +781,19 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
ld.pkgs[i].OtherFiles = nil
ld.pkgs[i].IgnoredFiles = nil
}
if ld.requestedMode&NeedEmbedFiles == 0 {
ld.pkgs[i].EmbedFiles = nil
}
if ld.requestedMode&NeedEmbedPatterns == 0 {
ld.pkgs[i].EmbedPatterns = nil
}
if ld.requestedMode&NeedCompiledGoFiles == 0 {
ld.pkgs[i].CompiledGoFiles = nil
}
if ld.requestedMode&NeedImports == 0 {
ld.pkgs[i].Imports = nil
}
if ld.requestedMode&NeedExportsFile == 0 {
if ld.requestedMode&NeedExportFile == 0 {
ld.pkgs[i].ExportFile = ""
}
if ld.requestedMode&NeedTypes == 0 {
@ -910,6 +949,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
Scopes: make(map[ast.Node]*types.Scope),
Selections: make(map[*ast.SelectorExpr]*types.Selection),
}
typeparams.InitInstanceInfo(lpkg.TypesInfo)
lpkg.TypesSizes = ld.sizes
importer := importerFunc(func(path string) (*types.Package, error) {
@ -1048,7 +1088,6 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) {
//
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
//
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
var wg sync.WaitGroup
n := len(filenames)
@ -1092,7 +1131,6 @@ func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
// sameFile returns true if x and y have the same basename and denote
// the same file.
//
func sameFile(x, y string) bool {
if x == y {
// It could be the case that y doesn't exist.
@ -1205,8 +1243,13 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
if err != nil {
return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
if _, ok := view["go.shape"]; ok {
// Account for the pseudopackage "go.shape" that gets
// created by generic code.
viewLen++
}
if viewLen != len(view) {
log.Fatalf("Unexpected package creation during export data loading")
log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath)
}
lpkg.Types = tpkg
@ -1217,17 +1260,8 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// impliedLoadMode returns loadMode with its dependencies.
func impliedLoadMode(loadMode LoadMode) LoadMode {
if loadMode&NeedTypesInfo != 0 && loadMode&NeedImports == 0 {
// If NeedTypesInfo, go/packages needs to do typechecking itself so it can
// associate type info with the AST. To do so, we need the export data
// for dependencies, which means we need to ask for the direct dependencies.
// NeedImports is used to ask for the direct dependencies.
loadMode |= NeedImports
}
if loadMode&NeedDeps != 0 && loadMode&NeedImports == 0 {
// With NeedDeps we need to load at least direct dependencies.
// NeedImports is used to ask for the direct dependencies.
if loadMode&(NeedDeps|NeedTypes|NeedTypesInfo) != 0 {
// All these things require knowing the import graph.
loadMode |= NeedImports
}
@ -1235,5 +1269,5 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
}
func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
}


@ -40,7 +40,7 @@ var LocalPrefix string
//
// Note that filename's directory influences which imports can be chosen,
// so it is important that filename be accurate.
// To process data ``as if'' it were in filename, pass the data as a non-nil src.
// To process data “as if” it were in filename, pass the data as a non-nil src.
func Process(filename string, src []byte, opt *Options) ([]byte, error) {
var err error
if src == nil {


@ -40,12 +40,12 @@ var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
// If fastWalk returns filepath.SkipDir, the directory is skipped.
//
// Unlike filepath.Walk:
// * file stat calls must be done by the user.
// - file stat calls must be done by the user.
// The only provided metadata is the file type, which does not include
// any permission bits.
// * multiple goroutines stat the filesystem concurrently. The provided
// - multiple goroutines stat the filesystem concurrently. The provided
// walkFn must be safe for concurrent use.
// * fastWalk can follow symlinks if walkFn returns the TraverseLink
// - fastWalk can follow symlinks if walkFn returns the TraverseLink
// sentinel error. It is the walkFn's responsibility to prevent
// fastWalk from going into symlink cycles.
func Walk(root string, walkFn func(path string, typ os.FileMode) error) error {


@ -9,7 +9,6 @@ import (
"bytes"
"context"
"fmt"
exec "golang.org/x/sys/execabs"
"io"
"os"
"regexp"
@ -18,6 +17,8 @@ import (
"sync"
"time"
exec "golang.org/x/sys/execabs"
"golang.org/x/tools/internal/event"
)
@ -131,9 +132,16 @@ type Invocation struct {
Verb string
Args []string
BuildFlags []string
ModFlag string
ModFile string
Overlay string
// If ModFlag is set, the go command is invoked with -mod=ModFlag.
ModFlag string
// If ModFile is set, the go command is invoked with -modfile=ModFile.
ModFile string
// If Overlay is set, the go command is invoked with -overlay=Overlay.
Overlay string
// If CleanEnv is set, the invocation will run only with the environment
// in Env, not starting with os.Environ.
CleanEnv bool
@ -256,8 +264,10 @@ func cmdDebugStr(cmd *exec.Cmd) string {
env := make(map[string]string)
for _, kv := range cmd.Env {
split := strings.SplitN(kv, "=", 2)
k, v := split[0], split[1]
env[k] = v
if len(split) == 2 {
k, v := split[0], split[1]
env[k] = v
}
}
var args []string


@ -38,10 +38,10 @@ var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)
// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields,
// of which only Verb and Args are modified to run the appropriate Go command.
// Inspired by setDefaultBuildMod in modload/init.go
func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) {
func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, *ModuleJSON, error) {
mainMod, go114, err := getMainModuleAnd114(ctx, inv, r)
if err != nil {
return nil, false, err
return false, nil, err
}
// We check the GOFLAGS to see if there is anything overridden or not.
@ -49,7 +49,7 @@ func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON,
inv.Args = []string{"GOFLAGS"}
stdout, err := r.Run(ctx, inv)
if err != nil {
return nil, false, err
return false, nil, err
}
goflags := string(bytes.TrimSpace(stdout.Bytes()))
matches := modFlagRegexp.FindStringSubmatch(goflags)
@ -57,25 +57,27 @@ func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON,
if len(matches) != 0 {
modFlag = matches[1]
}
if modFlag != "" {
// Don't override an explicit '-mod=' argument.
return mainMod, modFlag == "vendor", nil
// Don't override an explicit '-mod=' argument.
if modFlag == "vendor" {
return true, mainMod, nil
} else if modFlag != "" {
return false, nil, nil
}
if mainMod == nil || !go114 {
return mainMod, false, nil
return false, nil, nil
}
// Check 1.14's automatic vendor mode.
if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() {
if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 {
// The Go version is at least 1.14, and a vendor directory exists.
// Set -mod=vendor by default.
return mainMod, true, nil
return true, mainMod, nil
}
}
return mainMod, false, nil
return false, nil, nil
}
// getMainModuleAnd114 gets the main module's information and whether the
// getMainModuleAnd114 gets one of the main modules' information and whether the
// go command in use is 1.14+. This is the information needed to figure out
// if vendoring should be enabled.
func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) {


@ -175,8 +175,8 @@ func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
// walk walks through the given path.
func (w *walker) walk(path string, typ os.FileMode) error {
dir := filepath.Dir(path)
if typ.IsRegular() {
dir := filepath.Dir(path)
if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
// Doesn't make sense to have regular files
// directly in your $GOPATH/src or $GOROOT/src.
@ -209,12 +209,7 @@ func (w *walker) walk(path string, typ os.FileMode) error {
// Emacs noise.
return nil
}
fi, err := os.Lstat(path)
if err != nil {
// Just ignore it.
return nil
}
if w.shouldTraverse(dir, fi) {
if w.shouldTraverse(path) {
return fastwalk.ErrTraverseLink
}
}
@ -224,13 +219,8 @@ func (w *walker) walk(path string, typ os.FileMode) error {
// shouldTraverse reports whether the symlink fi, found in dir,
// should be followed. It makes sure symlinks were never visited
// before to avoid symlink loops.
func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool {
path := filepath.Join(dir, fi.Name())
target, err := filepath.EvalSymlinks(path)
if err != nil {
return false
}
ts, err := os.Stat(target)
func (w *walker) shouldTraverse(path string) bool {
ts, err := os.Stat(path)
if err != nil {
fmt.Fprintln(os.Stderr, err)
return false
@ -238,7 +228,7 @@ func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool {
if !ts.IsDir() {
return false
}
if w.shouldSkipDir(ts, dir) {
if w.shouldSkipDir(ts, filepath.Dir(path)) {
return false
}
// Check for symlink loops by statting each directory component


@ -796,7 +796,7 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP
return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
var RequiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB"}
var RequiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB", "GOWORK"}
// ProcessEnv contains environment variables and settings that affect the use of
// the go command, the go/build package, etc.
@ -906,7 +906,7 @@ func (e *ProcessEnv) GetResolver() (Resolver, error) {
if err := e.init(); err != nil {
return nil, err
}
if len(e.Env["GOMOD"]) == 0 {
if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 {
e.resolver = newGopathResolver(e)
return e.resolver, nil
}


@ -103,12 +103,17 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
return formatFile(fileSet, file, src, nil, opt)
}
func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
mergeImports(fileSet, file)
sortImports(opt.LocalPrefix, fileSet, file)
imps := astutil.Imports(fileSet, file)
// formatFile formats the file syntax tree.
// It may mutate the token.FileSet.
//
// If an adjust function is provided, it is called after formatting
// with the original source (formatFile's src parameter) and the
// formatted file, and returns the postprocessed result.
func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
mergeImports(file)
sortImports(opt.LocalPrefix, fset.File(file.Pos()), file)
var spacesBefore []string // import paths we need spaces before
for _, impSection := range imps {
for _, impSection := range astutil.Imports(fset, file) {
// Within each block of contiguous imports, see if any
// import lines are in different group numbers. If so,
// we'll need to put a space between them so it's
@ -132,7 +137,7 @@ func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(
printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
var buf bytes.Buffer
err := printConfig.Fprint(&buf, fileSet, file)
err := printConfig.Fprint(&buf, fset, file)
if err != nil {
return nil, err
}
@ -276,11 +281,11 @@ func cutSpace(b []byte) (before, middle, after []byte) {
}
// matchSpace reformats src to use the same space context as orig.
// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
// 2) matchSpace copies the indentation of the first non-blank line in orig
// to every non-blank line in src.
// 3) matchSpace copies the trailing space from orig and uses it in place
// of src's trailing space.
// 1. If orig begins with blank lines, matchSpace inserts them at the beginning of src.
// 2. matchSpace copies the indentation of the first non-blank line in orig
// to every non-blank line in src.
// 3. matchSpace copies the trailing space from orig and uses it in place
// of src's trailing space.
func matchSpace(orig []byte, src []byte) []byte {
before, _, after := cutSpace(orig)
i := bytes.LastIndex(before, []byte{'\n'})
@ -306,7 +311,7 @@ func matchSpace(orig []byte, src []byte) []byte {
return b.Bytes()
}
var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+?)"`)
func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
var out bytes.Buffer


@ -34,7 +34,8 @@ type ModuleResolver struct {
scannedRoots map[gopathwalk.Root]bool
initialized bool
main *gocommand.ModuleJSON
mains []*gocommand.ModuleJSON
mainByDir map[string]*gocommand.ModuleJSON
modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path...
modsByDir []*gocommand.ModuleJSON // ...or Dir.
@ -69,21 +70,29 @@ func (r *ModuleResolver) init() error {
Logf: r.env.Logf,
WorkingDir: r.env.WorkingDir,
}
mainMod, vendorEnabled, err := gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
if err != nil {
return err
vendorEnabled := false
var mainModVendor *gocommand.ModuleJSON
// Module vendor directories are ignored in workspace mode:
// https://go.googlesource.com/proposal/+/master/design/45713-workspace.md
if len(r.env.Env["GOWORK"]) == 0 {
vendorEnabled, mainModVendor, err = gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
if err != nil {
return err
}
}
if mainMod != nil && vendorEnabled {
if mainModVendor != nil && vendorEnabled {
// Vendor mode is on, so all the non-Main modules are irrelevant,
// and we need to search /vendor for everything.
r.main = mainMod
r.mains = []*gocommand.ModuleJSON{mainModVendor}
r.dummyVendorMod = &gocommand.ModuleJSON{
Path: "",
Dir: filepath.Join(mainMod.Dir, "vendor"),
Dir: filepath.Join(mainModVendor.Dir, "vendor"),
}
r.modsByModPath = []*gocommand.ModuleJSON{mainMod, r.dummyVendorMod}
r.modsByDir = []*gocommand.ModuleJSON{mainMod, r.dummyVendorMod}
r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
} else {
// Vendor mode is off, so run go list -m ... to find everything.
err := r.initAllMods()
@ -122,8 +131,10 @@ func (r *ModuleResolver) init() error {
r.roots = []gopathwalk.Root{
{filepath.Join(goenv["GOROOT"], "/src"), gopathwalk.RootGOROOT},
}
if r.main != nil {
r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
r.mainByDir = make(map[string]*gocommand.ModuleJSON)
for _, main := range r.mains {
r.roots = append(r.roots, gopathwalk.Root{main.Dir, gopathwalk.RootCurrentModule})
r.mainByDir[main.Dir] = main
}
if vendorEnabled {
r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
@ -189,7 +200,7 @@ func (r *ModuleResolver) initAllMods() error {
r.modsByModPath = append(r.modsByModPath, mod)
r.modsByDir = append(r.modsByDir, mod)
if mod.Main {
r.main = mod
r.mains = append(r.mains, mod)
}
}
return nil
@ -609,7 +620,7 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
}
switch root.Type {
case gopathwalk.RootCurrentModule:
importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
importPath = path.Join(r.mainByDir[root.Path].Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir)
if len(matches) == 0 {


@ -3,19 +3,23 @@
// license that can be found in the LICENSE file.
// Hacked up copy of go/ast/import.go
// Modified to use a single token.File in preference to a FileSet.
package imports
import (
"go/ast"
"go/token"
"log"
"sort"
"strconv"
)
// sortImports sorts runs of consecutive import lines in import blocks in f.
// It also removes duplicate imports when it is possible to do so without data loss.
func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
//
// It may mutate the token.File.
func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
for i, d := range f.Decls {
d, ok := d.(*ast.GenDecl)
if !ok || d.Tok != token.IMPORT {
@ -38,21 +42,21 @@ func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
i := 0
specs := d.Specs[:0]
for j, s := range d.Specs {
if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
if j > i && tokFile.Line(s.Pos()) > 1+tokFile.Line(d.Specs[j-1].End()) {
// j begins a new run. End this one.
specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:j])...)
specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:j])...)
i = j
}
}
specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:])...)
specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:])...)
d.Specs = specs
// Deduping can leave a blank line before the rparen; clean that up.
if len(d.Specs) > 0 {
lastSpec := d.Specs[len(d.Specs)-1]
lastLine := fset.Position(lastSpec.Pos()).Line
if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
fset.File(d.Rparen).MergeLine(rParenLine - 1)
lastLine := tokFile.PositionFor(lastSpec.Pos(), false).Line
if rParenLine := tokFile.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
tokFile.MergeLine(rParenLine - 1) // has side effects!
}
}
}
@ -60,7 +64,8 @@ func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
// mergeImports merges all the import declarations into the first one.
// Taken from golang.org/x/tools/ast/astutil.
func mergeImports(fset *token.FileSet, f *ast.File) {
// This does not adjust line numbers properly
func mergeImports(f *ast.File) {
if len(f.Decls) <= 1 {
return
}
@ -142,7 +147,9 @@ type posSpan struct {
End token.Pos
}
func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
// sortSpecs sorts the import specs within each import decl.
// It may mutate the token.File.
func sortSpecs(localPrefix string, tokFile *token.File, f *ast.File, specs []ast.Spec) []ast.Spec {
// Can't short-circuit here even if specs are already sorted,
// since they might yet need deduplication.
// A lone import, however, may be safely ignored.
@ -158,7 +165,7 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
// Identify comments in this range.
// Any comment from pos[0].Start to the final line counts.
lastLine := fset.Position(pos[len(pos)-1].End).Line
lastLine := tokFile.Line(pos[len(pos)-1].End)
cstart := len(f.Comments)
cend := len(f.Comments)
for i, g := range f.Comments {
@ -168,7 +175,7 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
if i < cstart {
cstart = i
}
if fset.Position(g.End()).Line > lastLine {
if tokFile.Line(g.End()) > lastLine {
cend = i
break
}
@ -201,7 +208,7 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
deduped = append(deduped, s)
} else {
p := s.Pos()
fset.File(p).MergeLine(fset.Position(p).Line)
tokFile.MergeLine(tokFile.Line(p)) // has side effects!
}
}
specs = deduped
@ -232,13 +239,22 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
// Fixup comments can insert blank lines, because import specs are on different lines.
// We remove those blank lines here by merging import spec to the first import spec line.
firstSpecLine := fset.Position(specs[0].Pos()).Line
firstSpecLine := tokFile.Line(specs[0].Pos())
for _, s := range specs[1:] {
p := s.Pos()
line := fset.File(p).Line(p)
line := tokFile.Line(p)
for previousLine := line - 1; previousLine >= firstSpecLine; {
fset.File(p).MergeLine(previousLine)
previousLine--
// MergeLine can panic. Avoid the panic at the cost of not removing the blank line
// golang/go#50329
if previousLine > 0 && previousLine < tokFile.LineCount() {
tokFile.MergeLine(previousLine) // has side effects!
previousLine--
} else {
// try to gather some data to diagnose how this could happen
req := "Please report what the imports section of your go file looked like."
log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
firstSpecLine, line, previousLine, tokFile.LineCount(), req)
}
}
}
return specs
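
The hunks in this file all follow the same pattern: each fset.Position(...).Line lookup becomes a call on the one token.File that callers now pass in, and each MergeLine call is bounds-checked. A minimal standalone sketch (not part of the diff) of the equivalence the conversion relies on:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	src := "package p\n\nimport (\n\t\"fmt\"\n\t\"os\"\n)\n"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	pos := f.Imports[1].Pos()             // the "os" import spec
	tokFile := fset.File(pos)             // the single *token.File this change threads through
	fmt.Println(fset.Position(pos).Line)  // 5
	fmt.Println(tokFile.Line(pos))        // 5: same answer, no FileSet needed
}
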

View File

@ -88,6 +88,7 @@ var stdlib = map[string][]string{
"ContainsAny",
"ContainsRune",
"Count",
"Cut",
"Equal",
"EqualFold",
"ErrTooLarge",
@ -180,6 +181,8 @@ var stdlib = map[string][]string{
"NewReader",
"NewWriter",
"Order",
"Reader",
"Writer",
},
"compress/zlib": []string{
"BestCompression",
@ -641,7 +644,9 @@ var stdlib = map[string][]string{
"Named",
"NamedArg",
"NullBool",
"NullByte",
"NullFloat64",
"NullInt16",
"NullInt32",
"NullInt64",
"NullString",
@ -707,6 +712,11 @@ var stdlib = map[string][]string{
"ValueConverter",
"Valuer",
},
"debug/buildinfo": []string{
"BuildInfo",
"Read",
"ReadFile",
},
"debug/dwarf": []string{
"AddrType",
"ArrayType",
@ -1940,6 +1950,7 @@ var stdlib = map[string][]string{
"R_PPC64_REL24_NOTOC",
"R_PPC64_REL32",
"R_PPC64_REL64",
"R_PPC64_RELATIVE",
"R_PPC64_SECTOFF_DS",
"R_PPC64_SECTOFF_LO_DS",
"R_PPC64_TLS",
@ -2248,6 +2259,7 @@ var stdlib = map[string][]string{
"SHT_LOOS",
"SHT_LOPROC",
"SHT_LOUSER",
"SHT_MIPS_ABIFLAGS",
"SHT_NOBITS",
"SHT_NOTE",
"SHT_NULL",
@ -2542,6 +2554,7 @@ var stdlib = map[string][]string{
"Symbol",
},
"debug/plan9obj": []string{
"ErrNoSymbols",
"File",
"FileHeader",
"Magic386",
@ -2901,6 +2914,7 @@ var stdlib = map[string][]string{
"Importer",
"IncDecStmt",
"IndexExpr",
"IndexListExpr",
"Inspect",
"InterfaceType",
"IsExported",
@ -3061,6 +3075,7 @@ var stdlib = map[string][]string{
"ParseExpr",
"ParseExprFrom",
"ParseFile",
"SkipObjectResolution",
"SpuriousErrors",
"Trace",
},
@ -3173,6 +3188,7 @@ var stdlib = map[string][]string{
"SUB",
"SUB_ASSIGN",
"SWITCH",
"TILDE",
"TYPE",
"Token",
"UnaryPrec",
@ -3181,6 +3197,7 @@ var stdlib = map[string][]string{
"XOR_ASSIGN",
},
"go/types": []string{
"ArgumentError",
"Array",
"AssertableTo",
"AssignableTo",
@ -3199,6 +3216,7 @@ var stdlib = map[string][]string{
"Complex64",
"Config",
"Const",
"Context",
"ConvertibleTo",
"DefPredeclaredTestFuncs",
"Default",
@ -3218,6 +3236,8 @@ var stdlib = map[string][]string{
"ImporterFrom",
"Info",
"Initializer",
"Instance",
"Instantiate",
"Int",
"Int16",
"Int32",
@ -3248,6 +3268,7 @@ var stdlib = map[string][]string{
"NewChan",
"NewChecker",
"NewConst",
"NewContext",
"NewField",
"NewFunc",
"NewInterface",
@ -3262,10 +3283,14 @@ var stdlib = map[string][]string{
"NewPointer",
"NewScope",
"NewSignature",
"NewSignatureType",
"NewSlice",
"NewStruct",
"NewTerm",
"NewTuple",
"NewTypeName",
"NewTypeParam",
"NewUnion",
"NewVar",
"Nil",
"Object",
@ -3290,11 +3315,15 @@ var stdlib = map[string][]string{
"StdSizes",
"String",
"Struct",
"Term",
"Tuple",
"Typ",
"Type",
"TypeAndValue",
"TypeList",
"TypeName",
"TypeParam",
"TypeParamList",
"TypeString",
"Uint",
"Uint16",
@ -3302,6 +3331,7 @@ var stdlib = map[string][]string{
"Uint64",
"Uint8",
"Uintptr",
"Union",
"Universe",
"Unsafe",
"UnsafePointer",
@ -3441,6 +3471,7 @@ var stdlib = map[string][]string{
"Pt",
"RGBA",
"RGBA64",
"RGBA64Image",
"Rect",
"Rectangle",
"RegisterFormat",
@ -3507,6 +3538,7 @@ var stdlib = map[string][]string{
"Op",
"Over",
"Quantizer",
"RGBA64Image",
"Src",
},
"image/gif": []string{
@ -3612,6 +3644,7 @@ var stdlib = map[string][]string{
"FS",
"File",
"FileInfo",
"FileInfoToDirEntry",
"FileMode",
"Glob",
"GlobFS",
@ -3772,15 +3805,18 @@ var stdlib = map[string][]string{
"Max",
"MaxFloat32",
"MaxFloat64",
"MaxInt",
"MaxInt16",
"MaxInt32",
"MaxInt64",
"MaxInt8",
"MaxUint",
"MaxUint16",
"MaxUint32",
"MaxUint64",
"MaxUint8",
"Min",
"MinInt",
"MinInt16",
"MinInt32",
"MinInt64",
@ -4068,9 +4104,11 @@ var stdlib = map[string][]string{
"SRV",
"SplitHostPort",
"TCPAddr",
"TCPAddrFromAddrPort",
"TCPConn",
"TCPListener",
"UDPAddr",
"UDPAddrFromAddrPort",
"UDPConn",
"UnixAddr",
"UnixConn",
@ -4078,6 +4116,7 @@ var stdlib = map[string][]string{
"UnknownNetworkError",
},
"net/http": []string{
"AllowQuerySemicolons",
"CanonicalHeaderKey",
"Client",
"CloseNotifier",
@ -4129,6 +4168,7 @@ var stdlib = map[string][]string{
"ListenAndServe",
"ListenAndServeTLS",
"LocalAddrContextKey",
"MaxBytesHandler",
"MaxBytesReader",
"MethodConnect",
"MethodDelete",
@ -4325,6 +4365,25 @@ var stdlib = map[string][]string{
"ParseDate",
"ReadMessage",
},
"net/netip": []string{
"Addr",
"AddrFrom16",
"AddrFrom4",
"AddrFromSlice",
"AddrPort",
"AddrPortFrom",
"IPv4Unspecified",
"IPv6LinkLocalAllNodes",
"IPv6Unspecified",
"MustParseAddr",
"MustParseAddrPort",
"MustParsePrefix",
"ParseAddr",
"ParseAddrPort",
"ParsePrefix",
"Prefix",
"PrefixFrom",
},
"net/rpc": []string{
"Accept",
"Call",
@ -4628,6 +4687,8 @@ var stdlib = map[string][]string{
"Method",
"New",
"NewAt",
"Pointer",
"PointerTo",
"Ptr",
"PtrTo",
"RecvDir",
@ -4660,6 +4721,7 @@ var stdlib = map[string][]string{
"Value",
"ValueError",
"ValueOf",
"VisibleFields",
"Zero",
},
"regexp": []string{
@ -4799,11 +4861,17 @@ var stdlib = map[string][]string{
"UnlockOSThread",
"Version",
},
"runtime/cgo": []string{
"Handle",
"NewHandle",
},
"runtime/debug": []string{
"BuildInfo",
"BuildSetting",
"FreeOSMemory",
"GCStats",
"Module",
"ParseBuildInfo",
"PrintStack",
"ReadBuildInfo",
"ReadGCStats",
@ -4915,16 +4983,19 @@ var stdlib = map[string][]string{
"QuoteRuneToGraphic",
"QuoteToASCII",
"QuoteToGraphic",
"QuotedPrefix",
"Unquote",
"UnquoteChar",
},
"strings": []string{
"Builder",
"Clone",
"Compare",
"Contains",
"ContainsAny",
"ContainsRune",
"Count",
"Cut",
"EqualFold",
"Fields",
"FieldsFunc",
@ -9774,6 +9845,7 @@ var stdlib = map[string][]string{
"Syscall18",
"Syscall6",
"Syscall9",
"SyscallN",
"Sysctl",
"SysctlUint32",
"Sysctlnode",
@ -10183,7 +10255,6 @@ var stdlib = map[string][]string{
"Value",
"ValueError",
"ValueOf",
"Wrapper",
},
"testing": []string{
"AllocsPerRun",
@ -10194,9 +10265,11 @@ var stdlib = map[string][]string{
"CoverBlock",
"CoverMode",
"Coverage",
"F",
"Init",
"InternalBenchmark",
"InternalExample",
"InternalFuzzTarget",
"InternalTest",
"M",
"Main",
@ -10294,9 +10367,11 @@ var stdlib = map[string][]string{
"ActionNode",
"BoolNode",
"BranchNode",
"BreakNode",
"ChainNode",
"CommandNode",
"CommentNode",
"ContinueNode",
"DotNode",
"FieldNode",
"IdentifierNode",
@ -10310,9 +10385,11 @@ var stdlib = map[string][]string{
"Node",
"NodeAction",
"NodeBool",
"NodeBreak",
"NodeChain",
"NodeCommand",
"NodeComment",
"NodeContinue",
"NodeDot",
"NodeField",
"NodeIdentifier",
@ -10334,6 +10411,7 @@ var stdlib = map[string][]string{
"PipeNode",
"Pos",
"RangeNode",
"SkipFuncCheck",
"StringNode",
"TemplateNode",
"TextNode",
@ -10358,6 +10436,7 @@ var stdlib = map[string][]string{
"July",
"June",
"Kitchen",
"Layout",
"LoadLocation",
"LoadLocationFromTZData",
"Local",
@ -10406,6 +10485,8 @@ var stdlib = map[string][]string{
"UTC",
"Unix",
"UnixDate",
"UnixMicro",
"UnixMilli",
"Until",
"Wednesday",
"Weekday",
@ -10704,6 +10785,7 @@ var stdlib = map[string][]string{
"IsSurrogate",
},
"unicode/utf8": []string{
"AppendRune",
"DecodeLastRune",
"DecodeLastRuneInString",
"DecodeRune",

View File

@ -23,6 +23,8 @@ var GetGoCmdRunner = func(config interface{}) *gocommand.Runner { return nil }
var SetGoCmdRunner = func(config interface{}, runner *gocommand.Runner) {}
var TypecheckCgo int
var DepsErrors int // must be set as a LoadMode to call GetDepsErrors
var ForTest int // must be set as a LoadMode to call GetForTest
var SetModFlag = func(config interface{}, value string) {}
var SetModFile = func(config interface{}, value string) {}

179
vendor/golang.org/x/tools/internal/typeparams/common.go generated vendored Normal file
View File

@ -0,0 +1,179 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package typeparams contains common utilities for writing tools that interact
// with generic Go code, as introduced with Go 1.18.
//
// Many of the types and functions in this package are proxies for the new APIs
// introduced in the standard library with Go 1.18. For example, the
// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec
// function returns the value of the go/ast.TypeSpec.TypeParams field. At Go
// versions older than 1.18 these helpers are implemented as stubs, allowing
// users of this package to write code that handles generic constructs inline,
// even if the Go version being used to compile does not support generics.
//
// Additionally, this package contains common utilities for working with the
// new generic constructs, to supplement the standard library APIs. Notably,
// the StructuralTerms API computes a minimal representation of the structural
// restrictions on a type parameter.
//
// An external version of these APIs is available in the
// golang.org/x/exp/typeparams module.
package typeparams
import (
"go/ast"
"go/token"
"go/types"
)
// UnpackIndexExpr extracts data from AST nodes that represent index
// expressions.
//
// For an ast.IndexExpr, the resulting indices slice will contain exactly one
// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable
// number of index expressions.
//
// For nodes that don't represent index expressions, the first return value of
// UnpackIndexExpr will be nil.
func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) {
switch e := n.(type) {
case *ast.IndexExpr:
return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack
case *IndexListExpr:
return e.X, e.Lbrack, e.Indices, e.Rbrack
}
return nil, token.NoPos, nil, token.NoPos
}
// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on
// the cardinality of indices. Calling PackIndexExpr with len(indices) == 0
// will panic.
func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr {
switch len(indices) {
case 0:
panic("empty indices")
case 1:
return &ast.IndexExpr{
X: x,
Lbrack: lbrack,
Index: indices[0],
Rbrack: rbrack,
}
default:
return &IndexListExpr{
X: x,
Lbrack: lbrack,
Indices: indices,
Rbrack: rbrack,
}
}
}
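
A hedged sketch of the round trip these two helpers provide, written as if it were a test in this package (it assumes a Go 1.18+ toolchain, where IndexListExpr aliases ast.IndexListExpr and the parser accepts multiple type arguments):

package typeparams

import (
	"go/parser"
	"testing"
)

func TestUnpackPackRoundTrip(t *testing.T) {
	expr, err := parser.ParseExpr("Pair[int, string]")
	if err != nil {
		t.Fatal(err)
	}
	x, lbrack, indices, rbrack := UnpackIndexExpr(expr)
	if x == nil || len(indices) != 2 {
		t.Fatalf("got %d indices, want 2", len(indices))
	}
	// Packing the pieces back together yields an equivalent *ast.IndexListExpr.
	if _, ok := PackIndexExpr(x, lbrack, indices, rbrack).(*IndexListExpr); !ok {
		t.Fatal("expected an IndexListExpr for two indices")
	}
}
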
// IsTypeParam reports whether t is a type parameter.
func IsTypeParam(t types.Type) bool {
_, ok := t.(*TypeParam)
return ok
}
// OriginMethod returns the origin method associated with the method fn.
// For methods on a non-generic receiver base type, this is just
// fn. However, for methods with a generic receiver, OriginMethod returns the
// corresponding method in the method set of the origin type.
//
// As a special case, if fn is not a method (has no receiver), OriginMethod
// returns fn.
func OriginMethod(fn *types.Func) *types.Func {
recv := fn.Type().(*types.Signature).Recv()
if recv == nil {
return fn
}
base := recv.Type()
p, isPtr := base.(*types.Pointer)
if isPtr {
base = p.Elem()
}
named, isNamed := base.(*types.Named)
if !isNamed {
// Receiver is a *types.Interface.
return fn
}
if ForNamed(named).Len() == 0 {
// Receiver base has no type parameters, so we can avoid the lookup below.
return fn
}
orig := NamedTypeOrigin(named)
gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name())
return gfn.(*types.Func)
}
// GenericAssignableTo is a generalization of types.AssignableTo that
// implements the following rule for uninstantiated generic types:
//
// If V and T are generic named types, then V is considered assignable to T if,
// for every possible instantiation of V[A_1, ..., A_N], the instantiation
// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N].
//
// If T has structural constraints, they must be satisfied by V.
//
// For example, consider the following type declarations:
//
// type Interface[T any] interface {
// Accept(T)
// }
//
// type Container[T any] struct {
// Element T
// }
//
// func (c Container[T]) Accept(t T) { c.Element = t }
//
// In this case, GenericAssignableTo reports that instantiations of Container
// are assignable to the corresponding instantiation of Interface.
func GenericAssignableTo(ctxt *Context, V, T types.Type) bool {
// If V and T are not both named, or do not have matching non-empty type
// parameter lists, fall back on types.AssignableTo.
VN, Vnamed := V.(*types.Named)
TN, Tnamed := T.(*types.Named)
if !Vnamed || !Tnamed {
return types.AssignableTo(V, T)
}
vtparams := ForNamed(VN)
ttparams := ForNamed(TN)
if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || NamedTypeArgs(VN).Len() != 0 || NamedTypeArgs(TN).Len() != 0 {
return types.AssignableTo(V, T)
}
// V and T have the same (non-zero) number of type params. Instantiate both
// with the type parameters of V. This must always succeed for V, and will
// succeed for T if and only if the type set of each type parameter of V is a
// subset of the type set of the corresponding type parameter of T, meaning
// that every instantiation of V corresponds to a valid instantiation of T.
// Minor optimization: ensure we share a context across the two
// instantiations below.
if ctxt == nil {
ctxt = NewContext()
}
var targs []types.Type
for i := 0; i < vtparams.Len(); i++ {
targs = append(targs, vtparams.At(i))
}
vinst, err := Instantiate(ctxt, V, targs, true)
if err != nil {
panic("type parameters should satisfy their own constraints")
}
tinst, err := Instantiate(ctxt, T, targs, true)
if err != nil {
return false
}
return types.AssignableTo(vinst, tinst)
}
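
A hedged sketch exercising GenericAssignableTo on the exact declarations from the doc comment, again written as a hypothetical test in this package and assuming a Go 1.18+ toolchain:

package typeparams

import (
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"testing"
)

func TestContainerAssignableToInterface(t *testing.T) {
	const src = `package p

type Interface[T any] interface{ Accept(T) }

type Container[T any] struct{ Element T }

func (c Container[T]) Accept(t T) { c.Element = t }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		t.Fatal(err)
	}
	var conf types.Config
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		t.Fatal(err)
	}
	V := pkg.Scope().Lookup("Container").Type()
	T := pkg.Scope().Lookup("Interface").Type()
	if !GenericAssignableTo(nil, V, T) {
		t.Error("want instantiations of Container to be assignable to Interface")
	}
}
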

View File

@ -0,0 +1,122 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typeparams
import (
"go/types"
)
// CoreType returns the core type of T or nil if T does not have a core type.
//
// See https://go.dev/ref/spec#Core_types for the definition of a core type.
func CoreType(T types.Type) types.Type {
U := T.Underlying()
if _, ok := U.(*types.Interface); !ok {
return U // for non-interface types,
}
terms, err := _NormalTerms(U)
if len(terms) == 0 || err != nil {
// len(terms) -> empty type set of interface.
// err != nil => U is invalid, exceeds complexity bounds, or has an empty type set.
return nil // no core type.
}
U = terms[0].Type().Underlying()
var identical int // i in [0,identical) => Identical(U, terms[i].Type().Underlying())
for identical = 1; identical < len(terms); identical++ {
if !types.Identical(U, terms[identical].Type().Underlying()) {
break
}
}
if identical == len(terms) {
// https://go.dev/ref/spec#Core_types
// "There is a single type U which is the underlying type of all types in the type set of T"
return U
}
ch, ok := U.(*types.Chan)
if !ok {
return nil // no core type as identical < len(terms) and U is not a channel.
}
// https://go.dev/ref/spec#Core_types
// "the type chan E if T contains only bidirectional channels, or the type chan<- E or
// <-chan E depending on the direction of the directional channels present."
for chans := identical; chans < len(terms); chans++ {
curr, ok := terms[chans].Type().Underlying().(*types.Chan)
if !ok {
return nil
}
if !types.Identical(ch.Elem(), curr.Elem()) {
return nil // channel elements are not identical.
}
if ch.Dir() == types.SendRecv {
// ch is bidirectional. We can safely always use curr's direction.
ch = curr
} else if curr.Dir() != types.SendRecv && ch.Dir() != curr.Dir() {
// ch and curr are not bidirectional and not the same direction.
return nil
}
}
return ch
}
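
A hedged sketch of CoreType on two constraint interfaces, written as a hypothetical test in this package (Go 1.18+ only, since it constructs types.Union values directly):

package typeparams

import (
	"go/types"
	"testing"
)

func TestCoreTypeOfConstraints(t *testing.T) {
	intSlice := types.NewSlice(types.Typ[types.Int])

	// interface{ ~[]int | []int }: the []int term is subsumed by ~[]int, so
	// every type in the set has underlying type []int — the core type.
	u := types.NewUnion([]*types.Term{
		types.NewTerm(true, intSlice),  // ~[]int
		types.NewTerm(false, intSlice), // []int
	})
	iface := types.NewInterfaceType(nil, []types.Type{u})
	iface.Complete()
	if core := CoreType(iface); core == nil || !types.Identical(core, intSlice) {
		t.Errorf("CoreType = %v, want []int", core)
	}

	// interface{ int | string } has no single underlying type and no channel
	// terms, hence no core type.
	mixed := types.NewInterfaceType(nil, []types.Type{
		types.NewUnion([]*types.Term{
			types.NewTerm(false, types.Typ[types.Int]),
			types.NewTerm(false, types.Typ[types.String]),
		}),
	})
	mixed.Complete()
	if core := CoreType(mixed); core != nil {
		t.Errorf("CoreType = %v, want nil", core)
	}
}
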
// _NormalTerms returns a slice of terms representing the normalized structural
// type restrictions of a type, if any.
//
// For all types other than *types.TypeParam, *types.Interface, and
// *types.Union, this is just a single term with Tilde() == false and
// Type() == typ. For *types.TypeParam, *types.Interface, and *types.Union, see
// below.
//
// Structural type restrictions of a type parameter are created via
// non-interface types embedded in its constraint interface (directly, or via a
// chain of interface embeddings). For example, in the declaration type
// T[P interface{~int; m()}] int the structural restriction of the type
// parameter P is ~int.
//
// With interface embedding and unions, the specification of structural type
// restrictions may be arbitrarily complex. For example, consider the
// following:
//
// type A interface{ ~string|~[]byte }
//
// type B interface{ int|string }
//
// type C interface { ~string|~int }
//
// type T[P interface{ A|B; C }] int
//
// In this example, the structural type restriction of P is ~string|int: A|B
// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
// which when intersected with C (~string|~int) yields ~string|int.
//
// _NormalTerms computes these expansions and reductions, producing a
// "normalized" form of the embeddings. A structural restriction is normalized
// if it is a single union containing no interface terms, and is minimal in the
// sense that removing any term changes the set of types satisfying the
// constraint. It is left as a proof for the reader that, modulo sorting, there
// is exactly one such normalized form.
//
// Because the minimal representation always takes this form, _NormalTerms
// returns a slice of tilde terms corresponding to the terms of the union in
// the normalized structural restriction. An error is returned if the type is
// invalid, exceeds complexity bounds, or has an empty type set. In the latter
// case, _NormalTerms returns ErrEmptyTypeSet.
//
// _NormalTerms makes no guarantees about the order of terms, except that it
// is deterministic.
func _NormalTerms(typ types.Type) ([]*Term, error) {
switch typ := typ.(type) {
case *TypeParam:
return StructuralTerms(typ)
case *Union:
return UnionTermSet(typ)
case *types.Interface:
return InterfaceTermSet(typ)
default:
return []*Term{NewTerm(false, typ)}, nil
}
}

View File

@ -1,11 +0,0 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package typeparams provides functions to work indirectly with type parameter
// data stored in go/ast and go/types objects, while these APIs are guarded by a
// build constraint.
//
// This package exists to make it easier for tools to work with generic code,
// while also compiling against older Go versions.
package typeparams

View File

@ -0,0 +1,12 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.18
// +build !go1.18
package typeparams
// Enabled reports whether type parameters are enabled in the current build
// environment.
const Enabled = false

View File

@ -0,0 +1,15 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.18
// +build go1.18
package typeparams
// Note: this constant is in a separate file as this is the only acceptable
// diff between the <1.18 API of this package and the 1.18 API.
// Enabled reports whether type parameters are enabled in the current build
// environment.
const Enabled = true

View File

@ -0,0 +1,218 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typeparams
import (
"errors"
"fmt"
"go/types"
"os"
"strings"
)
//go:generate go run copytermlist.go
const debug = false
var ErrEmptyTypeSet = errors.New("empty type set")
// StructuralTerms returns a slice of terms representing the normalized
// structural type restrictions of a type parameter, if any.
//
// Structural type restrictions of a type parameter are created via
// non-interface types embedded in its constraint interface (directly, or via a
// chain of interface embeddings). For example, in the declaration
//
// type T[P interface{~int; m()}] int
//
// the structural restriction of the type parameter P is ~int.
//
// With interface embedding and unions, the specification of structural type
// restrictions may be arbitrarily complex. For example, consider the
// following:
//
// type A interface{ ~string|~[]byte }
//
// type B interface{ int|string }
//
// type C interface { ~string|~int }
//
// type T[P interface{ A|B; C }] int
//
// In this example, the structural type restriction of P is ~string|int: A|B
// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
// which when intersected with C (~string|~int) yields ~string|int.
//
// StructuralTerms computes these expansions and reductions, producing a
// "normalized" form of the embeddings. A structural restriction is normalized
// if it is a single union containing no interface terms, and is minimal in the
// sense that removing any term changes the set of types satisfying the
// constraint. It is left as a proof for the reader that, modulo sorting, there
// is exactly one such normalized form.
//
// Because the minimal representation always takes this form, StructuralTerms
// returns a slice of tilde terms corresponding to the terms of the union in
// the normalized structural restriction. An error is returned if the
// constraint interface is invalid, exceeds complexity bounds, or has an empty
// type set. In the latter case, StructuralTerms returns ErrEmptyTypeSet.
//
// StructuralTerms makes no guarantees about the order of terms, except that it
// is deterministic.
func StructuralTerms(tparam *TypeParam) ([]*Term, error) {
constraint := tparam.Constraint()
if constraint == nil {
return nil, fmt.Errorf("%s has nil constraint", tparam)
}
iface, _ := constraint.Underlying().(*types.Interface)
if iface == nil {
return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying())
}
return InterfaceTermSet(iface)
}
// InterfaceTermSet computes the normalized terms for a constraint interface,
// returning an error if the term set cannot be computed or is empty. In the
// latter case, the error will be ErrEmptyTypeSet.
//
// See the documentation of StructuralTerms for more information on
// normalization.
func InterfaceTermSet(iface *types.Interface) ([]*Term, error) {
return computeTermSet(iface)
}
// UnionTermSet computes the normalized terms for a union, returning an error
// if the term set cannot be computed or is empty. In the latter case, the
// error will be ErrEmptyTypeSet.
//
// See the documentation of StructuralTerms for more information on
// normalization.
func UnionTermSet(union *Union) ([]*Term, error) {
return computeTermSet(union)
}
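
A hedged sketch of the normalization UnionTermSet performs, as a hypothetical test in this package (Go 1.18+ only): the redundant plain string term is folded into ~string.

package typeparams

import (
	"go/types"
	"testing"
)

func TestUnionTermSetNormalization(t *testing.T) {
	// ~string | int | string normalizes to the two terms ~string and int,
	// because the set of types denoted by string is contained in ~string.
	u := types.NewUnion([]*types.Term{
		types.NewTerm(true, types.Typ[types.String]),  // ~string
		types.NewTerm(false, types.Typ[types.Int]),    // int
		types.NewTerm(false, types.Typ[types.String]), // string (redundant)
	})
	terms, err := UnionTermSet(u)
	if err != nil {
		t.Fatal(err)
	}
	if len(terms) != 2 {
		t.Fatalf("got %d terms (%v), want 2", len(terms), terms)
	}
}
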
func computeTermSet(typ types.Type) ([]*Term, error) {
tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0)
if err != nil {
return nil, err
}
if tset.terms.isEmpty() {
return nil, ErrEmptyTypeSet
}
if tset.terms.isAll() {
return nil, nil
}
var terms []*Term
for _, term := range tset.terms {
terms = append(terms, NewTerm(term.tilde, term.typ))
}
return terms, nil
}
// A termSet holds the normalized set of terms for a given type.
//
// The name termSet is intentionally distinct from 'type set': a type set is
// all types that implement a type (and includes method restrictions), whereas
// a term set just represents the structural restrictions on a type.
type termSet struct {
complete bool
terms termlist
}
func indentf(depth int, format string, args ...interface{}) {
fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...)
}
func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) {
if t == nil {
panic("nil type")
}
if debug {
indentf(depth, "%s", t.String())
defer func() {
if err != nil {
indentf(depth, "=> %s", err)
} else {
indentf(depth, "=> %s", res.terms.String())
}
}()
}
const maxTermCount = 100
if tset, ok := seen[t]; ok {
if !tset.complete {
return nil, fmt.Errorf("cycle detected in the declaration of %s", t)
}
return tset, nil
}
// Mark the current type as seen to avoid infinite recursion.
tset := new(termSet)
defer func() {
tset.complete = true
}()
seen[t] = tset
switch u := t.Underlying().(type) {
case *types.Interface:
// The term set of an interface is the intersection of the term sets of its
// embedded types.
tset.terms = allTermlist
for i := 0; i < u.NumEmbeddeds(); i++ {
embedded := u.EmbeddedType(i)
if _, ok := embedded.Underlying().(*TypeParam); ok {
return nil, fmt.Errorf("invalid embedded type %T", embedded)
}
tset2, err := computeTermSetInternal(embedded, seen, depth+1)
if err != nil {
return nil, err
}
tset.terms = tset.terms.intersect(tset2.terms)
}
case *Union:
// The term set of a union is the union of term sets of its terms.
tset.terms = nil
for i := 0; i < u.Len(); i++ {
t := u.Term(i)
var terms termlist
switch t.Type().Underlying().(type) {
case *types.Interface:
tset2, err := computeTermSetInternal(t.Type(), seen, depth+1)
if err != nil {
return nil, err
}
terms = tset2.terms
case *TypeParam, *Union:
// A stand-alone type parameter or union is not permitted as union
// term.
return nil, fmt.Errorf("invalid union term %T", t)
default:
if t.Type() == types.Typ[types.Invalid] {
continue
}
terms = termlist{{t.Tilde(), t.Type()}}
}
tset.terms = tset.terms.union(terms)
if len(tset.terms) > maxTermCount {
return nil, fmt.Errorf("exceeded max term count %d", maxTermCount)
}
}
case *TypeParam:
panic("unreachable")
default:
// For all other types, the term set is just a single non-tilde term
// holding the type itself.
if u != types.Typ[types.Invalid] {
tset.terms = termlist{{false, t}}
}
}
return tset, nil
}
// under is a facade for the go/types internal function of the same name. It is
// used by typeterm.go.
func under(t types.Type) types.Type {
return t.Underlying()
}

View File

@ -1,90 +0,0 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !typeparams || !go1.17
// +build !typeparams !go1.17
package typeparams
import (
"go/ast"
"go/types"
)
// NOTE: doc comments must be kept in sync with typeparams.go.
// Enabled reports whether type parameters are enabled in the current build
// environment.
const Enabled = false
// UnpackIndex extracts all index expressions from e. For non-generic code this
// is always one expression: e.Index, but may be more than one expression for
// generic type instantiation.
func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
return []ast.Expr{e.Index}
}
// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
// introduced to hold type arguments for generic type instantiation.
func IsListExpr(n ast.Node) bool {
return false
}
// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
func ForTypeDecl(*ast.TypeSpec) *ast.FieldList {
return nil
}
// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
func ForFuncDecl(*ast.FuncDecl) *ast.FieldList {
return nil
}
// ForSignature extracts the (possibly empty) type parameter object list from
// sig.
func ForSignature(*types.Signature) []*types.TypeName {
return nil
}
// HasTypeSet reports if iface has a type set.
func HasTypeSet(*types.Interface) bool {
return false
}
// IsComparable reports if iface is the comparable interface.
func IsComparable(*types.Interface) bool {
return false
}
// IsConstraint reports whether iface may only be used as a type parameter
// constraint (i.e. has a type set or is the comparable interface).
func IsConstraint(*types.Interface) bool {
return false
}
// ForNamed extracts the (possibly empty) type parameter object list from
// named.
func ForNamed(*types.Named) []*types.TypeName {
return nil
}
// NamedTArgs extracts the (possibly empty) type argument list from named.
func NamedTArgs(*types.Named) []types.Type {
return nil
}
// InitInferred initializes info to record inferred type information.
func InitInferred(*types.Info) {
}
// GetInferred extracts inferred type information from info for e.
//
// The expression e may have an inferred type if it is an *ast.IndexExpr
// representing partial instantiation of a generic function type for which type
// arguments have been inferred using constraint type inference, or if it is an
// *ast.CallExpr for which type arguments have been inferred using both
// constraint type inference and function argument inference.
func GetInferred(*types.Info, ast.Expr) ([]types.Type, *types.Signature) {
return nil, nil
}

View File

@ -0,0 +1,163 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated by copytermlist.go DO NOT EDIT.
package typeparams
import (
"bytes"
"go/types"
)
// A termlist represents the type set represented by the union
// t1 ∪ t2 ∪ ... ∪ tn of the type sets of the terms t1 to tn.
// A termlist is in normal form if all terms are disjoint.
// termlist operations don't require the operands to be in
// normal form.
type termlist []*term
// allTermlist represents the set of all types.
// It is in normal form.
var allTermlist = termlist{new(term)}
// String prints the termlist exactly (without normalization).
func (xl termlist) String() string {
if len(xl) == 0 {
return "∅"
}
var buf bytes.Buffer
for i, x := range xl {
if i > 0 {
buf.WriteString(" ")
}
buf.WriteString(x.String())
}
return buf.String()
}
// isEmpty reports whether the termlist xl represents the empty set of types.
func (xl termlist) isEmpty() bool {
// If there's a non-nil term, the entire list is not empty.
// If the termlist is in normal form, this requires at most
// one iteration.
for _, x := range xl {
if x != nil {
return false
}
}
return true
}
// isAll reports whether the termlist xl represents the set of all types.
func (xl termlist) isAll() bool {
// If there's a 𝓤 term, the entire list is 𝓤.
// If the termlist is in normal form, this requires at most
// one iteration.
for _, x := range xl {
if x != nil && x.typ == nil {
return true
}
}
return false
}
// norm returns the normal form of xl.
func (xl termlist) norm() termlist {
// Quadratic algorithm, but good enough for now.
// TODO(gri) fix asymptotic performance
used := make([]bool, len(xl))
var rl termlist
for i, xi := range xl {
if xi == nil || used[i] {
continue
}
for j := i + 1; j < len(xl); j++ {
xj := xl[j]
if xj == nil || used[j] {
continue
}
if u1, u2 := xi.union(xj); u2 == nil {
// If we encounter a 𝓤 term, the entire list is 𝓤.
// Exit early.
// (Note that this is not just an optimization;
// if we continue, we may end up with a 𝓤 term
// and other terms and the result would not be
// in normal form.)
if u1.typ == nil {
return allTermlist
}
xi = u1
used[j] = true // xj is now unioned into xi - ignore it in future iterations
}
}
rl = append(rl, xi)
}
return rl
}
// union returns the union xl ∪ yl.
func (xl termlist) union(yl termlist) termlist {
return append(xl, yl...).norm()
}
// intersect returns the intersection xl ∩ yl.
func (xl termlist) intersect(yl termlist) termlist {
if xl.isEmpty() || yl.isEmpty() {
return nil
}
// Quadratic algorithm, but good enough for now.
// TODO(gri) fix asymptotic performance
var rl termlist
for _, x := range xl {
for _, y := range yl {
if r := x.intersect(y); r != nil {
rl = append(rl, r)
}
}
}
return rl.norm()
}
// equal reports whether xl and yl represent the same type set.
func (xl termlist) equal(yl termlist) bool {
// TODO(gri) this should be more efficient
return xl.subsetOf(yl) && yl.subsetOf(xl)
}
// includes reports whether t ∈ xl.
func (xl termlist) includes(t types.Type) bool {
for _, x := range xl {
if x.includes(t) {
return true
}
}
return false
}
// supersetOf reports whether y ⊆ xl.
func (xl termlist) supersetOf(y *term) bool {
for _, x := range xl {
if y.subsetOf(x) {
return true
}
}
return false
}
// subsetOf reports whether xl ⊆ yl.
func (xl termlist) subsetOf(yl termlist) bool {
if yl.isEmpty() {
return xl.isEmpty()
}
// each term x of xl must be a subset of yl
for _, x := range xl {
if !yl.supersetOf(x) {
return false // x is not a subset of yl
}
}
return true
}

View File

@ -1,105 +0,0 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build typeparams && go1.17
// +build typeparams,go1.17
package typeparams
import (
"go/ast"
"go/types"
)
// NOTE: doc comments must be kept in sync with notypeparams.go.
// Enabled reports whether type parameters are enabled in the current build
// environment.
const Enabled = true
// UnpackIndex extracts all index expressions from e. For non-generic code this
// is always one expression: e.Index, but may be more than one expression for
// generic type instantiation.
func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
if x, _ := e.Index.(*ast.ListExpr); x != nil {
return x.ElemList
}
if e.Index != nil {
return []ast.Expr{e.Index}
}
return nil
}
// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
// introduced to hold type arguments for generic type instantiation.
func IsListExpr(n ast.Node) bool {
_, ok := n.(*ast.ListExpr)
return ok
}
// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
func ForTypeDecl(n *ast.TypeSpec) *ast.FieldList {
return n.TParams
}
// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
func ForFuncDecl(n *ast.FuncDecl) *ast.FieldList {
if n.Type != nil {
return n.Type.TParams
}
return nil
}
// ForSignature extracts the (possibly empty) type parameter object list from
// sig.
func ForSignature(sig *types.Signature) []*types.TypeName {
return sig.TParams()
}
// HasTypeSet reports if iface has a type set.
func HasTypeSet(iface *types.Interface) bool {
return iface.HasTypeList()
}
// IsComparable reports if iface is the comparable interface.
func IsComparable(iface *types.Interface) bool {
return iface.IsComparable()
}
// IsConstraint reports whether iface may only be used as a type parameter
// constraint (i.e. has a type set or is the comparable interface).
func IsConstraint(iface *types.Interface) bool {
return iface.IsConstraint()
}
// ForNamed extracts the (possibly empty) type parameter object list from
// named.
func ForNamed(named *types.Named) []*types.TypeName {
return named.TParams()
}
// NamedTArgs extracts the (possibly empty) type argument list from named.
func NamedTArgs(named *types.Named) []types.Type {
return named.TArgs()
}
// InitInferred initializes info to record inferred type information.
func InitInferred(info *types.Info) {
info.Inferred = make(map[ast.Expr]types.Inferred)
}
// GetInferred extracts inferred type information from info for e.
//
// The expression e may have an inferred type if it is an *ast.IndexExpr
// representing partial instantiation of a generic function type for which type
// arguments have been inferred using constraint type inference, or if it is an
// *ast.CallExpr for which type arguments have been inferred using both
// constraint type inference and function argument inference.
func GetInferred(info *types.Info, e ast.Expr) ([]types.Type, *types.Signature) {
if info.Inferred == nil {
return nil, nil
}
inf := info.Inferred[e]
return inf.TArgs, inf.Sig
}

View File

@ -0,0 +1,197 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.18
// +build !go1.18
package typeparams
import (
"go/ast"
"go/token"
"go/types"
)
func unsupported() {
panic("type parameters are unsupported at this go version")
}
// IndexListExpr is a placeholder type, as type parameters are not supported at
// this Go version. Its methods panic on use.
type IndexListExpr struct {
ast.Expr
X ast.Expr // expression
Lbrack token.Pos // position of "["
Indices []ast.Expr // index expressions
Rbrack token.Pos // position of "]"
}
// ForTypeSpec returns an empty field list, as type parameters are not supported
// at this Go version.
func ForTypeSpec(*ast.TypeSpec) *ast.FieldList {
return nil
}
// ForFuncType returns an empty field list, as type parameters are not
// supported at this Go version.
func ForFuncType(*ast.FuncType) *ast.FieldList {
return nil
}
// TypeParam is a placeholder type, as type parameters are not supported at
// this Go version. Its methods panic on use.
type TypeParam struct{ types.Type }
func (*TypeParam) Index() int { unsupported(); return 0 }
func (*TypeParam) Constraint() types.Type { unsupported(); return nil }
func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil }
// TypeParamList is a placeholder for an empty type parameter list.
type TypeParamList struct{}
func (*TypeParamList) Len() int { return 0 }
func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil }
// TypeList is a placeholder for an empty type list.
type TypeList struct{}
func (*TypeList) Len() int { return 0 }
func (*TypeList) At(int) types.Type { unsupported(); return nil }
// NewTypeParam is unsupported at this Go version, and panics.
func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam {
unsupported()
return nil
}
// SetTypeParamConstraint is unsupported at this Go version, and panics.
func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) {
unsupported()
}
// NewSignatureType calls types.NewSignature, panicking if recvTypeParams or
// typeParams is non-empty.
func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature {
if len(recvTypeParams) != 0 || len(typeParams) != 0 {
panic("signatures cannot have type parameters at this Go version")
}
return types.NewSignature(recv, params, results, variadic)
}
// ForSignature returns an empty slice.
func ForSignature(*types.Signature) *TypeParamList {
return nil
}
// RecvTypeParams returns a nil slice.
func RecvTypeParams(sig *types.Signature) *TypeParamList {
return nil
}
// IsComparable returns false, as no interfaces are type-restricted at this Go
// version.
func IsComparable(*types.Interface) bool {
return false
}
// IsMethodSet returns true, as no interfaces are type-restricted at this Go
// version.
func IsMethodSet(*types.Interface) bool {
return true
}
// IsImplicit returns false, as no interfaces are implicit at this Go version.
func IsImplicit(*types.Interface) bool {
return false
}
// MarkImplicit does nothing, because this Go version does not have implicit
// interfaces.
func MarkImplicit(*types.Interface) {}
// ForNamed returns an empty type parameter list, as type parameters are not
// supported at this Go version.
func ForNamed(*types.Named) *TypeParamList {
return nil
}
// SetForNamed panics if tparams is non-empty.
func SetForNamed(_ *types.Named, tparams []*TypeParam) {
if len(tparams) > 0 {
unsupported()
}
}
// NamedTypeArgs returns nil.
func NamedTypeArgs(*types.Named) *TypeList {
return nil
}
// NamedTypeOrigin is the identity method at this Go version.
func NamedTypeOrigin(named *types.Named) types.Type {
return named
}
// Term holds information about a structural type restriction.
type Term struct {
tilde bool
typ types.Type
}
func (m *Term) Tilde() bool { return m.tilde }
func (m *Term) Type() types.Type { return m.typ }
func (m *Term) String() string {
pre := ""
if m.tilde {
pre = "~"
}
return pre + m.typ.String()
}
// NewTerm returns a placeholder Term at this Go version.
func NewTerm(tilde bool, typ types.Type) *Term {
return &Term{tilde, typ}
}
// Union is a placeholder type, as type parameters are not supported at this Go
// version. Its methods panic on use.
type Union struct{ types.Type }
func (*Union) Len() int { return 0 }
func (*Union) Term(i int) *Term { unsupported(); return nil }
// NewUnion is unsupported at this Go version, and panics.
func NewUnion(terms []*Term) *Union {
unsupported()
return nil
}
// InitInstanceInfo is a noop at this Go version.
func InitInstanceInfo(*types.Info) {}
// Instance is a placeholder type, as type parameters are not supported at this
// Go version.
type Instance struct {
TypeArgs *TypeList
Type types.Type
}
// GetInstances returns a nil map, as type parameters are not supported at this
// Go version.
func GetInstances(info *types.Info) map[*ast.Ident]Instance { return nil }
// Context is a placeholder type, as type parameters are not supported at
// this Go version.
type Context struct{}
// NewContext returns a placeholder Context instance.
func NewContext() *Context {
return &Context{}
}
// Instantiate is unsupported on this Go version, and panics.
func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) {
unsupported()
return nil, nil
}

View File

@ -0,0 +1,151 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.18
// +build go1.18
package typeparams
import (
"go/ast"
"go/types"
)
// IndexListExpr is an alias for ast.IndexListExpr.
type IndexListExpr = ast.IndexListExpr
// ForTypeSpec returns n.TypeParams.
func ForTypeSpec(n *ast.TypeSpec) *ast.FieldList {
if n == nil {
return nil
}
return n.TypeParams
}
// ForFuncType returns n.TypeParams.
func ForFuncType(n *ast.FuncType) *ast.FieldList {
if n == nil {
return nil
}
return n.TypeParams
}
// TypeParam is an alias for types.TypeParam
type TypeParam = types.TypeParam
// TypeParamList is an alias for types.TypeParamList
type TypeParamList = types.TypeParamList
// TypeList is an alias for types.TypeList
type TypeList = types.TypeList
// NewTypeParam calls types.NewTypeParam.
func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam {
return types.NewTypeParam(name, constraint)
}
// SetTypeParamConstraint calls tparam.SetConstraint(constraint).
func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) {
tparam.SetConstraint(constraint)
}
// NewSignatureType calls types.NewSignatureType.
func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature {
return types.NewSignatureType(recv, recvTypeParams, typeParams, params, results, variadic)
}
// ForSignature returns sig.TypeParams()
func ForSignature(sig *types.Signature) *TypeParamList {
return sig.TypeParams()
}
// RecvTypeParams returns sig.RecvTypeParams().
func RecvTypeParams(sig *types.Signature) *TypeParamList {
return sig.RecvTypeParams()
}
// IsComparable calls iface.IsComparable().
func IsComparable(iface *types.Interface) bool {
return iface.IsComparable()
}
// IsMethodSet calls iface.IsMethodSet().
func IsMethodSet(iface *types.Interface) bool {
return iface.IsMethodSet()
}
// IsImplicit calls iface.IsImplicit().
func IsImplicit(iface *types.Interface) bool {
return iface.IsImplicit()
}
// MarkImplicit calls iface.MarkImplicit().
func MarkImplicit(iface *types.Interface) {
iface.MarkImplicit()
}
// ForNamed extracts the (possibly empty) type parameter object list from
// named.
func ForNamed(named *types.Named) *TypeParamList {
return named.TypeParams()
}
// SetForNamed sets the type params tparams on n. Each tparam must be of
// dynamic type *types.TypeParam.
func SetForNamed(n *types.Named, tparams []*TypeParam) {
n.SetTypeParams(tparams)
}
// NamedTypeArgs returns named.TypeArgs().
func NamedTypeArgs(named *types.Named) *TypeList {
return named.TypeArgs()
}
// NamedTypeOrigin returns named.Orig().
func NamedTypeOrigin(named *types.Named) types.Type {
return named.Origin()
}
// Term is an alias for types.Term.
type Term = types.Term
// NewTerm calls types.NewTerm.
func NewTerm(tilde bool, typ types.Type) *Term {
return types.NewTerm(tilde, typ)
}
// Union is an alias for types.Union
type Union = types.Union
// NewUnion calls types.NewUnion.
func NewUnion(terms []*Term) *Union {
return types.NewUnion(terms)
}
// InitInstanceInfo initializes info to record information about type and
// function instances.
func InitInstanceInfo(info *types.Info) {
info.Instances = make(map[*ast.Ident]types.Instance)
}
// Instance is an alias for types.Instance.
type Instance = types.Instance
// GetInstances returns info.Instances.
func GetInstances(info *types.Info) map[*ast.Ident]Instance {
return info.Instances
}
// Context is an alias for types.Context.
type Context = types.Context
// NewContext calls types.NewContext.
func NewContext() *Context {
return types.NewContext()
}
// Instantiate calls types.Instantiate.
func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) {
return types.Instantiate(ctxt, typ, targs, validate)
}
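
Together with the go1.17 stubs in the previous file, these aliases let the rest of x/tools handle generics without per-package build tags. A hedged sketch of that calling pattern (the package and helper names below are hypothetical, not part of the diff):

package modanalysis // hypothetical caller inside x/tools

import (
	"go/ast"

	"golang.org/x/tools/internal/typeparams"
)

// typeParamNames returns the names of the type parameters declared on spec.
// On toolchains older than Go 1.18, or for non-generic declarations, it
// returns nil, because ForTypeSpec returns nil there.
func typeParamNames(spec *ast.TypeSpec) []string {
	tparams := typeparams.ForTypeSpec(spec)
	if tparams == nil {
		return nil
	}
	var names []string
	for _, field := range tparams.List {
		for _, name := range field.Names {
			names = append(names, name.Name)
		}
	}
	return names
}
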

View File

@ -0,0 +1,170 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated by copytermlist.go DO NOT EDIT.
package typeparams
import "go/types"
// A term describes elementary type sets:
//
// ∅: (*term)(nil) == ∅ // set of no types (empty set)
// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse)
// T: &term{false, T} == {T} // set of type T
// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t
//
type term struct {
tilde bool // valid if typ != nil
typ types.Type
}
func (x *term) String() string {
switch {
case x == nil:
return "∅"
case x.typ == nil:
return "𝓤"
case x.tilde:
return "~" + x.typ.String()
default:
return x.typ.String()
}
}
// equal reports whether x and y represent the same type set.
func (x *term) equal(y *term) bool {
// easy cases
switch {
case x == nil || y == nil:
return x == y
case x.typ == nil || y.typ == nil:
return x.typ == y.typ
}
// ∅ ⊂ x, y ⊂ 𝓤
return x.tilde == y.tilde && types.Identical(x.typ, y.typ)
}
// union returns the union x ∪ y: zero, one, or two non-nil terms.
func (x *term) union(y *term) (_, _ *term) {
// easy cases
switch {
case x == nil && y == nil:
return nil, nil // ∅ ∪ ∅ == ∅
case x == nil:
return y, nil // ∅ ∪ y == y
case y == nil:
return x, nil // x ∪ ∅ == x
case x.typ == nil:
return x, nil // 𝓤 ∪ y == 𝓤
case y.typ == nil:
return y, nil // x ∪ 𝓤 == 𝓤
}
// ∅ ⊂ x, y ⊂ 𝓤
if x.disjoint(y) {
return x, y // x ∪ y == (x, y) if x ∩ y == ∅
}
// x.typ == y.typ
// ~t ∪ ~t == ~t
// ~t ∪ T == ~t
// T ∪ ~t == ~t
// T ∪ T == T
if x.tilde || !y.tilde {
return x, nil
}
return y, nil
}
// intersect returns the intersection x ∩ y.
func (x *term) intersect(y *term) *term {
// easy cases
switch {
case x == nil || y == nil:
return nil // ∅ ∩ y == ∅ and x ∩ ∅ == ∅
case x.typ == nil:
return y // 𝓤 ∩ y == y
case y.typ == nil:
return x // x ∩ 𝓤 == x
}
// ∅ ⊂ x, y ⊂ 𝓤
if x.disjoint(y) {
return nil // x ∩ y == ∅ if x ∩ y == ∅
}
// x.typ == y.typ
// ~t ∩ ~t == ~t
// ~t ∩ T == T
// T ∩ ~t == T
// T ∩ T == T
if !x.tilde || y.tilde {
return x
}
return y
}
// includes reports whether t ∈ x.
func (x *term) includes(t types.Type) bool {
// easy cases
switch {
case x == nil:
return false // t ∈ ∅ == false
case x.typ == nil:
return true // t ∈ 𝓤 == true
}
// ∅ ⊂ x ⊂ 𝓤
u := t
if x.tilde {
u = under(u)
}
return types.Identical(x.typ, u)
}
// subsetOf reports whether x ⊆ y.
func (x *term) subsetOf(y *term) bool {
// easy cases
switch {
case x == nil:
return true // ∅ ⊆ y == true
case y == nil:
return false // x ⊆ ∅ == false since x != ∅
case y.typ == nil:
return true // x ⊆ 𝓤 == true
case x.typ == nil:
return false // 𝓤 ⊆ y == false since y != 𝓤
}
// ∅ ⊂ x, y ⊂ 𝓤
if x.disjoint(y) {
return false // x ⊆ y == false if x ∩ y == ∅
}
// x.typ == y.typ
// ~t ⊆ ~t == true
// ~t ⊆ T == false
// T ⊆ ~t == true
// T ⊆ T == true
return !x.tilde || y.tilde
}
// disjoint reports whether x ∩ y == ∅.
// x.typ and y.typ must not be nil.
func (x *term) disjoint(y *term) bool {
if debug && (x.typ == nil || y.typ == nil) {
panic("invalid argument(s)")
}
ux := x.typ
if y.tilde {
ux = under(ux)
}
uy := y.typ
if x.tilde {
uy = under(uy)
}
return !types.Identical(ux, uy)
}

View File

@ -1365,4 +1365,162 @@ const (
// return i
// }
InvalidGo
// All codes below were added in Go 1.17.
/* decl */
// BadDecl occurs when a declaration has invalid syntax.
BadDecl
// RepeatedDecl occurs when an identifier occurs more than once on the left
// hand side of a short variable declaration.
//
// Example:
// func _() {
// x, y, y := 1, 2, 3
// }
RepeatedDecl
/* unsafe */
// InvalidUnsafeAdd occurs when unsafe.Add is called with a
// length argument that is not of integer type.
//
// Example:
// import "unsafe"
//
// var p unsafe.Pointer
// var _ = unsafe.Add(p, float64(1))
InvalidUnsafeAdd
// InvalidUnsafeSlice occurs when unsafe.Slice is called with a
// pointer argument that is not of pointer type or a length argument
// that is not of integer type, negative, or out of bounds.
//
// Example:
// import "unsafe"
//
// var x int
// var _ = unsafe.Slice(x, 1)
//
// Example:
// import "unsafe"
//
// var x int
// var _ = unsafe.Slice(&x, float64(1))
//
// Example:
// import "unsafe"
//
// var x int
// var _ = unsafe.Slice(&x, -1)
//
// Example:
// import "unsafe"
//
// var x int
// var _ = unsafe.Slice(&x, uint64(1) << 63)
InvalidUnsafeSlice
// All codes below were added in Go 1.18.
/* features */
// UnsupportedFeature occurs when a language feature is used that is not
// supported at this Go version.
UnsupportedFeature
/* type params */
// NotAGenericType occurs when a non-generic type is used where a generic
// type is expected: in type or function instantiation.
//
// Example:
// type T int
//
// var _ T[int]
NotAGenericType
// WrongTypeArgCount occurs when a type or function is instantiated with an
// incorrect number of type arguments, including when a generic type or
// function is used without instantiation.
//
// Errors involving failed type inference are assigned other error codes.
//
// Example:
// type T[p any] int
//
// var _ T[int, string]
//
// Example:
// func f[T any]() {}
//
// var x = f
WrongTypeArgCount
// CannotInferTypeArgs occurs when type or function type argument inference
// fails to infer all type arguments.
//
// Example:
// func f[T any]() {}
//
// func _() {
// f()
// }
//
// Example:
// type N[P, Q any] struct{}
//
// var _ N[int]
CannotInferTypeArgs
// InvalidTypeArg occurs when a type argument does not satisfy its
// corresponding type parameter constraints.
//
// Example:
// type T[P ~int] struct{}
//
// var _ T[string]
InvalidTypeArg // arguments? InferenceFailed
// InvalidInstanceCycle occurs when an invalid cycle is detected
// within the instantiation graph.
//
// Example:
// func f[T any]() { f[*T]() }
InvalidInstanceCycle
// InvalidUnion occurs when an embedded union or approximation element is
// not valid.
//
// Example:
// type _ interface {
// ~int | interface{ m() }
// }
InvalidUnion
// MisplacedConstraintIface occurs when a constraint-type interface is used
// outside of constraint position.
//
// Example:
// type I interface { ~int }
//
// var _ I
MisplacedConstraintIface
// InvalidMethodTypeParams occurs when methods have type parameters.
//
// It cannot be encountered with an AST parsed using go/parser.
InvalidMethodTypeParams
// MisplacedTypeParam occurs when a type parameter is used in a place where
// it is not permitted.
//
// Example:
// type T[P any] P
//
// Example:
// type T[P any] struct{ *P }
MisplacedTypeParam
)

View File

@ -138,11 +138,25 @@ func _() {
_ = x[UnusedResults-128]
_ = x[InvalidDefer-129]
_ = x[InvalidGo-130]
_ = x[BadDecl-131]
_ = x[RepeatedDecl-132]
_ = x[InvalidUnsafeAdd-133]
_ = x[InvalidUnsafeSlice-134]
_ = x[UnsupportedFeature-135]
_ = x[NotAGenericType-136]
_ = x[WrongTypeArgCount-137]
_ = x[CannotInferTypeArgs-138]
_ = x[InvalidTypeArg-139]
_ = x[InvalidInstanceCycle-140]
_ = x[InvalidUnion-141]
_ = x[MisplacedConstraintIface-142]
_ = x[InvalidMethodTypeParams-143]
_ = x[MisplacedTypeParam-144]
}
const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGo"
const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParam"
var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903}
var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903, 1910, 1922, 1938, 1956, 1974, 1989, 2006, 2025, 2039, 2059, 2071, 2095, 2118, 2136}
func (i ErrorCode) String() string {
i -= 1

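The generated tables above follow the usual stringer layout: _ErrorCode_index holds byte offsets into the single concatenated _ErrorCode_name string, so each code's label is the slice between two adjacent offsets. A minimal sketch of that lookup, written as a standalone helper rather than the file's actual (truncated) String body:

func lookup(names string, index []uint16, code int) string {
    i := code - 1 // ErrorCode values start at 1, the tables at offset 0
    if i < 0 || i >= len(index)-1 {
        return ""
    }
    return names[index[i]:index[i+1]]
}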
View File

@ -30,10 +30,15 @@ func SetUsesCgo(conf *types.Config) bool {
return true
}
func ReadGo116ErrorData(terr types.Error) (ErrorCode, token.Pos, token.Pos, bool) {
// ReadGo116ErrorData extracts additional information from types.Error values
// generated by Go version 1.16 and later: the error code, start position, and
// end position. If all positions are valid, start <= err.Pos <= end.
//
// If the data could not be read, the final result parameter will be false.
func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, ok bool) {
var data [3]int
// By coincidence all of these fields are ints, which simplifies things.
v := reflect.ValueOf(terr)
v := reflect.ValueOf(err)
for i, name := range []string{"go116code", "go116start", "go116end"} {
f := v.FieldByName(name)
if !f.IsValid() {
@ -43,3 +48,5 @@ func ReadGo116ErrorData(terr types.Error) (ErrorCode, token.Pos, token.Pos, bool
}
return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true
}
var SetGoVersion = func(conf *types.Config, version string) bool { return false }
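To illustrate the updated signature documented above, here is a hedged sketch of a caller that recovers the extra data from a type-checking error; the surrounding function is an assumption, and the import only compiles from inside golang.org/x/tools because the package is internal.

package example

import (
    "fmt"
    "go/types"

    "golang.org/x/tools/internal/typesinternal"
)

func reportTypeError(err error) {
    terr, ok := err.(types.Error)
    if !ok {
        return
    }
    // ok is false when the runtime does not expose the Go 1.16+ fields.
    if code, start, end, ok := typesinternal.ReadGo116ErrorData(terr); ok {
        fmt.Printf("code=%d span=[%v, %v]\n", code, start, end)
    }
}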

View File

@ -0,0 +1,19 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.18
// +build go1.18
package typesinternal
import (
"go/types"
)
func init() {
SetGoVersion = func(conf *types.Config, version string) bool {
conf.GoVersion = version
return true
}
}
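A hedged sketch of how this hook is consumed: on go1.18 and later the init above swaps in an implementation that sets types.Config.GoVersion, while on older toolchains the default stub reports false. The helper and version string below are assumptions.

package example

import (
    "go/types"

    "golang.org/x/tools/internal/typesinternal"
)

// configureChecker is a hypothetical helper; ok is false on pre-go1.18
// toolchains, where types.Config has no GoVersion field to set.
func configureChecker(version string) (*types.Config, bool) {
    conf := &types.Config{}
    ok := typesinternal.SetGoVersion(conf, version)
    return conf, ok
}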

27
vendor/golang.org/x/xerrors/LICENSE generated vendored
View File

@ -1,27 +0,0 @@
Copyright (c) 2019 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

22
vendor/golang.org/x/xerrors/PATENTS generated vendored
View File

@ -1,22 +0,0 @@
Additional IP Rights Grant (Patents)
"This implementation" means the copyrightable works distributed by
Google as part of the Go project.
Google hereby grants to You a perpetual, worldwide, non-exclusive,
no-charge, royalty-free, irrevocable (except as stated in this section)
patent license to make, have made, use, offer to sell, sell, import,
transfer and otherwise run, modify and propagate the contents of this
implementation of Go, where such license applies only to those patent
claims, both currently owned or controlled by Google and acquired in
the future, licensable by Google that are necessarily infringed by this
implementation of Go. This grant does not include claims that would be
infringed only as a consequence of further modification of this
implementation. If you or your agent or exclusive licensee institute or
order or agree to the institution of patent litigation against any
entity (including a cross-claim or counterclaim in a lawsuit) alleging
that this implementation of Go or any code incorporated within this
implementation of Go constitutes direct or contributory patent
infringement, or inducement of patent infringement, then any patent
rights granted to you under this License for this implementation of Go
shall terminate as of the date such litigation is filed.

2
vendor/golang.org/x/xerrors/README generated vendored
View File

@ -1,2 +0,0 @@
This repository holds the transition packages for the new Go 1.13 error values.
See golang.org/design/29934-error-values.

View File

@ -1,193 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xerrors
import (
"bytes"
"fmt"
"io"
"reflect"
"strconv"
)
// FormatError calls the FormatError method of f with an errors.Printer
// configured according to s and verb, and writes the result to s.
func FormatError(f Formatter, s fmt.State, verb rune) {
// Assuming this function is only called from the Format method, and given
// that FormatError takes precedence over Format, it cannot be called from
// any package that supports errors.Formatter. It is therefore safe to
// disregard that State may be a specific printer implementation and use one
// of our choice instead.
// limitations: does not support printing error as Go struct.
var (
sep = " " // separator before next error
p = &state{State: s}
direct = true
)
var err error = f
switch verb {
// Note that this switch must match the preference order
// for ordinary string printing (%#v before %+v, and so on).
case 'v':
if s.Flag('#') {
if stringer, ok := err.(fmt.GoStringer); ok {
io.WriteString(&p.buf, stringer.GoString())
goto exit
}
// proceed as if it were %v
} else if s.Flag('+') {
p.printDetail = true
sep = "\n - "
}
case 's':
case 'q', 'x', 'X':
// Use an intermediate buffer in the rare cases that precision,
// truncation, or one of the alternative verbs (q, x, and X) are
// specified.
direct = false
default:
p.buf.WriteString("%!")
p.buf.WriteRune(verb)
p.buf.WriteByte('(')
switch {
case err != nil:
p.buf.WriteString(reflect.TypeOf(f).String())
default:
p.buf.WriteString("<nil>")
}
p.buf.WriteByte(')')
io.Copy(s, &p.buf)
return
}
loop:
for {
switch v := err.(type) {
case Formatter:
err = v.FormatError((*printer)(p))
case fmt.Formatter:
v.Format(p, 'v')
break loop
default:
io.WriteString(&p.buf, v.Error())
break loop
}
if err == nil {
break
}
if p.needColon || !p.printDetail {
p.buf.WriteByte(':')
p.needColon = false
}
p.buf.WriteString(sep)
p.inDetail = false
p.needNewline = false
}
exit:
width, okW := s.Width()
prec, okP := s.Precision()
if !direct || (okW && width > 0) || okP {
// Construct format string from State s.
format := []byte{'%'}
if s.Flag('-') {
format = append(format, '-')
}
if s.Flag('+') {
format = append(format, '+')
}
if s.Flag(' ') {
format = append(format, ' ')
}
if okW {
format = strconv.AppendInt(format, int64(width), 10)
}
if okP {
format = append(format, '.')
format = strconv.AppendInt(format, int64(prec), 10)
}
format = append(format, string(verb)...)
fmt.Fprintf(s, string(format), p.buf.String())
} else {
io.Copy(s, &p.buf)
}
}
var detailSep = []byte("\n ")
// state tracks error printing state. It implements fmt.State.
type state struct {
fmt.State
buf bytes.Buffer
printDetail bool
inDetail bool
needColon bool
needNewline bool
}
func (s *state) Write(b []byte) (n int, err error) {
if s.printDetail {
if len(b) == 0 {
return 0, nil
}
if s.inDetail && s.needColon {
s.needNewline = true
if b[0] == '\n' {
b = b[1:]
}
}
k := 0
for i, c := range b {
if s.needNewline {
if s.inDetail && s.needColon {
s.buf.WriteByte(':')
s.needColon = false
}
s.buf.Write(detailSep)
s.needNewline = false
}
if c == '\n' {
s.buf.Write(b[k:i])
k = i + 1
s.needNewline = true
}
}
s.buf.Write(b[k:])
if !s.inDetail {
s.needColon = true
}
} else if !s.inDetail {
s.buf.Write(b)
}
return len(b), nil
}
// printer wraps a state to implement an xerrors.Printer.
type printer state
func (s *printer) Print(args ...interface{}) {
if !s.inDetail || s.printDetail {
fmt.Fprint((*state)(s), args...)
}
}
func (s *printer) Printf(format string, args ...interface{}) {
if !s.inDetail || s.printDetail {
fmt.Fprintf((*state)(s), format, args...)
}
}
func (s *printer) Detail() bool {
s.inDetail = true
return s.printDetail
}
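FormatError above is the bridge between fmt and this package's Formatter interface; a minimal assumed example of an error type wiring its Format method through it (the type and its fields are invented):

package example

import (
    "fmt"

    "golang.org/x/xerrors"
)

// parseError is a hypothetical error type.
type parseError struct {
    path  string
    frame xerrors.Frame
}

func (e *parseError) Error() string { return "parse " + e.path }

// Format delegates to FormatError so %v, %+v, %q, and friends share one path.
func (e *parseError) Format(s fmt.State, v rune) { xerrors.FormatError(e, s, v) }

// FormatError prints the message and, in detail mode, the recorded frame.
func (e *parseError) FormatError(p xerrors.Printer) error {
    p.Print("parse ", e.path)
    e.frame.Format(p)
    return nil // no wrapped error
}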

View File

@ -1 +0,0 @@
issuerepo: golang/go

23
vendor/golang.org/x/xerrors/doc.go generated vendored
View File

@ -1,23 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package xerrors implements functions to manipulate errors.
//
// This package is based on the Go 2 proposal for error values:
//
// https://golang.org/design/29934-error-values
//
// These functions were incorporated into the standard library's errors package
// in Go 1.13:
// - Is
// - As
// - Unwrap
//
// Also, Errorf's %w verb was incorporated into fmt.Errorf.
//
// Use this package to get equivalent behavior in all supported Go versions.
//
// No other features of this package were included in Go 1.13, and at present
// there are no plans to include any of them.
package xerrors // import "golang.org/x/xerrors"
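A small assumed example of the equivalence the package documentation describes: the same check works through xerrors on any supported Go version and through the standard errors package on Go 1.13 and later.

package example

import (
    "os"

    "golang.org/x/xerrors"
)

func isMissing(err error) bool {
    // Equivalent to errors.Is(err, os.ErrNotExist) on Go 1.13 and later.
    return xerrors.Is(err, os.ErrNotExist)
}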

View File

@ -1,33 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xerrors
import "fmt"
// errorString is a trivial implementation of error.
type errorString struct {
s string
frame Frame
}
// New returns an error that formats as the given text.
//
// The returned error contains a Frame set to the caller's location and
// implements Formatter to show this information when printed with details.
func New(text string) error {
return &errorString{text, Caller(1)}
}
func (e *errorString) Error() string {
return e.s
}
func (e *errorString) Format(s fmt.State, v rune) { FormatError(e, s, v) }
func (e *errorString) FormatError(p Printer) (next error) {
p.Print(e.s)
e.frame.Format(p)
return nil
}
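A brief assumed illustration of New: the returned error carries the caller's frame, so printing it with %+v (detail enabled) also reports where it was created.

package example

import (
    "fmt"

    "golang.org/x/xerrors"
)

func demoNew() {
    err := xerrors.New("config not found")
    fmt.Printf("%v\n", err)  // config not found
    fmt.Printf("%+v\n", err) // message plus the creating function and file:line
}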

190
vendor/golang.org/x/xerrors/fmt.go generated vendored
View File

@ -1,190 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xerrors
import (
"fmt"
"strings"
"unicode"
"unicode/utf8"
"golang.org/x/xerrors/internal"
)
const percentBangString = "%!"
// Errorf formats according to a format specifier and returns the string as a
// value that satisfies error.
//
// The returned error includes the file and line number of the caller when
// formatted with additional detail enabled. If the last argument is an error
// the returned error's Format method will return it if the format string ends
// with ": %s", ": %v", or ": %w". If the last argument is an error and the
// format string ends with ": %w", the returned error implements an Unwrap
// method returning it.
//
// If the format specifier includes a %w verb with an error operand in a
// position other than at the end, the returned error will still implement an
// Unwrap method returning the operand, but the error's Format method will not
// return the wrapped error.
//
// It is invalid to include more than one %w verb or to supply it with an
// operand that does not implement the error interface. The %w verb is otherwise
// a synonym for %v.
//
// Note that as of Go 1.13, the fmt.Errorf function will do error formatting,
// but it will not capture a stack backtrace.
func Errorf(format string, a ...interface{}) error {
format = formatPlusW(format)
// Support a ": %[wsv]" suffix, which works well with xerrors.Formatter.
wrap := strings.HasSuffix(format, ": %w")
idx, format2, ok := parsePercentW(format)
percentWElsewhere := !wrap && idx >= 0
if !percentWElsewhere && (wrap || strings.HasSuffix(format, ": %s") || strings.HasSuffix(format, ": %v")) {
err := errorAt(a, len(a)-1)
if err == nil {
return &noWrapError{fmt.Sprintf(format, a...), nil, Caller(1)}
}
// TODO: this is not entirely correct. The error value could be
// printed elsewhere in format if it mixes numbered with unnumbered
// substitutions. With relatively small changes to doPrintf we can
// have it optionally ignore extra arguments and pass the argument
// list in its entirety.
msg := fmt.Sprintf(format[:len(format)-len(": %s")], a[:len(a)-1]...)
frame := Frame{}
if internal.EnableTrace {
frame = Caller(1)
}
if wrap {
return &wrapError{msg, err, frame}
}
return &noWrapError{msg, err, frame}
}
// Support %w anywhere.
// TODO: don't repeat the wrapped error's message when %w occurs in the middle.
msg := fmt.Sprintf(format2, a...)
if idx < 0 {
return &noWrapError{msg, nil, Caller(1)}
}
err := errorAt(a, idx)
if !ok || err == nil {
// Too many %ws or argument of %w is not an error. Approximate the Go
// 1.13 fmt.Errorf message.
return &noWrapError{fmt.Sprintf("%sw(%s)", percentBangString, msg), nil, Caller(1)}
}
frame := Frame{}
if internal.EnableTrace {
frame = Caller(1)
}
return &wrapError{msg, err, frame}
}
func errorAt(args []interface{}, i int) error {
if i < 0 || i >= len(args) {
return nil
}
err, ok := args[i].(error)
if !ok {
return nil
}
return err
}
// formatPlusW is used to avoid the vet check that will barf at %w.
func formatPlusW(s string) string {
return s
}
// Return the index of the only %w in format, or -1 if none.
// Also return a rewritten format string with %w replaced by %v, and
// false if there is more than one %w.
// TODO: handle "%[N]w".
func parsePercentW(format string) (idx int, newFormat string, ok bool) {
// Loosely copied from golang.org/x/tools/go/analysis/passes/printf/printf.go.
idx = -1
ok = true
n := 0
sz := 0
var isW bool
for i := 0; i < len(format); i += sz {
if format[i] != '%' {
sz = 1
continue
}
// "%%" is not a format directive.
if i+1 < len(format) && format[i+1] == '%' {
sz = 2
continue
}
sz, isW = parsePrintfVerb(format[i:])
if isW {
if idx >= 0 {
ok = false
} else {
idx = n
}
// "Replace" the last character, the 'w', with a 'v'.
p := i + sz - 1
format = format[:p] + "v" + format[p+1:]
}
n++
}
return idx, format, ok
}
// Parse the printf verb starting with a % at s[0].
// Return how many bytes it occupies and whether the verb is 'w'.
func parsePrintfVerb(s string) (int, bool) {
// Assume only that the directive is a sequence of non-letters followed by a single letter.
sz := 0
var r rune
for i := 1; i < len(s); i += sz {
r, sz = utf8.DecodeRuneInString(s[i:])
if unicode.IsLetter(r) {
return i + sz, r == 'w'
}
}
return len(s), false
}
type noWrapError struct {
msg string
err error
frame Frame
}
func (e *noWrapError) Error() string {
return fmt.Sprint(e)
}
func (e *noWrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) }
func (e *noWrapError) FormatError(p Printer) (next error) {
p.Print(e.msg)
e.frame.Format(p)
return e.err
}
type wrapError struct {
msg string
err error
frame Frame
}
func (e *wrapError) Error() string {
return fmt.Sprint(e)
}
func (e *wrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) }
func (e *wrapError) FormatError(p Printer) (next error) {
p.Print(e.msg)
e.frame.Format(p)
return e.err
}
func (e *wrapError) Unwrap() error {
return e.err
}
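To make the wrapping rules above concrete, an assumed example: a format ending in ": %w" with an error as the final argument yields an error whose Unwrap returns that argument, so Is can see through it.

package example

import (
    "fmt"
    "os"

    "golang.org/x/xerrors"
)

func open(name string) error {
    f, err := os.Open(name)
    if err != nil {
        // Ends with ": %w", so the result wraps err and implements Unwrap.
        return xerrors.Errorf("open %s: %w", name, err)
    }
    return f.Close()
}

func demoWrap() {
    err := open("missing.json")
    fmt.Println(xerrors.Is(err, os.ErrNotExist)) // true when the file is absent
}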

View File

@ -1,34 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xerrors
// A Formatter formats error messages.
type Formatter interface {
error
// FormatError prints the receiver's first error and returns the next error in
// the error chain, if any.
FormatError(p Printer) (next error)
}
// A Printer formats error messages.
//
// The most common implementation of Printer is the one provided by package fmt
// during Printf (as of Go 1.13). Localization packages such as golang.org/x/text/message
// typically provide their own implementations.
type Printer interface {
// Print appends args to the message output.
Print(args ...interface{})
// Printf writes a formatted string.
Printf(format string, args ...interface{})
// Detail reports whether error detail is requested.
// After the first call to Detail, all text written to the Printer
// is formatted as additional detail, or ignored when
// detail has not been requested.
// If Detail returns false, the caller can avoid printing the detail at all.
Detail() bool
}
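An assumed FormatError implementation showing how the Printer methods cooperate: Print and Printf build the message, and Detail gates the extra information that only appears when detail is requested (for example with %+v).

package example

import "golang.org/x/xerrors"

// queryError is a hypothetical error carrying extra diagnostic fields.
type queryError struct {
    op   string
    host string
    err  error
}

func (e *queryError) Error() string { return e.op + " failed" }

func (e *queryError) FormatError(p xerrors.Printer) error {
    p.Print(e.op, " failed")
    if p.Detail() {
        // Only reached when detail was requested.
        p.Printf("host: %s", e.host)
    }
    return e.err // next error in the chain, printed by the caller
}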

56
vendor/golang.org/x/xerrors/frame.go generated vendored
View File

@ -1,56 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xerrors
import (
"runtime"
)
// A Frame contains part of a call stack.
type Frame struct {
// Make room for three PCs: the one we were asked for, what it called,
// and possibly a PC for skipPleaseUseCallersFrames. See:
// https://go.googlesource.com/go/+/032678e0fb/src/runtime/extern.go#169
frames [3]uintptr
}
// Caller returns a Frame that describes a frame on the caller's stack.
// The argument skip is the number of frames to skip over.
// Caller(0) returns the frame for the caller of Caller.
func Caller(skip int) Frame {
var s Frame
runtime.Callers(skip+1, s.frames[:])
return s
}
// location reports the file, line, and function of a frame.
//
// The returned function may be "" even if file and line are not.
func (f Frame) location() (function, file string, line int) {
frames := runtime.CallersFrames(f.frames[:])
if _, ok := frames.Next(); !ok {
return "", "", 0
}
fr, ok := frames.Next()
if !ok {
return "", "", 0
}
return fr.Function, fr.File, fr.Line
}
// Format prints the stack as error detail.
// It should be called from an error's Format implementation
// after printing any other error detail.
func (f Frame) Format(p Printer) {
if p.Detail() {
function, file, line := f.location()
if function != "" {
p.Printf("%s\n ", function)
}
if file != "" {
p.Printf("%s:%d\n", file, line)
}
}
}
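A short assumed example of the skip argument: constructors usually pass 1 so the recorded frame points at their caller rather than at the constructor itself.

package example

import "golang.org/x/xerrors"

// tracedError is a hypothetical error that remembers where it was built.
type tracedError struct {
    msg   string
    frame xerrors.Frame
}

func (e *tracedError) Error() string { return e.msg }

func newTraced(msg string) error {
    // Caller(1) skips newTraced, recording the line that called it.
    return &tracedError{msg, xerrors.Caller(1)}
}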

View File

@ -1,8 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package internal
// EnableTrace indicates whether stack information should be recorded in errors.
var EnableTrace = true

112
vendor/golang.org/x/xerrors/wrap.go generated vendored
View File

@ -1,112 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xerrors
import (
"reflect"
)
// A Wrapper provides context around another error.
type Wrapper interface {
// Unwrap returns the next error in the error chain.
// If there is no next error, Unwrap returns nil.
Unwrap() error
}
// Opaque returns an error with the same error formatting as err
// but that does not match err and cannot be unwrapped.
func Opaque(err error) error {
return noWrapper{err}
}
type noWrapper struct {
error
}
func (e noWrapper) FormatError(p Printer) (next error) {
if f, ok := e.error.(Formatter); ok {
return f.FormatError(p)
}
p.Print(e.error)
return nil
}
// Unwrap returns the result of calling the Unwrap method on err, if err implements
// Unwrap. Otherwise, Unwrap returns nil.
//
// Deprecated: As of Go 1.13, use errors.Unwrap instead.
func Unwrap(err error) error {
u, ok := err.(Wrapper)
if !ok {
return nil
}
return u.Unwrap()
}
// Is reports whether any error in err's chain matches target.
//
// An error is considered to match a target if it is equal to that target or if
// it implements a method Is(error) bool such that Is(target) returns true.
//
// Deprecated: As of Go 1.13, use errors.Is instead.
func Is(err, target error) bool {
if target == nil {
return err == target
}
isComparable := reflect.TypeOf(target).Comparable()
for {
if isComparable && err == target {
return true
}
if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) {
return true
}
// TODO: consider supporting target.Is(err). This would allow
// user-definable predicates, but also may allow for coping with sloppy
// APIs, thereby making it easier to get away with them.
if err = Unwrap(err); err == nil {
return false
}
}
}
// As finds the first error in err's chain that matches the type to which target
// points, and if so, sets the target to its value and returns true. An error
// matches a type if it is assignable to the target type, or if it has a method
// As(interface{}) bool such that As(target) returns true. As will panic if target
// is not a non-nil pointer to a type which implements error or is of interface type.
//
// The As method should set the target to its value and return true if err
// matches the type to which target points.
//
// Deprecated: As of Go 1.13, use errors.As instead.
func As(err error, target interface{}) bool {
if target == nil {
panic("errors: target cannot be nil")
}
val := reflect.ValueOf(target)
typ := val.Type()
if typ.Kind() != reflect.Ptr || val.IsNil() {
panic("errors: target must be a non-nil pointer")
}
if e := typ.Elem(); e.Kind() != reflect.Interface && !e.Implements(errorType) {
panic("errors: *target must be interface or implement error")
}
targetType := typ.Elem()
for err != nil {
if reflect.TypeOf(err).AssignableTo(targetType) {
val.Elem().Set(reflect.ValueOf(err))
return true
}
if x, ok := err.(interface{ As(interface{}) bool }); ok && x.As(target) {
return true
}
err = Unwrap(err)
}
return false
}
var errorType = reflect.TypeOf((*error)(nil)).Elem()
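A compact assumed example of the helpers above: As extracts a concrete error type from the chain, Is matches sentinel values, and Opaque hides the chain from both.

package example

import (
    "os"

    "golang.org/x/xerrors"
)

func classify(err error) string {
    var pathErr *os.PathError
    switch {
    case xerrors.As(err, &pathErr):
        return "path error on " + pathErr.Path
    case xerrors.Is(err, os.ErrPermission):
        return "permission denied"
    default:
        // Wrapping err with Opaque would make both checks above fail,
        // because the chain can no longer be unwrapped past it.
        return "unknown"
    }
}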

16
vendor/modules.txt vendored
View File

@ -367,8 +367,9 @@ golang.org/x/crypto/salsa20/salsa
golang.org/x/crypto/ssh
golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
golang.org/x/crypto/ssh/terminal
# golang.org/x/mod v0.4.2
## explicit; go 1.12
# golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4
## explicit; go 1.17
golang.org/x/mod/internal/lazyregexp
golang.org/x/mod/module
golang.org/x/mod/semver
# golang.org/x/net v0.0.0-20220909164309-bea034e7d591
@ -394,7 +395,7 @@ golang.org/x/net/websocket
## explicit; go 1.17
golang.org/x/oauth2
golang.org/x/oauth2/internal
# golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f
# golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4
## explicit
golang.org/x/sync/errgroup
# golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664
@ -418,13 +419,14 @@ golang.org/x/text/secure/bidirule
golang.org/x/text/transform
golang.org/x/text/unicode/bidi
golang.org/x/text/unicode/norm
# golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2
## explicit; go 1.17
# golang.org/x/tools v0.1.12
## explicit; go 1.18
golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/ast/inspector
golang.org/x/tools/go/gcexportdata
golang.org/x/tools/go/internal/gcimporter
golang.org/x/tools/go/internal/packagesdriver
golang.org/x/tools/go/internal/pkgbits
golang.org/x/tools/go/packages
golang.org/x/tools/imports
golang.org/x/tools/internal/event
@ -438,10 +440,6 @@ golang.org/x/tools/internal/imports
golang.org/x/tools/internal/packagesinternal
golang.org/x/tools/internal/typeparams
golang.org/x/tools/internal/typesinternal
# golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f
## explicit; go 1.17
golang.org/x/xerrors
golang.org/x/xerrors/internal
# google.golang.org/appengine v1.6.7
## explicit; go 1.11
google.golang.org/appengine/internal