Mirror of https://github.com/cloudflare/cloudflared.git (synced 2025-07-28 16:01:05 +00:00)
TUN-9016: update go to 1.24
## Summary

Update several moving parts of cloudflared build system:

* use goboring 1.24.2 in cfsetup
* update linter and fix lint issues
* update packages, namely **quic-go and net**
* install script for macos
* update docker files to use go 1.24.1
* remove usage of cloudflare-go
* pin golang linter

Closes TUN-9016
549 changes: vendor/golang.org/x/tools/internal/imports/fix.go (generated, vendored)
@@ -32,6 +32,7 @@ import (
|
||||
"golang.org/x/tools/internal/gocommand"
|
||||
"golang.org/x/tools/internal/gopathwalk"
|
||||
"golang.org/x/tools/internal/stdlib"
|
||||
"maps"
|
||||
)
|
||||
|
||||
// importToGroup is a list of functions which map from an import path to
|
||||
@@ -90,18 +91,6 @@ type ImportFix struct {
|
||||
Relevance float64 // see pkg
|
||||
}
|
||||
|
||||
// An ImportInfo represents a single import statement.
|
||||
type ImportInfo struct {
|
||||
ImportPath string // import path, e.g. "crypto/rand".
|
||||
Name string // import name, e.g. "crand", or "" if none.
|
||||
}
|
||||
|
||||
// A packageInfo represents what's known about a package.
|
||||
type packageInfo struct {
|
||||
name string // real package name, if known.
|
||||
exports map[string]bool // known exports.
|
||||
}
|
||||
|
||||
// parseOtherFiles parses all the Go files in srcDir except filename, including
|
||||
// test files if filename looks like a test.
|
||||
//
|
||||
@@ -130,7 +119,7 @@ func parseOtherFiles(ctx context.Context, fset *token.FileSet, srcDir, filename
|
||||
continue
|
||||
}
|
||||
|
||||
f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0)
|
||||
f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, parser.SkipObjectResolution)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
@@ -161,8 +150,8 @@ func addGlobals(f *ast.File, globals map[string]bool) {
|
||||
|
||||
// collectReferences builds a map of selector expressions, from
|
||||
// left hand side (X) to a set of right hand sides (Sel).
|
||||
func collectReferences(f *ast.File) references {
|
||||
refs := references{}
|
||||
func collectReferences(f *ast.File) References {
|
||||
refs := References{}
|
||||
|
||||
var visitor visitFn
|
||||
visitor = func(node ast.Node) ast.Visitor {
|
||||
@@ -232,7 +221,7 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
|
||||
|
||||
allFound := true
|
||||
for right := range syms {
|
||||
if !pkgInfo.exports[right] {
|
||||
if !pkgInfo.Exports[right] {
|
||||
allFound = false
|
||||
break
|
||||
}
|
||||
@@ -245,11 +234,6 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
|
||||
return nil
|
||||
}
|
||||
|
||||
// references is set of references found in a Go file. The first map key is the
|
||||
// left hand side of a selector expression, the second key is the right hand
|
||||
// side, and the value should always be true.
|
||||
type references map[string]map[string]bool
|
||||
|
||||
// A pass contains all the inputs and state necessary to fix a file's imports.
|
||||
// It can be modified in some ways during use; see comments below.
|
||||
type pass struct {
|
||||
@@ -257,27 +241,29 @@ type pass struct {
|
||||
fset *token.FileSet // fset used to parse f and its siblings.
|
||||
f *ast.File // the file being fixed.
|
||||
srcDir string // the directory containing f.
|
||||
env *ProcessEnv // the environment to use for go commands, etc.
|
||||
loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
|
||||
otherFiles []*ast.File // sibling files.
|
||||
logf func(string, ...any)
|
||||
source Source // the environment to use for go commands, etc.
|
||||
loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
|
||||
otherFiles []*ast.File // sibling files.
|
||||
goroot string
|
||||
|
||||
// Intermediate state, generated by load.
|
||||
existingImports map[string][]*ImportInfo
|
||||
allRefs references
|
||||
missingRefs references
|
||||
allRefs References
|
||||
missingRefs References
|
||||
|
||||
// Inputs to fix. These can be augmented between successive fix calls.
|
||||
lastTry bool // indicates that this is the last call and fix should clean up as best it can.
|
||||
candidates []*ImportInfo // candidate imports in priority order.
|
||||
knownPackages map[string]*packageInfo // information about all known packages.
|
||||
knownPackages map[string]*PackageInfo // information about all known packages.
|
||||
}
|
||||
|
||||
// loadPackageNames saves the package names for everything referenced by imports.
|
||||
func (p *pass) loadPackageNames(imports []*ImportInfo) error {
|
||||
if p.env.Logf != nil {
|
||||
p.env.Logf("loading package names for %v packages", len(imports))
|
||||
func (p *pass) loadPackageNames(ctx context.Context, imports []*ImportInfo) error {
|
||||
if p.logf != nil {
|
||||
p.logf("loading package names for %v packages", len(imports))
|
||||
defer func() {
|
||||
p.env.Logf("done loading package names for %v packages", len(imports))
|
||||
p.logf("done loading package names for %v packages", len(imports))
|
||||
}()
|
||||
}
|
||||
var unknown []string
|
||||
@@ -288,20 +274,17 @@ func (p *pass) loadPackageNames(imports []*ImportInfo) error {
|
||||
unknown = append(unknown, imp.ImportPath)
|
||||
}
|
||||
|
||||
resolver, err := p.env.GetResolver()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
names, err := resolver.loadPackageNames(unknown, p.srcDir)
|
||||
names, err := p.source.LoadPackageNames(ctx, p.srcDir, unknown)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO(rfindley): revisit this. Why do we need to store known packages with
|
||||
// no exports? The inconsistent data is confusing.
|
||||
for path, name := range names {
|
||||
p.knownPackages[path] = &packageInfo{
|
||||
name: name,
|
||||
exports: map[string]bool{},
|
||||
p.knownPackages[path] = &PackageInfo{
|
||||
Name: name,
|
||||
Exports: map[string]bool{},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
@@ -329,8 +312,8 @@ func (p *pass) importIdentifier(imp *ImportInfo) string {
|
||||
return imp.Name
|
||||
}
|
||||
known := p.knownPackages[imp.ImportPath]
|
||||
if known != nil && known.name != "" {
|
||||
return withoutVersion(known.name)
|
||||
if known != nil && known.Name != "" {
|
||||
return withoutVersion(known.Name)
|
||||
}
|
||||
return ImportPathToAssumedName(imp.ImportPath)
|
||||
}
|
||||
@@ -338,9 +321,9 @@ func (p *pass) importIdentifier(imp *ImportInfo) string {
|
||||
// load reads in everything necessary to run a pass, and reports whether the
|
||||
// file already has all the imports it needs. It fills in p.missingRefs with the
|
||||
// file's missing symbols, if any, or removes unused imports if not.
|
||||
func (p *pass) load() ([]*ImportFix, bool) {
|
||||
p.knownPackages = map[string]*packageInfo{}
|
||||
p.missingRefs = references{}
|
||||
func (p *pass) load(ctx context.Context) ([]*ImportFix, bool) {
|
||||
p.knownPackages = map[string]*PackageInfo{}
|
||||
p.missingRefs = References{}
|
||||
p.existingImports = map[string][]*ImportInfo{}
|
||||
|
||||
// Load basic information about the file in question.
|
||||
@@ -363,10 +346,10 @@ func (p *pass) load() ([]*ImportFix, bool) {
|
||||
// f's imports by the identifier they introduce.
|
||||
imports := collectImports(p.f)
|
||||
if p.loadRealPackageNames {
|
||||
err := p.loadPackageNames(append(imports, p.candidates...))
|
||||
err := p.loadPackageNames(ctx, append(imports, p.candidates...))
|
||||
if err != nil {
|
||||
if p.env.Logf != nil {
|
||||
p.env.Logf("loading package names: %v", err)
|
||||
if p.logf != nil {
|
||||
p.logf("loading package names: %v", err)
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
@@ -536,9 +519,10 @@ func (p *pass) assumeSiblingImportsValid() {
|
||||
// We have the stdlib in memory; no need to guess.
|
||||
rights = symbolNameSet(m)
|
||||
}
|
||||
p.addCandidate(imp, &packageInfo{
|
||||
// TODO(rfindley): we should set package name here, for consistency.
|
||||
p.addCandidate(imp, &PackageInfo{
|
||||
// no name; we already know it.
|
||||
exports: rights,
|
||||
Exports: rights,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -547,14 +531,14 @@ func (p *pass) assumeSiblingImportsValid() {
|
||||
|
||||
// addCandidate adds a candidate import to p, and merges in the information
|
||||
// in pkg.
|
||||
func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) {
|
||||
func (p *pass) addCandidate(imp *ImportInfo, pkg *PackageInfo) {
|
||||
p.candidates = append(p.candidates, imp)
|
||||
if existing, ok := p.knownPackages[imp.ImportPath]; ok {
|
||||
if existing.name == "" {
|
||||
existing.name = pkg.name
|
||||
if existing.Name == "" {
|
||||
existing.Name = pkg.Name
|
||||
}
|
||||
for export := range pkg.exports {
|
||||
existing.exports[export] = true
|
||||
for export := range pkg.Exports {
|
||||
existing.Exports[export] = true
|
||||
}
|
||||
} else {
|
||||
p.knownPackages[imp.ImportPath] = pkg
|
||||
@@ -563,33 +547,61 @@ func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) {
|
||||
|
||||
// fixImports adds and removes imports from f so that all its references are
|
||||
// satisfied and there are no unused imports.
|
||||
func fixImports(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
|
||||
//
|
||||
// This is declared as a variable rather than a function so goimports can
|
||||
// easily be extended by adding a file with an init function.
|
||||
//
|
||||
// DO NOT REMOVE: used internally at Google.
|
||||
var fixImports = fixImportsDefault
|
||||
|
||||
func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
|
||||
fixes, err := getFixes(context.Background(), fset, f, filename, env)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
apply(fset, f, fixes)
|
||||
return err
|
||||
return nil
|
||||
}
|
||||
|
||||
// getFixes gets the import fixes that need to be made to f in order to fix the imports.
|
||||
// It does not modify the ast.
|
||||
func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
|
||||
source, err := NewProcessEnvSource(env, filename, f.Name.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
goEnv, err := env.goEnv()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return getFixesWithSource(ctx, fset, f, filename, goEnv["GOROOT"], env.logf, source)
|
||||
}
|
||||
|
||||
func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, goroot string, logf func(string, ...any), source Source) ([]*ImportFix, error) {
|
||||
// This logic is defensively duplicated from getFixes.
|
||||
abs, err := filepath.Abs(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
srcDir := filepath.Dir(abs)
|
||||
if env.Logf != nil {
|
||||
env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
|
||||
|
||||
if logf != nil {
|
||||
logf("fixImports(filename=%q), srcDir=%q ...", filename, srcDir)
|
||||
}
|
||||
|
||||
// First pass: looking only at f, and using the naive algorithm to
|
||||
// derive package names from import paths, see if the file is already
|
||||
// complete. We can't add any imports yet, because we don't know
|
||||
// if missing references are actually package vars.
|
||||
p := &pass{fset: fset, f: f, srcDir: srcDir, env: env}
|
||||
if fixes, done := p.load(); done {
|
||||
p := &pass{
|
||||
fset: fset,
|
||||
f: f,
|
||||
srcDir: srcDir,
|
||||
logf: logf,
|
||||
goroot: goroot,
|
||||
source: source,
|
||||
}
|
||||
if fixes, done := p.load(ctx); done {
|
||||
return fixes, nil
|
||||
}
|
||||
|
||||
@@ -601,7 +613,7 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st
|
||||
// Second pass: add information from other files in the same package,
|
||||
// like their package vars and imports.
|
||||
p.otherFiles = otherFiles
|
||||
if fixes, done := p.load(); done {
|
||||
if fixes, done := p.load(ctx); done {
|
||||
return fixes, nil
|
||||
}
|
||||
|
||||
@@ -614,10 +626,17 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st
|
||||
|
||||
// Third pass: get real package names where we had previously used
|
||||
// the naive algorithm.
|
||||
p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
|
||||
p = &pass{
|
||||
fset: fset,
|
||||
f: f,
|
||||
srcDir: srcDir,
|
||||
logf: logf,
|
||||
goroot: goroot,
|
||||
source: p.source, // safe to reuse, as it's just a wrapper around env
|
||||
}
|
||||
p.loadRealPackageNames = true
|
||||
p.otherFiles = otherFiles
|
||||
if fixes, done := p.load(); done {
|
||||
if fixes, done := p.load(ctx); done {
|
||||
return fixes, nil
|
||||
}
|
||||
|
||||
@@ -762,7 +781,7 @@ func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix
|
||||
return true
|
||||
},
|
||||
dirFound: func(pkg *pkg) bool {
|
||||
if !canUse(filename, pkg.dir) {
|
||||
if !CanUse(filename, pkg.dir) {
|
||||
return false
|
||||
}
|
||||
// Try the assumed package name first, then a simpler path match
|
||||
@@ -797,7 +816,7 @@ func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix,
|
||||
return true
|
||||
},
|
||||
dirFound: func(pkg *pkg) bool {
|
||||
if !canUse(filename, pkg.dir) {
|
||||
if !CanUse(filename, pkg.dir) {
|
||||
return false
|
||||
}
|
||||
return strings.HasPrefix(pkg.importPathShort, searchPrefix)
|
||||
@@ -831,7 +850,7 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP
|
||||
return true
|
||||
},
|
||||
dirFound: func(pkg *pkg) bool {
|
||||
return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
|
||||
return pkgIsCandidate(filename, References{searchPkg: nil}, pkg)
|
||||
},
|
||||
packageNameLoaded: func(pkg *pkg) bool {
|
||||
return pkg.packageName == searchPkg
|
||||
@@ -909,7 +928,7 @@ type ProcessEnv struct {
|
||||
WorkingDir string
|
||||
|
||||
// If Logf is non-nil, debug logging is enabled through this function.
|
||||
Logf func(format string, args ...interface{})
|
||||
Logf func(format string, args ...any)
|
||||
|
||||
// If set, ModCache holds a shared cache of directory info to use across
|
||||
// multiple ProcessEnvs.
|
||||
@@ -950,9 +969,7 @@ func (e *ProcessEnv) CopyConfig() *ProcessEnv {
|
||||
resolver: nil,
|
||||
Env: map[string]string{},
|
||||
}
|
||||
for k, v := range e.Env {
|
||||
copy.Env[k] = v
|
||||
}
|
||||
maps.Copy(copy.Env, e.Env)
|
||||
return copy
|
||||
}
|
||||
|
||||
@@ -985,9 +1002,7 @@ func (e *ProcessEnv) init() error {
|
||||
if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil {
|
||||
return err
|
||||
}
|
||||
for k, v := range goEnv {
|
||||
e.Env[k] = v
|
||||
}
|
||||
maps.Copy(e.Env, goEnv)
|
||||
e.initialized = true
|
||||
return nil
|
||||
}
|
||||
@@ -1012,18 +1027,28 @@ func (e *ProcessEnv) GetResolver() (Resolver, error) {
|
||||
//
|
||||
// For gopls, we can optionally explicitly choose a resolver type, since we
|
||||
// already know the view type.
|
||||
if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 {
|
||||
if e.Env["GOMOD"] == "" && (e.Env["GOWORK"] == "" || e.Env["GOWORK"] == "off") {
|
||||
e.resolver = newGopathResolver(e)
|
||||
e.logf("created gopath resolver")
|
||||
} else if r, err := newModuleResolver(e, e.ModCache); err != nil {
|
||||
e.resolverErr = err
|
||||
e.logf("failed to create module resolver: %v", err)
|
||||
} else {
|
||||
e.resolver = Resolver(r)
|
||||
e.logf("created module resolver")
|
||||
}
|
||||
}
|
||||
|
||||
return e.resolver, e.resolverErr
|
||||
}
|
||||
|
||||
// logf logs if e.Logf is non-nil.
|
||||
func (e *ProcessEnv) logf(format string, args ...any) {
|
||||
if e.Logf != nil {
|
||||
e.Logf(format, args...)
|
||||
}
|
||||
}
|
||||
|
||||
// buildContext returns the build.Context to use for matching files.
|
||||
//
|
||||
// TODO(rfindley): support dynamic GOOS, GOARCH here, when doing cross-platform
|
||||
@@ -1072,11 +1097,7 @@ func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string)
|
||||
return e.GocmdRunner.Run(ctx, inv)
|
||||
}
|
||||
|
||||
func addStdlibCandidates(pass *pass, refs references) error {
|
||||
goenv, err := pass.env.goEnv()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func addStdlibCandidates(pass *pass, refs References) error {
|
||||
localbase := func(nm string) string {
|
||||
ans := path.Base(nm)
|
||||
if ans[0] == 'v' {
|
||||
@@ -1091,13 +1112,13 @@ func addStdlibCandidates(pass *pass, refs references) error {
|
||||
}
|
||||
add := func(pkg string) {
|
||||
// Prevent self-imports.
|
||||
if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir {
|
||||
if path.Base(pkg) == pass.f.Name.Name && filepath.Join(pass.goroot, "src", pkg) == pass.srcDir {
|
||||
return
|
||||
}
|
||||
exports := symbolNameSet(stdlib.PackageSymbols[pkg])
|
||||
pass.addCandidate(
|
||||
&ImportInfo{ImportPath: pkg},
|
||||
&packageInfo{name: localbase(pkg), exports: exports})
|
||||
&PackageInfo{Name: localbase(pkg), Exports: exports})
|
||||
}
|
||||
for left := range refs {
|
||||
if left == "rand" {
|
||||
@@ -1108,6 +1129,9 @@ func addStdlibCandidates(pass *pass, refs references) error {
|
||||
// but we have no way of figuring out what the user is using
|
||||
// TODO: investigate using the toolchain version to disambiguate in the stdlib
|
||||
add("math/rand/v2")
|
||||
// math/rand has an overlapping API
|
||||
// TestIssue66407 fails without this
|
||||
add("math/rand")
|
||||
continue
|
||||
}
|
||||
for importPath := range stdlib.PackageSymbols {
|
||||
@@ -1127,8 +1151,8 @@ type Resolver interface {
|
||||
// scan works with callback to search for packages. See scanCallback for details.
|
||||
scan(ctx context.Context, callback *scanCallback) error
|
||||
|
||||
// loadExports returns the set of exported symbols in the package at dir.
|
||||
// loadExports may be called concurrently.
|
||||
// loadExports returns the package name and set of exported symbols in the
|
||||
// package at dir. loadExports may be called concurrently.
|
||||
loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error)
|
||||
|
||||
// scoreImportPath returns the relevance for an import path.
|
||||
@@ -1161,101 +1185,22 @@ type scanCallback struct {
|
||||
exportsLoaded func(pkg *pkg, exports []stdlib.Symbol)
|
||||
}
|
||||
|
||||
func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error {
|
||||
func addExternalCandidates(ctx context.Context, pass *pass, refs References, filename string) error {
|
||||
ctx, done := event.Start(ctx, "imports.addExternalCandidates")
|
||||
defer done()
|
||||
|
||||
var mu sync.Mutex
|
||||
found := make(map[string][]pkgDistance)
|
||||
callback := &scanCallback{
|
||||
rootFound: func(gopathwalk.Root) bool {
|
||||
return true // We want everything.
|
||||
},
|
||||
dirFound: func(pkg *pkg) bool {
|
||||
return pkgIsCandidate(filename, refs, pkg)
|
||||
},
|
||||
packageNameLoaded: func(pkg *pkg) bool {
|
||||
if _, want := refs[pkg.packageName]; !want {
|
||||
return false
|
||||
}
|
||||
if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
|
||||
// The candidate is in the same directory and has the
|
||||
// same package name. Don't try to import ourselves.
|
||||
return false
|
||||
}
|
||||
if !canUse(filename, pkg.dir) {
|
||||
return false
|
||||
}
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
|
||||
return false // We'll do our own loading after we sort.
|
||||
},
|
||||
}
|
||||
resolver, err := pass.env.GetResolver()
|
||||
results, err := pass.source.ResolveReferences(ctx, filename, refs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = resolver.scan(ctx, callback); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Search for imports matching potential package references.
|
||||
type result struct {
|
||||
imp *ImportInfo
|
||||
pkg *packageInfo
|
||||
}
|
||||
results := make(chan result, len(refs))
|
||||
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
var wg sync.WaitGroup
|
||||
defer func() {
|
||||
cancel()
|
||||
wg.Wait()
|
||||
}()
|
||||
var (
|
||||
firstErr error
|
||||
firstErrOnce sync.Once
|
||||
)
|
||||
for pkgName, symbols := range refs {
|
||||
wg.Add(1)
|
||||
go func(pkgName string, symbols map[string]bool) {
|
||||
defer wg.Done()
|
||||
|
||||
found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols)
|
||||
|
||||
if err != nil {
|
||||
firstErrOnce.Do(func() {
|
||||
firstErr = err
|
||||
cancel()
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if found == nil {
|
||||
return // No matching package.
|
||||
}
|
||||
|
||||
imp := &ImportInfo{
|
||||
ImportPath: found.importPathShort,
|
||||
}
|
||||
|
||||
pkg := &packageInfo{
|
||||
name: pkgName,
|
||||
exports: symbols,
|
||||
}
|
||||
results <- result{imp, pkg}
|
||||
}(pkgName, symbols)
|
||||
}
|
||||
go func() {
|
||||
wg.Wait()
|
||||
close(results)
|
||||
}()
|
||||
|
||||
for result := range results {
|
||||
for _, result := range results {
|
||||
if result == nil {
|
||||
continue
|
||||
}
|
||||
// Don't offer completions that would shadow predeclared
|
||||
// names, such as github.com/coreos/etcd/error.
|
||||
if types.Universe.Lookup(result.pkg.name) != nil { // predeclared
|
||||
if types.Universe.Lookup(result.Package.Name) != nil { // predeclared
|
||||
// Ideally we would skip this candidate only
|
||||
// if the predeclared name is actually
|
||||
// referenced by the file, but that's a lot
|
||||
@@ -1264,9 +1209,9 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil
|
||||
// user before long.
|
||||
continue
|
||||
}
|
||||
pass.addCandidate(result.imp, result.pkg)
|
||||
pass.addCandidate(result.Import, result.Package)
|
||||
}
|
||||
return firstErr
|
||||
return nil
|
||||
}
|
||||
|
||||
// notIdentifier reports whether ch is an invalid identifier character.
|
||||
@@ -1608,11 +1553,10 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl
|
||||
}
|
||||
|
||||
fullFile := filepath.Join(dir, fi.Name())
|
||||
// Legacy ast.Object resolution is needed here.
|
||||
f, err := parser.ParseFile(fset, fullFile, nil, 0)
|
||||
if err != nil {
|
||||
if env.Logf != nil {
|
||||
env.Logf("error parsing %v: %v", fullFile, err)
|
||||
}
|
||||
env.logf("error parsing %v: %v", fullFile, err)
|
||||
continue
|
||||
}
|
||||
if f.Name.Name == "documentation" {
|
||||
@@ -1648,9 +1592,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl
|
||||
}
|
||||
sortSymbols(exports)
|
||||
|
||||
if env.Logf != nil {
|
||||
env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, exports)
|
||||
}
|
||||
env.logf("loaded exports in dir %v (package %v): %v", dir, pkgName, exports)
|
||||
return pkgName, exports, nil
|
||||
}
|
||||
|
||||
@@ -1660,25 +1602,39 @@ func sortSymbols(syms []stdlib.Symbol) {
|
||||
})
|
||||
}
|
||||
|
||||
// findImport searches for a package with the given symbols.
|
||||
// If no package is found, findImport returns ("", false, nil)
|
||||
func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool) (*pkg, error) {
|
||||
// A symbolSearcher searches for a package with a set of symbols, among a set
|
||||
// of candidates. See [symbolSearcher.search].
|
||||
//
|
||||
// The search occurs within the scope of a single file, with context captured
|
||||
// in srcDir and xtest.
|
||||
type symbolSearcher struct {
|
||||
logf func(string, ...any)
|
||||
srcDir string // directory containing the file
|
||||
xtest bool // if set, the file containing is an x_test file
|
||||
loadExports func(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error)
|
||||
}
|
||||
|
||||
// search searches the provided candidates for a package containing all
|
||||
// exported symbols.
|
||||
//
|
||||
// If successful, returns the resulting package.
|
||||
func (s *symbolSearcher) search(ctx context.Context, candidates []pkgDistance, pkgName string, symbols map[string]bool) (*pkg, error) {
|
||||
// Sort the candidates by their import package length,
|
||||
// assuming that shorter package names are better than long
|
||||
// ones. Note that this sorts by the de-vendored name, so
|
||||
// there's no "penalty" for vendoring.
|
||||
sort.Sort(byDistanceOrImportPathShortLength(candidates))
|
||||
if pass.env.Logf != nil {
|
||||
if s.logf != nil {
|
||||
for i, c := range candidates {
|
||||
pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
|
||||
s.logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
|
||||
}
|
||||
}
|
||||
resolver, err := pass.env.GetResolver()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Collect exports for packages with matching names.
|
||||
// Arrange rescv so that we can we can await results in order of relevance
|
||||
// and exit as soon as we find the first match.
|
||||
//
|
||||
// Search with bounded concurrency, returning as soon as the first result
|
||||
// among rescv is non-nil.
|
||||
rescv := make([]chan *pkg, len(candidates))
|
||||
for i := range candidates {
|
||||
rescv[i] = make(chan *pkg, 1)
|
||||
@@ -1686,6 +1642,7 @@ func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgNa
|
||||
const maxConcurrentPackageImport = 4
|
||||
loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
|
||||
|
||||
// Ensure that all work is completed at exit.
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
var wg sync.WaitGroup
|
||||
defer func() {
|
||||
@@ -1693,6 +1650,7 @@ func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgNa
|
||||
wg.Wait()
|
||||
}()
|
||||
|
||||
// Start the search.
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
@@ -1703,55 +1661,67 @@ func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgNa
|
||||
return
|
||||
}
|
||||
|
||||
i := i
|
||||
c := c
|
||||
wg.Add(1)
|
||||
go func(c pkgDistance, resc chan<- *pkg) {
|
||||
go func() {
|
||||
defer func() {
|
||||
<-loadExportsSem
|
||||
wg.Done()
|
||||
}()
|
||||
|
||||
if pass.env.Logf != nil {
|
||||
pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
|
||||
if s.logf != nil {
|
||||
s.logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
|
||||
}
|
||||
// If we're an x_test, load the package under test's test variant.
|
||||
includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
|
||||
_, exports, err := resolver.loadExports(ctx, c.pkg, includeTest)
|
||||
pkg, err := s.searchOne(ctx, c, symbols)
|
||||
if err != nil {
|
||||
if pass.env.Logf != nil {
|
||||
pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
|
||||
if s.logf != nil && ctx.Err() == nil {
|
||||
s.logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
|
||||
}
|
||||
resc <- nil
|
||||
return
|
||||
pkg = nil
|
||||
}
|
||||
|
||||
exportsMap := make(map[string]bool, len(exports))
|
||||
for _, sym := range exports {
|
||||
exportsMap[sym.Name] = true
|
||||
}
|
||||
|
||||
// If it doesn't have the right
|
||||
// symbols, send nil to mean no match.
|
||||
for symbol := range symbols {
|
||||
if !exportsMap[symbol] {
|
||||
resc <- nil
|
||||
return
|
||||
}
|
||||
}
|
||||
resc <- c.pkg
|
||||
}(c, rescv[i])
|
||||
rescv[i] <- pkg // may be nil
|
||||
}()
|
||||
}
|
||||
}()
|
||||
|
||||
// Await the first (best) result.
|
||||
for _, resc := range rescv {
|
||||
pkg := <-resc
|
||||
if pkg == nil {
|
||||
continue
|
||||
select {
|
||||
case r := <-resc:
|
||||
if r != nil {
|
||||
return r, nil
|
||||
}
|
||||
case <-ctx.Done():
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
return pkg, nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols map[string]bool) (*pkg, error) {
|
||||
if ctx.Err() != nil {
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
// If we're considering the package under test from an x_test, load the
|
||||
// test variant.
|
||||
includeTest := s.xtest && c.pkg.dir == s.srcDir
|
||||
_, exports, err := s.loadExports(ctx, c.pkg, includeTest)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
exportsMap := make(map[string]bool, len(exports))
|
||||
for _, sym := range exports {
|
||||
exportsMap[sym.Name] = true
|
||||
}
|
||||
for symbol := range symbols {
|
||||
if !exportsMap[symbol] {
|
||||
return nil, nil // no match
|
||||
}
|
||||
}
|
||||
return c.pkg, nil
|
||||
}
|
||||
|
||||
// pkgIsCandidate reports whether pkg is a candidate for satisfying the
|
||||
// finding which package pkgIdent in the file named by filename is trying
|
||||
// to refer to.
|
||||
@@ -1764,68 +1734,34 @@ func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgNa
|
||||
// filename is the file being formatted.
|
||||
// pkgIdent is the package being searched for, like "client" (if
|
||||
// searching for "client.New")
|
||||
func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
|
||||
func pkgIsCandidate(filename string, refs References, pkg *pkg) bool {
|
||||
// Check "internal" and "vendor" visibility:
|
||||
if !canUse(filename, pkg.dir) {
|
||||
if !CanUse(filename, pkg.dir) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Speed optimization to minimize disk I/O:
|
||||
// the last two components on disk must contain the
|
||||
// package name somewhere.
|
||||
//
|
||||
// This permits mismatch naming like directory
|
||||
// "go-foo" being package "foo", or "pkg.v3" being "pkg",
|
||||
// or directory "google.golang.org/api/cloudbilling/v1"
|
||||
// being package "cloudbilling", but doesn't
|
||||
// permit a directory "foo" to be package
|
||||
// "bar", which is strongly discouraged
|
||||
// anyway. There's no reason goimports needs
|
||||
// to be slow just to accommodate that.
|
||||
// Use the matchesPath heuristic to filter to package paths that could
|
||||
// reasonably match a dangling reference.
|
||||
//
|
||||
// This permits mismatch naming like directory "go-foo" being package "foo",
|
||||
// or "pkg.v3" being "pkg", or directory
|
||||
// "google.golang.org/api/cloudbilling/v1" being package "cloudbilling", but
|
||||
// doesn't permit a directory "foo" to be package "bar", which is strongly
|
||||
// discouraged anyway. There's no reason goimports needs to be slow just to
|
||||
// accommodate that.
|
||||
for pkgIdent := range refs {
|
||||
lastTwo := lastTwoComponents(pkg.importPathShort)
|
||||
if strings.Contains(lastTwo, pkgIdent) {
|
||||
return true
|
||||
}
|
||||
if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
|
||||
lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
|
||||
if strings.Contains(lastTwo, pkgIdent) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func hasHyphenOrUpperASCII(s string) bool {
|
||||
for i := 0; i < len(s); i++ {
|
||||
b := s[i]
|
||||
if b == '-' || ('A' <= b && b <= 'Z') {
|
||||
if matchesPath(pkgIdent, pkg.importPathShort) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func lowerASCIIAndRemoveHyphen(s string) (ret string) {
|
||||
buf := make([]byte, 0, len(s))
|
||||
for i := 0; i < len(s); i++ {
|
||||
b := s[i]
|
||||
switch {
|
||||
case b == '-':
|
||||
continue
|
||||
case 'A' <= b && b <= 'Z':
|
||||
buf = append(buf, b+('a'-'A'))
|
||||
default:
|
||||
buf = append(buf, b)
|
||||
}
|
||||
}
|
||||
return string(buf)
|
||||
}
|
||||
|
||||
// canUse reports whether the package in dir is usable from filename,
|
||||
// CanUse reports whether the package in dir is usable from filename,
|
||||
// respecting the Go "internal" and "vendor" visibility rules.
|
||||
func canUse(filename, dir string) bool {
|
||||
func CanUse(filename, dir string) bool {
|
||||
// Fast path check, before any allocations. If it doesn't contain vendor
|
||||
// or internal, it's not tricky:
|
||||
// Note that this can false-negative on directories like "notinternal",
|
||||
@@ -1863,19 +1799,84 @@ func canUse(filename, dir string) bool {
|
||||
return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
|
||||
}
|
||||
|
||||
// lastTwoComponents returns at most the last two path components
|
||||
// of v, using either / or \ as the path separator.
|
||||
func lastTwoComponents(v string) string {
|
||||
// matchesPath reports whether ident may match a potential package name
|
||||
// referred to by path, using heuristics to filter out unidiomatic package
|
||||
// names.
|
||||
//
|
||||
// Specifically, it checks whether either of the last two '/'- or '\'-delimited
|
||||
// path segments matches the identifier. The segment-matching heuristic must
|
||||
// allow for various conventions around segment naming, including go-foo,
|
||||
// foo-go, and foo.v3. To handle all of these, matching considers both (1) the
|
||||
// entire segment, ignoring '-' and '.', as well as (2) the last subsegment
|
||||
// separated by '-' or '.'. So the segment foo-go matches all of the following
|
||||
// identifiers: foo, go, and foogo. All matches are case insensitive (for ASCII
|
||||
// identifiers).
|
||||
//
|
||||
// See the docstring for [pkgIsCandidate] for an explanation of how this
|
||||
// heuristic filters potential candidate packages.
|
||||
func matchesPath(ident, path string) bool {
|
||||
// Ignore case, for ASCII.
|
||||
lowerIfASCII := func(b byte) byte {
|
||||
if 'A' <= b && b <= 'Z' {
|
||||
return b + ('a' - 'A')
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// match reports whether path[start:end] matches ident, ignoring [.-].
|
||||
match := func(start, end int) bool {
|
||||
ii := len(ident) - 1 // current byte in ident
|
||||
pi := end - 1 // current byte in path
|
||||
for ; pi >= start && ii >= 0; pi-- {
|
||||
pb := path[pi]
|
||||
if pb == '-' || pb == '.' {
|
||||
continue
|
||||
}
|
||||
pb = lowerIfASCII(pb)
|
||||
ib := lowerIfASCII(ident[ii])
|
||||
if pb != ib {
|
||||
return false
|
||||
}
|
||||
ii--
|
||||
}
|
||||
return ii < 0 && pi < start // all bytes matched
|
||||
}
|
||||
|
||||
// segmentEnd and subsegmentEnd hold the end points of the current segment
|
||||
// and subsegment intervals.
|
||||
segmentEnd := len(path)
|
||||
subsegmentEnd := len(path)
|
||||
|
||||
// Count slashes; we only care about the last two segments.
|
||||
nslash := 0
|
||||
for i := len(v) - 1; i >= 0; i-- {
|
||||
if v[i] == '/' || v[i] == '\\' {
|
||||
|
||||
for i := len(path) - 1; i >= 0; i-- {
|
||||
switch b := path[i]; b {
|
||||
// TODO(rfindley): we handle backlashes here only because the previous
|
||||
// heuristic handled backslashes. This is perhaps overly defensive, but is
|
||||
// the result of many lessons regarding Chesterton's fence and the
|
||||
// goimports codebase.
|
||||
//
|
||||
// However, this function is only ever called with something called an
|
||||
// 'importPath'. Is it possible that this is a real import path, and
|
||||
// therefore we need only consider forward slashes?
|
||||
case '/', '\\':
|
||||
if match(i+1, segmentEnd) || match(i+1, subsegmentEnd) {
|
||||
return true
|
||||
}
|
||||
nslash++
|
||||
if nslash == 2 {
|
||||
return v[i:]
|
||||
return false // did not match above
|
||||
}
|
||||
segmentEnd, subsegmentEnd = i, i // reset
|
||||
case '-', '.':
|
||||
if match(i+1, subsegmentEnd) {
|
||||
return true
|
||||
}
|
||||
subsegmentEnd = i
|
||||
}
|
||||
}
|
||||
return v
|
||||
return match(0, segmentEnd) || match(0, subsegmentEnd)
|
||||
}
|
||||
|
||||
type visitFn func(node ast.Node) ast.Visitor
|
||||
|
33 changes: vendor/golang.org/x/tools/internal/imports/imports.go (generated, vendored)
@@ -47,7 +47,14 @@ type Options struct {
|
||||
// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env.
|
||||
func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) {
|
||||
fileSet := token.NewFileSet()
|
||||
file, adjust, err := parse(fileSet, filename, src, opt)
|
||||
var parserMode parser.Mode
|
||||
if opt.Comments {
|
||||
parserMode |= parser.ParseComments
|
||||
}
|
||||
if opt.AllErrors {
|
||||
parserMode |= parser.AllErrors
|
||||
}
|
||||
file, adjust, err := parse(fileSet, filename, src, parserMode, opt.Fragment)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -66,17 +73,19 @@ func Process(filename string, src []byte, opt *Options) (formatted []byte, err e
|
||||
//
|
||||
// Note that filename's directory influences which imports can be chosen,
|
||||
// so it is important that filename be accurate.
|
||||
func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
|
||||
func FixImports(ctx context.Context, filename string, src []byte, goroot string, logf func(string, ...any), source Source) (fixes []*ImportFix, err error) {
|
||||
ctx, done := event.Start(ctx, "imports.FixImports")
|
||||
defer done()
|
||||
|
||||
fileSet := token.NewFileSet()
|
||||
file, _, err := parse(fileSet, filename, src, opt)
|
||||
// TODO(rfindley): these default values for ParseComments and AllErrors were
|
||||
// extracted from gopls, but are they even needed?
|
||||
file, _, err := parse(fileSet, filename, src, parser.ParseComments|parser.AllErrors, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return getFixes(ctx, fileSet, file, filename, opt.Env)
|
||||
return getFixesWithSource(ctx, fileSet, file, filename, goroot, logf, source)
|
||||
}
|
||||
|
||||
// ApplyFixes applies all of the fixes to the file and formats it. extraMode
|
||||
@@ -86,7 +95,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
|
||||
// Don't use parse() -- we don't care about fragments or statement lists
|
||||
// here, and we need to work with unparseable files.
|
||||
fileSet := token.NewFileSet()
|
||||
parserMode := parser.Mode(0)
|
||||
parserMode := parser.SkipObjectResolution
|
||||
if opt.Comments {
|
||||
parserMode |= parser.ParseComments
|
||||
}
|
||||
@@ -114,7 +123,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
|
||||
// formatted file, and returns the postpocessed result.
|
||||
func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
|
||||
mergeImports(file)
|
||||
sortImports(opt.LocalPrefix, fset.File(file.Pos()), file)
|
||||
sortImports(opt.LocalPrefix, fset.File(file.FileStart), file)
|
||||
var spacesBefore []string // import paths we need spaces before
|
||||
for _, impSection := range astutil.Imports(fset, file) {
|
||||
// Within each block of contiguous imports, see if any
|
||||
@@ -164,13 +173,9 @@ func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(ori
|
||||
|
||||
// parse parses src, which was read from filename,
|
||||
// as a Go source file or statement list.
|
||||
func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
|
||||
parserMode := parser.Mode(0)
|
||||
if opt.Comments {
|
||||
parserMode |= parser.ParseComments
|
||||
}
|
||||
if opt.AllErrors {
|
||||
parserMode |= parser.AllErrors
|
||||
func parse(fset *token.FileSet, filename string, src []byte, parserMode parser.Mode, fragment bool) (*ast.File, func(orig, src []byte) []byte, error) {
|
||||
if parserMode&parser.SkipObjectResolution != 0 {
|
||||
panic("legacy ast.Object resolution is required")
|
||||
}
|
||||
|
||||
// Try as whole source file.
|
||||
@@ -181,7 +186,7 @@ func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast
|
||||
// If the error is that the source file didn't begin with a
|
||||
// package line and we accept fragmented input, fall through to
|
||||
// try as a source fragment. Stop and return on any other error.
|
||||
if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
|
||||
if !fragment || !strings.Contains(err.Error(), "expected 'package'") {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
|
22 changes: vendor/golang.org/x/tools/internal/imports/mod.go (generated, vendored)
@@ -13,6 +13,7 @@ import (
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"slices"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -150,8 +151,8 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe
|
||||
Path: "",
|
||||
Dir: filepath.Join(filepath.Dir(goWork), "vendor"),
|
||||
}
|
||||
r.modsByModPath = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod)
|
||||
r.modsByDir = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod)
|
||||
r.modsByModPath = append(slices.Clone(mainModsVendor), r.dummyVendorMod)
|
||||
r.modsByDir = append(slices.Clone(mainModsVendor), r.dummyVendorMod)
|
||||
}
|
||||
} else {
|
||||
// Vendor mode is off, so run go list -m ... to find everything.
|
||||
@@ -245,7 +246,10 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe
|
||||
// 2. Use this to separate module cache scanning from other scanning.
|
||||
func gomodcacheForEnv(goenv map[string]string) string {
|
||||
if gmc := goenv["GOMODCACHE"]; gmc != "" {
|
||||
return gmc
|
||||
// golang/go#67156: ensure that the module cache is clean, since it is
|
||||
// assumed as a prefix to directories scanned by gopathwalk, which are
|
||||
// themselves clean.
|
||||
return filepath.Clean(gmc)
|
||||
}
|
||||
gopaths := filepath.SplitList(goenv["GOPATH"])
|
||||
if len(gopaths) == 0 {
|
||||
@@ -265,9 +269,7 @@ func (r *ModuleResolver) initAllMods() error {
|
||||
return err
|
||||
}
|
||||
if mod.Dir == "" {
|
||||
if r.env.Logf != nil {
|
||||
r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path)
|
||||
}
|
||||
r.env.logf("module %v has not been downloaded and will be ignored", mod.Path)
|
||||
// Can't do anything with a module that's not downloaded.
|
||||
continue
|
||||
}
|
||||
@@ -742,8 +744,8 @@ func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest
|
||||
|
||||
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
|
||||
subdir := ""
|
||||
if dir != root.Path {
|
||||
subdir = dir[len(root.Path)+len("/"):]
|
||||
if prefix := root.Path + string(filepath.Separator); strings.HasPrefix(dir, prefix) {
|
||||
subdir = dir[len(prefix):]
|
||||
}
|
||||
importPath := filepath.ToSlash(subdir)
|
||||
if strings.HasPrefix(importPath, "vendor/") {
|
||||
@@ -766,9 +768,7 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
|
||||
}
|
||||
modPath, err := module.UnescapePath(filepath.ToSlash(matches[1]))
|
||||
if err != nil {
|
||||
if r.env.Logf != nil {
|
||||
r.env.Logf("decoding module cache path %q: %v", subdir, err)
|
||||
}
|
||||
r.env.logf("decoding module cache path %q: %v", subdir, err)
|
||||
return directoryPackageInfo{
|
||||
status: directoryScanned,
|
||||
err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
|
||||
|
4 changes: vendor/golang.org/x/tools/internal/imports/mod_cache.go (generated, vendored)
@@ -128,7 +128,7 @@ func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener
 	// are going to be. Setting an arbitrary limit makes it much easier.
 	const maxInFlight = 10
 	sema := make(chan struct{}, maxInFlight)
-	for i := 0; i < maxInFlight; i++ {
+	for range maxInFlight {
 		sema <- struct{}{}
 	}
 
@@ -156,7 +156,7 @@ func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener
 		d.mu.Lock()
 		delete(d.listeners, cookie)
 		d.mu.Unlock()
-		for i := 0; i < maxInFlight; i++ {
+		for range maxInFlight {
 			<-sema
 		}
 	}
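Both hunks above swap a counted loop for Go 1.22's range-over-integer form, one of the newer idioms this vendored update (and the bump to go 1.24) brings in. A minimal, self-contained sketch of the idiom, with illustrative names only:

```go
package main

import "fmt"

func main() {
	const maxInFlight = 10
	sema := make(chan struct{}, maxInFlight)
	// Go 1.22+: ranging over an integer runs the body that many times,
	// replacing `for i := 0; i < maxInFlight; i++` when i is unused.
	for range maxInFlight {
		sema <- struct{}{}
	}
	fmt.Println(len(sema)) // 10
}
```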
5 changes: vendor/golang.org/x/tools/internal/imports/sortimports.go (generated, vendored)
@@ -11,6 +11,7 @@ import (
 	"go/ast"
 	"go/token"
 	"log"
+	"slices"
 	"sort"
 	"strconv"
 )
@@ -30,7 +31,7 @@ func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
 
 		if len(d.Specs) == 0 {
 			// Empty import block, remove it.
-			f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+			f.Decls = slices.Delete(f.Decls, i, i+1)
 		}
 
 		if !d.Lparen.IsValid() {
@@ -91,7 +92,7 @@ func mergeImports(f *ast.File) {
 			spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
 			first.Specs = append(first.Specs, spec)
 		}
-		f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+		f.Decls = slices.Delete(f.Decls, i, i+1)
 		i--
 	}
 }
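The replacements above use slices.Delete, which removes the elements s[i:j] and is the stdlib equivalent of the older append(s[:i], s[i+1:]...) idiom for dropping a single element. A small standalone sketch with made-up values:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	decls := []string{"import-a", "empty-import-block", "import-b"}
	i := 1
	// slices.Delete(s, i, i+1) drops s[i], like append(s[:i], s[i+1:]...).
	decls = slices.Delete(decls, i, i+1)
	fmt.Println(decls) // [import-a import-b]
}
```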
63 changes: vendor/golang.org/x/tools/internal/imports/source.go (new file, generated, vendored)
@@ -0,0 +1,63 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package imports

import "context"

// These types document the APIs below.
//
// TODO(rfindley): consider making these defined types rather than aliases.
type (
    ImportPath  = string
    PackageName = string
    Symbol      = string

    // References is set of References found in a Go file. The first map key is the
    // left hand side of a selector expression, the second key is the right hand
    // side, and the value should always be true.
    References = map[PackageName]map[Symbol]bool
)

// A Result satisfies a missing import.
//
// The Import field describes the missing import spec, and the Package field
// summarizes the package exports.
type Result struct {
    Import  *ImportInfo
    Package *PackageInfo
}

// An ImportInfo represents a single import statement.
type ImportInfo struct {
    ImportPath string // import path, e.g. "crypto/rand".
    Name       string // import name, e.g. "crand", or "" if none.
}

// A PackageInfo represents what's known about a package.
type PackageInfo struct {
    Name    string          // package name in the package declaration, if known
    Exports map[string]bool // set of names of known package level sortSymbols
}

// A Source provides imports to satisfy unresolved references in the file being
// fixed.
type Source interface {
    // LoadPackageNames queries PackageName information for the requested import
    // paths, when operating from the provided srcDir.
    //
    // TODO(rfindley): try to refactor to remove this operation.
    LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error)

    // ResolveReferences asks the Source for the best package name to satisfy
    // each of the missing references, in the context of fixing the given
    // filename.
    //
    // Returns a map from package name to a [Result] for that package name that
    // provides the required symbols. Keys may be omitted in the map if no
    // candidates satisfy all missing references for that package name. It is up
    // to each data source to select the best result for each entry in the
    // missing map.
    ResolveReferences(ctx context.Context, filename string, missing References) ([]*Result, error)
}
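To make the new Source interface concrete, here is a minimal sketch of an implementation that never proposes anything; the type name is hypothetical and it is not part of the vendored code. The real implementations are ProcessEnvSource and IndexSource in the new files below:

```go
// nopSource is a hypothetical Source that satisfies nothing; it exists only to
// illustrate the interface shape (it would have to live inside this package).
type nopSource struct{}

func (nopSource) LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) {
	return map[ImportPath]PackageName{}, nil // claims no knowledge of any package name
}

func (nopSource) ResolveReferences(ctx context.Context, filename string, missing References) ([]*Result, error) {
	return nil, nil // proposes no candidates for the missing references
}

var _ Source = nopSource{} // compile-time check that the interface is implemented
```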
129 changes: vendor/golang.org/x/tools/internal/imports/source_env.go (new file, generated, vendored)
@@ -0,0 +1,129 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package imports

import (
    "context"
    "path/filepath"
    "strings"
    "sync"

    "golang.org/x/sync/errgroup"
    "golang.org/x/tools/internal/gopathwalk"
)

// ProcessEnvSource implements the [Source] interface using the legacy
// [ProcessEnv] abstraction.
type ProcessEnvSource struct {
    env      *ProcessEnv
    srcDir   string
    filename string
    pkgName  string
}

// NewProcessEnvSource returns a [ProcessEnvSource] wrapping the given
// env, to be used for fixing imports in the file with name filename in package
// named pkgName.
func NewProcessEnvSource(env *ProcessEnv, filename, pkgName string) (*ProcessEnvSource, error) {
    abs, err := filepath.Abs(filename)
    if err != nil {
        return nil, err
    }
    srcDir := filepath.Dir(abs)
    return &ProcessEnvSource{
        env:      env,
        srcDir:   srcDir,
        filename: filename,
        pkgName:  pkgName,
    }, nil
}

func (s *ProcessEnvSource) LoadPackageNames(ctx context.Context, srcDir string, unknown []string) (map[string]string, error) {
    r, err := s.env.GetResolver()
    if err != nil {
        return nil, err
    }
    return r.loadPackageNames(unknown, srcDir)
}

func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename string, refs map[string]map[string]bool) ([]*Result, error) {
    var mu sync.Mutex
    found := make(map[string][]pkgDistance)
    callback := &scanCallback{
        rootFound: func(gopathwalk.Root) bool {
            return true // We want everything.
        },
        dirFound: func(pkg *pkg) bool {
            return pkgIsCandidate(filename, refs, pkg)
        },
        packageNameLoaded: func(pkg *pkg) bool {
            if _, want := refs[pkg.packageName]; !want {
                return false
            }
            if pkg.dir == s.srcDir && s.pkgName == pkg.packageName {
                // The candidate is in the same directory and has the
                // same package name. Don't try to import ourselves.
                return false
            }
            if !CanUse(filename, pkg.dir) {
                return false
            }
            mu.Lock()
            defer mu.Unlock()
            found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(s.srcDir, pkg.dir)})
            return false // We'll do our own loading after we sort.
        },
    }
    resolver, err := s.env.GetResolver()
    if err != nil {
        return nil, err
    }
    if err := resolver.scan(ctx, callback); err != nil {
        return nil, err
    }

    g, ctx := errgroup.WithContext(ctx)

    searcher := symbolSearcher{
        logf:        s.env.logf,
        srcDir:      s.srcDir,
        xtest:       strings.HasSuffix(s.pkgName, "_test"),
        loadExports: resolver.loadExports,
    }

    var resultMu sync.Mutex
    results := make(map[string]*Result, len(refs))
    for pkgName, symbols := range refs {
        g.Go(func() error {
            found, err := searcher.search(ctx, found[pkgName], pkgName, symbols)
            if err != nil {
                return err
            }
            if found == nil {
                return nil // No matching package.
            }

            imp := &ImportInfo{
                ImportPath: found.importPathShort,
            }
            pkg := &PackageInfo{
                Name:    pkgName,
                Exports: symbols,
            }
            resultMu.Lock()
            results[pkgName] = &Result{Import: imp, Package: pkg}
            resultMu.Unlock()
            return nil
        })
    }
    if err := g.Wait(); err != nil {
        return nil, err
    }
    var ans []*Result
    for _, x := range results {
        ans = append(ans, x)
    }
    return ans, nil
}
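The fix.go hunks above show how this wrapper is wired in: getFixes builds a ProcessEnvSource from the ProcessEnv and forwards to getFixesWithSource. A rough sketch of that flow, assuming it runs inside this package with a populated env (the helper name is hypothetical):

```go
// fixWithSource mirrors the getFixes/getFixesWithSource call chain above; sketch only.
func fixWithSource(ctx context.Context, filename string, env *ProcessEnv) ([]*ImportFix, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return nil, err
	}
	// Wrap the legacy ProcessEnv in the new Source abstraction.
	source, err := NewProcessEnvSource(env, filename, f.Name.Name)
	if err != nil {
		return nil, err
	}
	goEnv, err := env.goEnv() // unexported helper already used by getFixes in the diff
	if err != nil {
		return nil, err
	}
	return getFixesWithSource(ctx, fset, f, filename, goEnv["GOROOT"], env.logf, source)
}
```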
103 changes: vendor/golang.org/x/tools/internal/imports/source_modindex.go (new file, generated, vendored)
@@ -0,0 +1,103 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package imports

import (
    "context"
    "sync"
    "time"

    "golang.org/x/tools/internal/modindex"
)

// This code is here rather than in the modindex package
// to avoid import loops

// implements Source using modindex, so only for module cache.
//
// this is perhaps over-engineered. A new Index is read at first use.
// And then Update is called after every 15 minutes, and a new Index
// is read if the index changed. It is not clear the Mutex is needed.
type IndexSource struct {
    modcachedir string
    mutex       sync.Mutex
    ix          *modindex.Index
    expires     time.Time
}

// create a new Source. Called from NewView in cache/session.go.
func NewIndexSource(cachedir string) *IndexSource {
    return &IndexSource{modcachedir: cachedir}
}

func (s *IndexSource) LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) {
    /// This is used by goimports to resolve the package names of imports of the
    // current package, which is irrelevant for the module cache.
    return nil, nil
}

func (s *IndexSource) ResolveReferences(ctx context.Context, filename string, missing References) ([]*Result, error) {
    if err := s.maybeReadIndex(); err != nil {
        return nil, err
    }
    var cs []modindex.Candidate
    for pkg, nms := range missing {
        for nm := range nms {
            x := s.ix.Lookup(pkg, nm, false)
            cs = append(cs, x...)
        }
    }
    found := make(map[string]*Result)
    for _, c := range cs {
        var x *Result
        if x = found[c.ImportPath]; x == nil {
            x = &Result{
                Import: &ImportInfo{
                    ImportPath: c.ImportPath,
                    Name:       "",
                },
                Package: &PackageInfo{
                    Name:    c.PkgName,
                    Exports: make(map[string]bool),
                },
            }
            found[c.ImportPath] = x
        }
        x.Package.Exports[c.Name] = true
    }
    var ans []*Result
    for _, x := range found {
        ans = append(ans, x)
    }
    return ans, nil
}

func (s *IndexSource) maybeReadIndex() error {
    s.mutex.Lock()
    defer s.mutex.Unlock()

    var readIndex bool
    if time.Now().After(s.expires) {
        ok, err := modindex.Update(s.modcachedir)
        if err != nil {
            return err
        }
        if ok {
            readIndex = true
        }
    }

    if readIndex || s.ix == nil {
        ix, err := modindex.ReadIndex(s.modcachedir)
        if err != nil {
            return err
        }
        s.ix = ix
        // for now refresh every 15 minutes
        s.expires = time.Now().Add(time.Minute * 15)
    }

    return nil
}
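A short usage sketch for the module-cache-backed source above; the cache directory, file name, and missing reference are all made up, and like everything in this internal package it is only callable from x/tools code:

```go
// Sketch only: ask the module index for a package that provides quic.DialAddr.
func exampleIndexSource(ctx context.Context, gomodcache string) error {
	src := NewIndexSource(gomodcache) // gomodcache: the GOMODCACHE directory to index
	missing := References{"quic": {"DialAddr": true}}
	results, err := src.ResolveReferences(ctx, "tunnel.go", missing)
	if err != nil {
		return err
	}
	for _, r := range results {
		fmt.Printf("would import %q for package %q\n", r.Import.ImportPath, r.Package.Name)
	}
	return nil
}
```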