TUN-9016: update go to 1.24

## Summary

Update several moving parts of the cloudflared build system:

* use goboring 1.24.2 in cfsetup
* update linter and fix lint issues
* update packages, namely **quic-go** and **net**
* install script for macOS
* update Dockerfiles to use Go 1.24.1
* remove usage of cloudflare-go
* pin the golang linter version

Closes TUN-9016
Luis Neto · 2025-06-06 09:05:49 +00:00
parent e144eac2af
commit 96ce66bd30
585 changed files with 23572 additions and 21356 deletions

View File

@@ -106,8 +106,21 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
// Does augmented child strictly contain [start, end)?
if augPos <= start && end <= augEnd {
_, isToken := child.(tokenNode)
return isToken || visit(child)
if is[tokenNode](child) {
return true
}
// childrenOf elides the FuncType node beneath FuncDecl.
// Add it back here for TypeParams, Params, Results
// (all FieldLists). But we don't add it back for the "func" token
// even though it is in the tree at FuncDecl.Type.Func.
if decl, ok := node.(*ast.FuncDecl); ok {
if fields, ok := child.(*ast.FieldList); ok && fields != decl.Recv {
path = append(path, decl.Type)
}
}
return visit(child)
}
// Does [start, end) overlap multiple children?
@@ -313,6 +326,8 @@ func childrenOf(n ast.Node) []ast.Node {
//
// As a workaround, we inline the case for FuncType
// here and order things correctly.
// We also need to insert the elided FuncType just
// before the 'visit' recursion.
//
children = nil // discard ast.Walk(FuncDecl) info subtrees
children = append(children, tok(n.Type.Func, len("func")))
@@ -632,3 +647,8 @@ func NodeDescription(n ast.Node) string {
}
panic(fmt.Sprintf("unexpected node type: %T", n))
}
func is[T any](x any) bool {
_, ok := x.(T)
return ok
}
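The `is[T]` generic helper above replaces the ad-hoc `_, isToken := child.(tokenNode)` assertion. A minimal standalone sketch of the pattern (illustrative only, not part of the diff):

```go
package main

import (
	"fmt"
	"go/ast"
)

// is reports whether x's dynamic type is T, mirroring the vendored helper.
func is[T any](x any) bool {
	_, ok := x.(T)
	return ok
}

func main() {
	var n ast.Node = &ast.Ident{Name: "x"}
	fmt.Println(is[*ast.Ident](n))    // true
	fmt.Println(is[*ast.CallExpr](n)) // false
}
```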

View File

@@ -9,6 +9,7 @@ import (
"fmt"
"go/ast"
"go/token"
"slices"
"strconv"
"strings"
)
@@ -186,7 +187,7 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added
spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
first.Specs = append(first.Specs, spec)
}
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
f.Decls = slices.Delete(f.Decls, i, i+1)
i--
}
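The hunk above swaps the `append(f.Decls[:i], f.Decls[i+1:]...)` idiom for `slices.Delete` (standard library, Go 1.21+). A small sketch showing the two forms produce the same result when removing one element (the slice values are illustrative):

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	a := []string{"d0", "d1", "d2", "d3"}
	b := []string{"d0", "d1", "d2", "d3"}

	i := 1
	a = append(a[:i], a[i+1:]...) // old idiom: remove element i
	b = slices.Delete(b, i, i+1)  // new form used in the diff

	fmt.Println(a) // [d0 d2 d3]
	fmt.Println(b) // [d0 d2 d3]
}
```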
@@ -344,7 +345,12 @@ func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (r
}
// UsesImport reports whether a given import is used.
// The provided File must have been parsed with syntactic object resolution
// (not using go/parser.SkipObjectResolution).
func UsesImport(f *ast.File, path string) (used bool) {
if f.Scope == nil {
panic("file f was not parsed with syntactic object resolution")
}
spec := importSpec(f, path)
if spec == nil {
return

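`UsesImport` now panics when the file lacks syntactic object resolution (i.e. it was parsed with `go/parser.SkipObjectResolution`). A hedged usage sketch, assuming the default parse mode, which keeps `f.Scope` populated:

```go
package main

import (
	"fmt"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	src := `package p

import "fmt"

var _ = fmt.Sprint
`
	fset := token.NewFileSet()
	// Mode 0 keeps syntactic object resolution, so f.Scope is non-nil
	// and UsesImport will not panic.
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	fmt.Println(astutil.UsesImport(f, "fmt")) // expected: true
}
```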
View File

@@ -183,7 +183,7 @@ type application struct {
func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
// convert typed nil into untyped nil
if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
if v := reflect.ValueOf(n); v.Kind() == reflect.Pointer && v.IsNil() {
n = nil
}
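The change above is a spelling update from `reflect.Ptr` to its preferred alias `reflect.Pointer`; behavior is unchanged. A small sketch of the typed-nil normalization it performs (the `normalizeNil` name is illustrative):

```go
package main

import (
	"fmt"
	"go/ast"
	"reflect"
)

// normalizeNil converts a typed nil (a non-nil interface holding a nil
// pointer) into an untyped nil, as the apply method above does.
func normalizeNil(n ast.Node) ast.Node {
	if v := reflect.ValueOf(n); v.Kind() == reflect.Pointer && v.IsNil() {
		return nil
	}
	return n
}

func main() {
	var typedNil *ast.Ident
	var n ast.Node = typedNil
	fmt.Println(n == nil)               // false: interface carries a type
	fmt.Println(normalizeNil(n) == nil) // true
}
```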

View File

@@ -7,12 +7,7 @@ package astutil
import "go/ast"
// Unparen returns e with any enclosing parentheses stripped.
func Unparen(e ast.Expr) ast.Expr {
for {
p, ok := e.(*ast.ParenExpr)
if !ok {
return e
}
e = p.X
}
}
// Deprecated: use [ast.Unparen].
//
//go:fix inline
func Unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) }
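`astutil.Unparen` is now a deprecated wrapper around the standard library's `ast.Unparen` (Go 1.22+). A hedged usage sketch of the stdlib function it delegates to:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

func main() {
	expr, err := parser.ParseExpr("((x + 1))")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", expr)              // *ast.ParenExpr
	fmt.Printf("%T\n", ast.Unparen(expr)) // *ast.BinaryExpr: parens stripped
}
```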

View File

@@ -10,6 +10,7 @@
// builds a list of push/pop events and their node type. Subsequent
// method calls that request a traversal scan this list, rather than walk
// the AST, and perform type filtering using efficient bit sets.
// This representation is sometimes called a "balanced parenthesis tree."
//
// Experiments suggest the inspector's traversals are about 2.5x faster
// than ast.Inspect, but it may take around 5 traversals for this
@@ -36,6 +37,9 @@ package inspector
import (
"go/ast"
_ "unsafe"
"golang.org/x/tools/internal/astutil/edge"
)
// An Inspector provides methods for inspecting
@@ -44,6 +48,25 @@ type Inspector struct {
events []event
}
//go:linkname events
func events(in *Inspector) []event { return in.events }
//go:linkname packEdgeKindAndIndex
func packEdgeKindAndIndex(ek edge.Kind, index int) int32 {
return int32(uint32(index+1)<<7 | uint32(ek))
}
// unpackEdgeKindAndIndex unpacks the edge kind and edge index (within
// an []ast.Node slice) from the parent field of a pop event.
//
//go:linkname unpackEdgeKindAndIndex
func unpackEdgeKindAndIndex(x int32) (edge.Kind, int) {
// The "parent" field of a pop node holds the
// edge Kind in the lower 7 bits and the index+1
// in the upper 25.
return edge.Kind(x & 0x7f), int(x>>7) - 1
}
// New returns an Inspector for the specified syntax trees.
func New(files []*ast.File) *Inspector {
return &Inspector{traverse(files)}
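The packing above stores the edge kind in the low 7 bits of an int32 and index+1 in the upper 25 bits, so an index of -1 (no slice index) round-trips cleanly. A self-contained sketch of the scheme (the local `Kind` type stands in for `edge.Kind`):

```go
package main

import "fmt"

type Kind uint8 // stand-in for edge.Kind in this sketch

func pack(ek Kind, index int) int32 {
	return int32(uint32(index+1)<<7 | uint32(ek))
}

func unpack(x int32) (Kind, int) {
	return Kind(x & 0x7f), int(x>>7) - 1
}

func main() {
	for _, tc := range []struct {
		ek    Kind
		index int
	}{{5, -1}, {17, 0}, {42, 1234}} {
		ek, idx := unpack(pack(tc.ek, tc.index))
		fmt.Printf("kind=%d index=%d (round-trip ok: %v)\n",
			ek, idx, ek == tc.ek && idx == tc.index)
	}
}
```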
@@ -52,9 +75,10 @@ func New(files []*ast.File) *Inspector {
// An event represents a push or a pop
// of an ast.Node during a traversal.
type event struct {
node ast.Node
typ uint64 // typeOf(node) on push event, or union of typ strictly between push and pop events on pop events
index int // index of corresponding push or pop event
node ast.Node
typ uint64 // typeOf(node) on push event, or union of typ strictly between push and pop events on pop events
index int32 // index of corresponding push or pop event
parent int32 // index of parent's push node (push nodes only), or packed edge kind/index (pop nodes only)
}
// TODO: Experiment with storing only the second word of event.node (unsafe.Pointer).
@@ -73,8 +97,17 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
// check, Preorder is almost twice as fast as Nodes. The two
// features seem to contribute similar slowdowns (~1.4x each).
// This function is equivalent to the PreorderSeq call below,
// but to avoid the additional dynamic call (which adds 13-35%
// to the benchmarks), we expand it out.
//
// in.PreorderSeq(types...)(func(n ast.Node) bool {
// f(n)
// return true
// })
mask := maskOf(types)
for i := 0; i < len(in.events); {
for i := int32(0); i < int32(len(in.events)); {
ev := in.events[i]
if ev.index > i {
// push
@@ -104,7 +137,7 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
// matches an element of the types slice.
func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proceed bool)) {
mask := maskOf(types)
for i := 0; i < len(in.events); {
for i := int32(0); i < int32(len(in.events)); {
ev := in.events[i]
if ev.index > i {
// push
@@ -138,7 +171,7 @@ func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proc
func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (proceed bool)) {
mask := maskOf(types)
var stack []ast.Node
for i := 0; i < len(in.events); {
for i := int32(0); i < int32(len(in.events)); {
ev := in.events[i]
if ev.index > i {
// push
@@ -171,50 +204,83 @@ func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, s
// traverse builds the table of events representing a traversal.
func traverse(files []*ast.File) []event {
// Preallocate approximate number of events
// based on source file extent.
// based on source file extent of the declarations.
// (We use End-Pos not FileStart-FileEnd to neglect
// the effect of long doc comments.)
// This makes traverse faster by 4x (!).
var extent int
for _, f := range files {
extent += int(f.End() - f.Pos())
}
// This estimate is based on the net/http package.
capacity := extent * 33 / 100
if capacity > 1e6 {
capacity = 1e6 // impose some reasonable maximum
capacity := min(extent*33/100, 1e6) // impose some reasonable maximum (1M)
v := &visitor{
events: make([]event, 0, capacity),
stack: []item{{index: -1}}, // include an extra event so file nodes have a parent
}
events := make([]event, 0, capacity)
var stack []event
stack = append(stack, event{}) // include an extra event so file nodes have a parent
for _, f := range files {
ast.Inspect(f, func(n ast.Node) bool {
if n != nil {
// push
ev := event{
node: n,
typ: 0, // temporarily used to accumulate type bits of subtree
index: len(events), // push event temporarily holds own index
}
stack = append(stack, ev)
events = append(events, ev)
} else {
// pop
top := len(stack) - 1
ev := stack[top]
typ := typeOf(ev.node)
push := ev.index
parent := top - 1
events[push].typ = typ // set type of push
stack[parent].typ |= typ | ev.typ // parent's typ contains push and pop's typs.
events[push].index = len(events) // make push refer to pop
stack = stack[:top]
events = append(events, ev)
}
return true
})
for _, file := range files {
walk(v, edge.Invalid, -1, file)
}
return events
return v.events
}
type visitor struct {
events []event
stack []item
}
type item struct {
index int32 // index of current node's push event
parentIndex int32 // index of parent node's push event
typAccum uint64 // accumulated type bits of current node's descendents
edgeKindAndIndex int32 // edge.Kind and index, bit packed
}
func (v *visitor) push(ek edge.Kind, eindex int, node ast.Node) {
var (
index = int32(len(v.events))
parentIndex = v.stack[len(v.stack)-1].index
)
v.events = append(v.events, event{
node: node,
parent: parentIndex,
typ: typeOf(node),
index: 0, // (pop index is set later by visitor.pop)
})
v.stack = append(v.stack, item{
index: index,
parentIndex: parentIndex,
edgeKindAndIndex: packEdgeKindAndIndex(ek, eindex),
})
// 2B nodes ought to be enough for anyone!
if int32(len(v.events)) < 0 {
panic("event index exceeded int32")
}
// 32M elements in an []ast.Node ought to be enough for anyone!
if ek2, eindex2 := unpackEdgeKindAndIndex(packEdgeKindAndIndex(ek, eindex)); ek2 != ek || eindex2 != eindex {
panic("Node slice index exceeded uint25")
}
}
func (v *visitor) pop(node ast.Node) {
top := len(v.stack) - 1
current := v.stack[top]
push := &v.events[current.index]
parent := &v.stack[top-1]
push.index = int32(len(v.events)) // make push event refer to pop
parent.typAccum |= current.typAccum | push.typ // accumulate type bits into parent
v.stack = v.stack[:top]
v.events = append(v.events, event{
node: node,
typ: current.typAccum,
index: current.index,
parent: current.edgeKindAndIndex, // see [unpackEdgeKindAndIndex]
})
}
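For context, a hedged end-to-end sketch of the traversal API whose internals changed above: build an `Inspector` over parsed files and run a type-filtered `Preorder` pass (the source text and identifiers are illustrative):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	src := `package p

func f() int { return g(1) + g(2) }
func g(int) int { return 0 }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{f})
	in.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(n ast.Node) {
		fmt.Println("call at", fset.Position(n.Pos()))
	})
}
```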

vendor/golang.org/x/tools/go/ast/inspector/iter.go (generated, vendored, new file, 85 lines)
View File

@@ -0,0 +1,85 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.23
package inspector
import (
"go/ast"
"iter"
)
// PreorderSeq returns an iterator that visits all the
// nodes of the files supplied to New in depth-first order.
// It visits each node n before n's children.
// The complete traversal sequence is determined by ast.Inspect.
//
// The types argument, if non-empty, enables type-based
// filtering of events: only nodes whose type matches an
// element of the types slice are included in the sequence.
func (in *Inspector) PreorderSeq(types ...ast.Node) iter.Seq[ast.Node] {
// This implementation is identical to Preorder,
// except that it supports breaking out of the loop.
return func(yield func(ast.Node) bool) {
mask := maskOf(types)
for i := int32(0); i < int32(len(in.events)); {
ev := in.events[i]
if ev.index > i {
// push
if ev.typ&mask != 0 {
if !yield(ev.node) {
break
}
}
pop := ev.index
if in.events[pop].typ&mask == 0 {
// Subtrees do not contain types: skip them and pop.
i = pop + 1
continue
}
}
i++
}
}
}
// All[N] returns an iterator over all the nodes of type N.
// N must be a pointer-to-struct type that implements ast.Node.
//
// Example:
//
// for call := range All[*ast.CallExpr](in) { ... }
func All[N interface {
*S
ast.Node
}, S any](in *Inspector) iter.Seq[N] {
// To avoid additional dynamic call overheads,
// we duplicate rather than call the logic of PreorderSeq.
mask := typeOf((N)(nil))
return func(yield func(N) bool) {
for i := int32(0); i < int32(len(in.events)); {
ev := in.events[i]
if ev.index > i {
// push
if ev.typ&mask != 0 {
if !yield(ev.node.(N)) {
break
}
}
pop := ev.index
if in.events[pop].typ&mask == 0 {
// Subtrees do not contain types: skip them and pop.
i = pop + 1
continue
}
}
i++
}
}
}
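A hedged usage sketch of the new iterator API above (requires Go 1.23 range-over-func, matching the `go:build go1.23` constraint); the parsed source is illustrative:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	src := `package p

func f() int { return g(1) + g(2) }
func g(int) int { return 0 }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{f})

	// All[N]: iterate every node of a concrete type.
	for call := range inspector.All[*ast.CallExpr](in) {
		fmt.Println("call at", fset.Position(call.Pos()))
	}

	// PreorderSeq: filtered sequence with early exit.
	for n := range in.PreorderSeq((*ast.Ident)(nil)) {
		fmt.Println("first ident:", n.(*ast.Ident).Name)
		break
	}
}
```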

View File

@@ -12,6 +12,8 @@ package inspector
import (
"go/ast"
"math"
_ "unsafe"
)
const (
@@ -215,8 +217,9 @@ func typeOf(n ast.Node) uint64 {
return 0
}
//go:linkname maskOf
func maskOf(nodes []ast.Node) uint64 {
if nodes == nil {
if len(nodes) == 0 {
return math.MaxUint64 // match all node types
}
var mask uint64

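The `maskOf` guard changed from `nodes == nil` to `len(nodes) == 0`, so a non-nil but empty slice also means "match all node types". A tiny sketch of the distinction:

```go
package main

import "fmt"

func main() {
	var nilSlice []int
	emptySlice := []int{}

	fmt.Println(nilSlice == nil, len(nilSlice) == 0)     // true true
	fmt.Println(emptySlice == nil, len(emptySlice) == 0) // false true
}
```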
vendor/golang.org/x/tools/go/ast/inspector/walk.go (generated, vendored, new file, 341 lines)
View File

@@ -0,0 +1,341 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inspector
// This file is a fork of ast.Inspect to reduce unnecessary dynamic
// calls and to gather edge information.
//
// Consistency with the original is ensured by TestInspectAllNodes.
import (
"fmt"
"go/ast"
"golang.org/x/tools/internal/astutil/edge"
)
func walkList[N ast.Node](v *visitor, ek edge.Kind, list []N) {
for i, node := range list {
walk(v, ek, i, node)
}
}
func walk(v *visitor, ek edge.Kind, index int, node ast.Node) {
v.push(ek, index, node)
// walk children
// (the order of the cases matches the order
// of the corresponding node types in ast.go)
switch n := node.(type) {
// Comments and fields
case *ast.Comment:
// nothing to do
case *ast.CommentGroup:
walkList(v, edge.CommentGroup_List, n.List)
case *ast.Field:
if n.Doc != nil {
walk(v, edge.Field_Doc, -1, n.Doc)
}
walkList(v, edge.Field_Names, n.Names)
if n.Type != nil {
walk(v, edge.Field_Type, -1, n.Type)
}
if n.Tag != nil {
walk(v, edge.Field_Tag, -1, n.Tag)
}
if n.Comment != nil {
walk(v, edge.Field_Comment, -1, n.Comment)
}
case *ast.FieldList:
walkList(v, edge.FieldList_List, n.List)
// Expressions
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
// nothing to do
case *ast.Ellipsis:
if n.Elt != nil {
walk(v, edge.Ellipsis_Elt, -1, n.Elt)
}
case *ast.FuncLit:
walk(v, edge.FuncLit_Type, -1, n.Type)
walk(v, edge.FuncLit_Body, -1, n.Body)
case *ast.CompositeLit:
if n.Type != nil {
walk(v, edge.CompositeLit_Type, -1, n.Type)
}
walkList(v, edge.CompositeLit_Elts, n.Elts)
case *ast.ParenExpr:
walk(v, edge.ParenExpr_X, -1, n.X)
case *ast.SelectorExpr:
walk(v, edge.SelectorExpr_X, -1, n.X)
walk(v, edge.SelectorExpr_Sel, -1, n.Sel)
case *ast.IndexExpr:
walk(v, edge.IndexExpr_X, -1, n.X)
walk(v, edge.IndexExpr_Index, -1, n.Index)
case *ast.IndexListExpr:
walk(v, edge.IndexListExpr_X, -1, n.X)
walkList(v, edge.IndexListExpr_Indices, n.Indices)
case *ast.SliceExpr:
walk(v, edge.SliceExpr_X, -1, n.X)
if n.Low != nil {
walk(v, edge.SliceExpr_Low, -1, n.Low)
}
if n.High != nil {
walk(v, edge.SliceExpr_High, -1, n.High)
}
if n.Max != nil {
walk(v, edge.SliceExpr_Max, -1, n.Max)
}
case *ast.TypeAssertExpr:
walk(v, edge.TypeAssertExpr_X, -1, n.X)
if n.Type != nil {
walk(v, edge.TypeAssertExpr_Type, -1, n.Type)
}
case *ast.CallExpr:
walk(v, edge.CallExpr_Fun, -1, n.Fun)
walkList(v, edge.CallExpr_Args, n.Args)
case *ast.StarExpr:
walk(v, edge.StarExpr_X, -1, n.X)
case *ast.UnaryExpr:
walk(v, edge.UnaryExpr_X, -1, n.X)
case *ast.BinaryExpr:
walk(v, edge.BinaryExpr_X, -1, n.X)
walk(v, edge.BinaryExpr_Y, -1, n.Y)
case *ast.KeyValueExpr:
walk(v, edge.KeyValueExpr_Key, -1, n.Key)
walk(v, edge.KeyValueExpr_Value, -1, n.Value)
// Types
case *ast.ArrayType:
if n.Len != nil {
walk(v, edge.ArrayType_Len, -1, n.Len)
}
walk(v, edge.ArrayType_Elt, -1, n.Elt)
case *ast.StructType:
walk(v, edge.StructType_Fields, -1, n.Fields)
case *ast.FuncType:
if n.TypeParams != nil {
walk(v, edge.FuncType_TypeParams, -1, n.TypeParams)
}
if n.Params != nil {
walk(v, edge.FuncType_Params, -1, n.Params)
}
if n.Results != nil {
walk(v, edge.FuncType_Results, -1, n.Results)
}
case *ast.InterfaceType:
walk(v, edge.InterfaceType_Methods, -1, n.Methods)
case *ast.MapType:
walk(v, edge.MapType_Key, -1, n.Key)
walk(v, edge.MapType_Value, -1, n.Value)
case *ast.ChanType:
walk(v, edge.ChanType_Value, -1, n.Value)
// Statements
case *ast.BadStmt:
// nothing to do
case *ast.DeclStmt:
walk(v, edge.DeclStmt_Decl, -1, n.Decl)
case *ast.EmptyStmt:
// nothing to do
case *ast.LabeledStmt:
walk(v, edge.LabeledStmt_Label, -1, n.Label)
walk(v, edge.LabeledStmt_Stmt, -1, n.Stmt)
case *ast.ExprStmt:
walk(v, edge.ExprStmt_X, -1, n.X)
case *ast.SendStmt:
walk(v, edge.SendStmt_Chan, -1, n.Chan)
walk(v, edge.SendStmt_Value, -1, n.Value)
case *ast.IncDecStmt:
walk(v, edge.IncDecStmt_X, -1, n.X)
case *ast.AssignStmt:
walkList(v, edge.AssignStmt_Lhs, n.Lhs)
walkList(v, edge.AssignStmt_Rhs, n.Rhs)
case *ast.GoStmt:
walk(v, edge.GoStmt_Call, -1, n.Call)
case *ast.DeferStmt:
walk(v, edge.DeferStmt_Call, -1, n.Call)
case *ast.ReturnStmt:
walkList(v, edge.ReturnStmt_Results, n.Results)
case *ast.BranchStmt:
if n.Label != nil {
walk(v, edge.BranchStmt_Label, -1, n.Label)
}
case *ast.BlockStmt:
walkList(v, edge.BlockStmt_List, n.List)
case *ast.IfStmt:
if n.Init != nil {
walk(v, edge.IfStmt_Init, -1, n.Init)
}
walk(v, edge.IfStmt_Cond, -1, n.Cond)
walk(v, edge.IfStmt_Body, -1, n.Body)
if n.Else != nil {
walk(v, edge.IfStmt_Else, -1, n.Else)
}
case *ast.CaseClause:
walkList(v, edge.CaseClause_List, n.List)
walkList(v, edge.CaseClause_Body, n.Body)
case *ast.SwitchStmt:
if n.Init != nil {
walk(v, edge.SwitchStmt_Init, -1, n.Init)
}
if n.Tag != nil {
walk(v, edge.SwitchStmt_Tag, -1, n.Tag)
}
walk(v, edge.SwitchStmt_Body, -1, n.Body)
case *ast.TypeSwitchStmt:
if n.Init != nil {
walk(v, edge.TypeSwitchStmt_Init, -1, n.Init)
}
walk(v, edge.TypeSwitchStmt_Assign, -1, n.Assign)
walk(v, edge.TypeSwitchStmt_Body, -1, n.Body)
case *ast.CommClause:
if n.Comm != nil {
walk(v, edge.CommClause_Comm, -1, n.Comm)
}
walkList(v, edge.CommClause_Body, n.Body)
case *ast.SelectStmt:
walk(v, edge.SelectStmt_Body, -1, n.Body)
case *ast.ForStmt:
if n.Init != nil {
walk(v, edge.ForStmt_Init, -1, n.Init)
}
if n.Cond != nil {
walk(v, edge.ForStmt_Cond, -1, n.Cond)
}
if n.Post != nil {
walk(v, edge.ForStmt_Post, -1, n.Post)
}
walk(v, edge.ForStmt_Body, -1, n.Body)
case *ast.RangeStmt:
if n.Key != nil {
walk(v, edge.RangeStmt_Key, -1, n.Key)
}
if n.Value != nil {
walk(v, edge.RangeStmt_Value, -1, n.Value)
}
walk(v, edge.RangeStmt_X, -1, n.X)
walk(v, edge.RangeStmt_Body, -1, n.Body)
// Declarations
case *ast.ImportSpec:
if n.Doc != nil {
walk(v, edge.ImportSpec_Doc, -1, n.Doc)
}
if n.Name != nil {
walk(v, edge.ImportSpec_Name, -1, n.Name)
}
walk(v, edge.ImportSpec_Path, -1, n.Path)
if n.Comment != nil {
walk(v, edge.ImportSpec_Comment, -1, n.Comment)
}
case *ast.ValueSpec:
if n.Doc != nil {
walk(v, edge.ValueSpec_Doc, -1, n.Doc)
}
walkList(v, edge.ValueSpec_Names, n.Names)
if n.Type != nil {
walk(v, edge.ValueSpec_Type, -1, n.Type)
}
walkList(v, edge.ValueSpec_Values, n.Values)
if n.Comment != nil {
walk(v, edge.ValueSpec_Comment, -1, n.Comment)
}
case *ast.TypeSpec:
if n.Doc != nil {
walk(v, edge.TypeSpec_Doc, -1, n.Doc)
}
walk(v, edge.TypeSpec_Name, -1, n.Name)
if n.TypeParams != nil {
walk(v, edge.TypeSpec_TypeParams, -1, n.TypeParams)
}
walk(v, edge.TypeSpec_Type, -1, n.Type)
if n.Comment != nil {
walk(v, edge.TypeSpec_Comment, -1, n.Comment)
}
case *ast.BadDecl:
// nothing to do
case *ast.GenDecl:
if n.Doc != nil {
walk(v, edge.GenDecl_Doc, -1, n.Doc)
}
walkList(v, edge.GenDecl_Specs, n.Specs)
case *ast.FuncDecl:
if n.Doc != nil {
walk(v, edge.FuncDecl_Doc, -1, n.Doc)
}
if n.Recv != nil {
walk(v, edge.FuncDecl_Recv, -1, n.Recv)
}
walk(v, edge.FuncDecl_Name, -1, n.Name)
walk(v, edge.FuncDecl_Type, -1, n.Type)
if n.Body != nil {
walk(v, edge.FuncDecl_Body, -1, n.Body)
}
case *ast.File:
if n.Doc != nil {
walk(v, edge.File_Doc, -1, n.Doc)
}
walk(v, edge.File_Name, -1, n.Name)
walkList(v, edge.File_Decls, n.Decls)
// don't walk n.Comments - they have been
// visited already through the individual
// nodes
default:
// (includes *ast.Package)
panic(fmt.Sprintf("Walk: unexpected node type %T", n))
}
v.pop(node)
}
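The event stream built by `walk`/`traverse` feeds the Inspector's traversal methods, including `WithStack` from the earlier hunk. A hedged usage sketch (the parsed source is illustrative):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	src := `package p

func f() { println(len("hi")) }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{f})
	in.WithStack([]ast.Node{(*ast.CallExpr)(nil)}, func(n ast.Node, push bool, stack []ast.Node) bool {
		if push {
			// stack runs from the root (*ast.File) down to n itself.
			fmt.Printf("call at depth %d, root %T\n", len(stack), stack[0])
		}
		return true
	})
}
```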