vendor/golang.org/x/tools: update to a09a2fb

Notably, this includes unitchecker's -fix flag.

Also, fix one vet test that failed due to new diagnostic wording.

Change-Id: I87751083dcd9cc4b1d8dce7d54bb796c745436d0
Reviewed-on: https://go-review.googlesource.com/c/go/+/701195
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Author: Alan Donovan
Date: 2025-09-04 15:29:52 -04:00
parent f5b20689e9
commit 0b1eed09a3
18 changed files with 2350 additions and 333 deletions


@@ -11,7 +11,7 @@ require (
 	golang.org/x/sys v0.35.0
 	golang.org/x/telemetry v0.0.0-20250807160809-1a19826ec488
 	golang.org/x/term v0.34.0
-	golang.org/x/tools v0.36.1-0.20250808220315-8866876b956f
+	golang.org/x/tools v0.36.1-0.20250904192731-a09a2fba1c08
 )

 require (


@@ -22,7 +22,7 @@ golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
 golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
 golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
 golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
-golang.org/x/tools v0.36.1-0.20250808220315-8866876b956f h1:9m2Iptt9ZZU5llKDJy1XUl5d13PN1ZYV16KwOvE6jOw=
-golang.org/x/tools v0.36.1-0.20250808220315-8866876b956f/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
+golang.org/x/tools v0.36.1-0.20250904192731-a09a2fba1c08 h1:KS/PXsrK6W9NdlNu8iuCiNb7KM8UFwsh8g1BUjJ9rww=
+golang.org/x/tools v0.36.1-0.20250904192731-a09a2fba1c08/go.mod h1:n+8pplxVZfXnmHBxWsfPnQRJ5vWroQDk+U2MFpjwtFY=
 rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef h1:mqLYrXCXYEZOop9/Dbo6RPX11539nwiCNBb1icVPmw8=
 rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef/go.mod h1:8xcPgWmwlZONN1D9bjxtHEjrUtSEa3fakVF8iaewYKQ=


@@ -0,0 +1,284 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package analysisflags
// This file defines the -fix logic common to unitchecker and
// {single,multi}checker.
import (
"fmt"
"go/format"
"go/token"
"log"
"maps"
"os"
"sort"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/diff"
)
// FixAction abstracts a checker action (running one analyzer on one
// package) for the purposes of applying its diagnostics' fixes.
type FixAction struct {
Name string // e.g. "analyzer@package"
FileSet *token.FileSet
ReadFileFunc analysisinternal.ReadFileFunc
Diagnostics []analysis.Diagnostic
}
// ApplyFixes attempts to apply the first suggested fix associated
// with each diagnostic reported by the specified actions.
// All fixes must have been validated by [analysisinternal.ValidateFixes].
//
// Each fix is treated as an independent change; fixes are merged in
// an arbitrary deterministic order as if by a three-way diff tool
// such as the UNIX diff3 command or 'git merge'. Any fix that cannot be
// cleanly merged is discarded, in which case the final summary tells
// the user to re-run the tool.
// TODO(adonovan): make the checker tool re-run the analysis itself.
//
// When the same file is analyzed as a member of both a primary
// package "p" and a test-augmented package "p [p.test]", there may be
// duplicate diagnostics and fixes. One set of fixes will be applied
// and the other will be discarded; but re-running the tool may then
// show zero fixes, which may cause the confused user to wonder what
// happened to the other ones.
// TODO(adonovan): consider pre-filtering completely identical fixes.
//
// A common reason for overlapping fixes is duplicate additions of the
// same import. The merge algorithm may often cleanly resolve such
// fixes, coalescing identical edits, but the merge may sometimes be
// confused by nearby changes.
//
// Even when merging succeeds, there is no guarantee that the
// composition of the two fixes is semantically correct. Coalescing
// identical edits is appropriate for imports, but not for, say,
// increments to a counter variable; the correct resolution in that
// case might be to increment it twice. Or consider two fixes that
// each delete the penultimate reference to an import or local
// variable: each fix is sound individually, and they may be textually
// distant from each other, but when both are applied, the program is
// no longer valid because it has an unreferenced import or local
// variable.
// TODO(adonovan): investigate replacing the final "gofmt" step with a
// formatter that applies the unused-import deletion logic of
// "goimports".
//
// Merging depends on both the order of fixes and the order of edits
// within them. For example, if three fixes add import "a" twice and
// import "b" once, the two imports of "a" may be combined if they
// appear in order [a, a, b], or not if they appear as [a, b, a].
// TODO(adonovan): investigate an algebraic approach to imports;
// that is, for fixes to Go source files, convert changes within the
// import(...) portion of the file into semantic edits, compose those
// edits algebraically, then convert the result back to edits.
//
// ApplyFixes returns success if all fixes are valid, could be cleanly
// merged, and the corresponding files were successfully updated.
//
// If the -diff flag was set, instead of updating the files it displays the
// final patch composed of all the cleanly merged fixes.
//
// TODO(adonovan): handle file-system level aliases such as symbolic
// links using robustio.FileID.
func ApplyFixes(actions []FixAction, verbose bool) error {
// Select fixes to apply.
//
// If there are several for a given Diagnostic, choose the first.
// Preserve the order of iteration, for determinism.
type fixact struct {
fix *analysis.SuggestedFix
act FixAction
}
var fixes []*fixact
for _, act := range actions {
for _, diag := range act.Diagnostics {
for i := range diag.SuggestedFixes {
fix := &diag.SuggestedFixes[i]
if i == 0 {
fixes = append(fixes, &fixact{fix, act})
} else {
// TODO(adonovan): abstract the logger.
log.Printf("%s: ignoring alternative fix %q", act.Name, fix.Message)
}
}
}
}
// Read file content on demand, from the virtual
// file system that fed the analyzer (see #62292).
//
// This cache assumes that all successful reads for the same
// file name return the same content.
// (It is tempting to group fixes by package and do the
// merge/apply/format steps one package at a time, but
// packages are not disjoint, due to test variants, so this
// would not really address the issue.)
baselineContent := make(map[string][]byte)
getBaseline := func(readFile analysisinternal.ReadFileFunc, filename string) ([]byte, error) {
content, ok := baselineContent[filename]
if !ok {
var err error
content, err = readFile(filename)
if err != nil {
return nil, err
}
baselineContent[filename] = content
}
return content, nil
}
// Apply each fix, updating the current state
// only if the entire fix can be cleanly merged.
accumulatedEdits := make(map[string][]diff.Edit)
goodFixes := 0
fixloop:
for _, fixact := range fixes {
// Convert analysis.TextEdits to diff.Edits, grouped by file.
// Precondition: a prior call to validateFix succeeded.
fileEdits := make(map[string][]diff.Edit)
for _, edit := range fixact.fix.TextEdits {
file := fixact.act.FileSet.File(edit.Pos)
baseline, err := getBaseline(fixact.act.ReadFileFunc, file.Name())
if err != nil {
log.Printf("skipping fix to file %s: %v", file.Name(), err)
continue fixloop
}
// We choose to treat size mismatch as a serious error,
// as it indicates a concurrent write to at least one file,
// and possibly others (consider a git checkout, for example).
if file.Size() != len(baseline) {
return fmt.Errorf("concurrent file modification detected in file %s (size changed from %d -> %d bytes); aborting fix",
file.Name(), file.Size(), len(baseline))
}
fileEdits[file.Name()] = append(fileEdits[file.Name()], diff.Edit{
Start: file.Offset(edit.Pos),
End: file.Offset(edit.End),
New: string(edit.NewText),
})
}
// Apply each set of edits by merging atop
// the previous accumulated state.
after := make(map[string][]diff.Edit)
for file, edits := range fileEdits {
if prev := accumulatedEdits[file]; len(prev) > 0 {
merged, ok := diff.Merge(prev, edits)
if !ok {
// debugging
if false {
log.Printf("%s: fix %s conflicts", fixact.act.Name, fixact.fix.Message)
}
continue fixloop // conflict
}
edits = merged
}
after[file] = edits
}
// The entire fix applied cleanly; commit it.
goodFixes++
maps.Copy(accumulatedEdits, after)
// debugging
if false {
log.Printf("%s: fix %s applied", fixact.act.Name, fixact.fix.Message)
}
}
badFixes := len(fixes) - goodFixes
// Show diff or update files to final state.
var files []string
for file := range accumulatedEdits {
files = append(files, file)
}
sort.Strings(files) // for deterministic -diff
var filesUpdated, totalFiles int
for _, file := range files {
edits := accumulatedEdits[file]
if len(edits) == 0 {
continue // the diffs annihilated (a miracle?)
}
// Apply accumulated fixes.
baseline := baselineContent[file] // (cache hit)
final, err := diff.ApplyBytes(baseline, edits)
if err != nil {
log.Fatalf("internal error in diff.ApplyBytes: %v", err)
}
// Attempt to format each file.
if formatted, err := format.Source(final); err == nil {
final = formatted
}
if diffFlag {
// Since we formatted the file, we need to recompute the diff.
unified := diff.Unified(file+" (old)", file+" (new)", string(baseline), string(final))
// TODO(adonovan): abstract the I/O.
os.Stdout.WriteString(unified)
} else {
// write
totalFiles++
// TODO(adonovan): abstract the I/O.
if err := os.WriteFile(file, final, 0644); err != nil {
log.Println(err)
continue
}
filesUpdated++
}
}
// TODO(adonovan): consider returning a structured result that
// maps each SuggestedFix to its status:
// - invalid
// - secondary, not selected
// - applied
// - had conflicts.
// and a mapping from each affected file to:
// - its final/original content pair, and
// - whether formatting was successful.
// Then file writes and the UI can be applied by the caller
// in whatever form they like.
// If victory was incomplete, report an error that indicates partial progress.
//
// badFixes > 0 indicates that we decided not to attempt some
// fixes due to conflicts or failure to read the source; still
// it's a relatively benign situation since the user can
// re-run the tool, and we may still make progress.
//
// filesUpdated < totalFiles indicates that some file updates
// failed. This should be rare, but is a serious error as it
// may apply half a fix, or leave the files in a bad state.
//
// These numbers are potentially misleading:
// The denominator includes duplicate conflicting fixes due to
// common files in packages "p" and "p [p.test]", which may
// have been fixed and won't appear in the re-run.
// TODO(adonovan): eliminate identical fixes as an initial
// filtering step.
//
// TODO(adonovan): should we log that n files were updated in case of total victory?
if badFixes > 0 || filesUpdated < totalFiles {
if diffFlag {
return fmt.Errorf("%d of %d fixes skipped (e.g. due to conflicts)", badFixes, len(fixes))
} else {
return fmt.Errorf("applied %d of %d fixes; %d files updated. (Re-run the command to apply more.)",
goodFixes, len(fixes), filesUpdated)
}
}
if verbose {
log.Printf("applied %d fixes, updated %d files", len(fixes), filesUpdated)
}
return nil
}
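
For orientation, a sketch of how a driver built on this internal package might invoke ApplyFixes; fset and diags stand in for the FileSet and diagnostics of a real analysis run, and the action name is illustrative:

actions := []analysisflags.FixAction{{
	Name:         "printf@example.org/pkg", // hypothetical "analyzer@package" label
	FileSet:      fset,                     // the token.FileSet used during analysis
	ReadFileFunc: os.ReadFile,              // must read the same file tree the analyzer saw
	Diagnostics:  diags,                    // diagnostics whose fixes passed validation
}}
if err := analysisflags.ApplyFixes(actions, false); err != nil {
	log.Print(err) // some fixes were skipped or some file writes failed
}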


@@ -2,8 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

-// Package analysisflags defines helpers for processing flags of
-// analysis driver tools.
+// Package analysisflags defines helpers for processing flags (-help,
+// -json, -fix, -diff, etc) common to unitchecker and
+// {single,multi}checker. It is not intended for broader use.
 package analysisflags

 import (
@@ -24,8 +25,10 @@ import (
 // flags common to all {single,multi,unit}checkers.
 var (
 	JSON    = false // -json
 	Context = -1    // -c=N: if N>0, display offending line plus N lines of context
+	Fix      bool   // -fix
+	diffFlag bool   // -diff (changes [ApplyFixes] behavior)
 )

 // Parse creates a flag for each of the analyzer's flags,
@@ -74,6 +77,8 @@ func Parse(analyzers []*analysis.Analyzer, multi bool) []*analysis.Analyzer {
 	// flags common to all checkers
 	flag.BoolVar(&JSON, "json", JSON, "emit JSON output")
 	flag.IntVar(&Context, "c", Context, `display offending line with this many lines of context`)
+	flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes")
+	flag.BoolVar(&diffFlag, "diff", false, "with -fix, don't update the files, but print a unified diff")

 	// Add shims for legacy vet flags to enable existing
 	// scripts that run vet to continue to work.


@@ -85,6 +85,18 @@ type Config struct {
 //	-V=full         describe executable for build caching
 //	foo.cfg         perform separate modular analyze on the single
 //	                unit described by a JSON config file foo.cfg.
+//
+// Also, subject to approval of proposal #71859:
+//
+//	-fix            don't print each diagnostic, apply its first fix
+//	-diff           don't apply a fix, print the diff (requires -fix)
+//
+// Additionally, the environment variable GOVET has the value "vet" or
+// "fix" depending on whether the command is being invoked by "go vet",
+// to report diagnostics, or "go fix", to apply fixes. This is
+// necessary so that callers of Main can select their analyzer suite
+// before flag parsing. (Vet analyzers must report real code problems,
+// whereas Fix analyzers may fix non-problems such as style issues.)
 func Main(analyzers ...*analysis.Analyzer) {
 	progname := filepath.Base(os.Args[0])
 	log.SetFlags(0)
@@ -136,35 +148,14 @@ func Run(configFile string, analyzers []*analysis.Analyzer) {
 		log.Fatal(err)
 	}

+	code := 0
+
 	// In VetxOnly mode, the analysis is run only for facts.
 	if !cfg.VetxOnly {
-		if analysisflags.JSON {
-			// JSON output
-			tree := make(analysisflags.JSONTree)
-			for _, res := range results {
-				tree.Add(fset, cfg.ID, res.a.Name, res.diagnostics, res.err)
-			}
-			tree.Print(os.Stdout)
-		} else {
-			// plain text
-			exit := 0
-			for _, res := range results {
-				if res.err != nil {
-					log.Println(res.err)
-					exit = 1
-				}
-			}
-			for _, res := range results {
-				for _, diag := range res.diagnostics {
-					analysisflags.PrintPlain(os.Stderr, fset, analysisflags.Context, diag)
-					exit = 1
-				}
-			}
-			os.Exit(exit)
-		}
+		code = processResults(fset, cfg.ID, results)
 	}

-	os.Exit(0)
+	os.Exit(code)
 }

 func readConfig(filename string) (*Config, error) {
@@ -185,6 +176,63 @@ func readConfig(filename string) (*Config, error) {
 	return cfg, nil
 }

+func processResults(fset *token.FileSet, id string, results []result) (exit int) {
+	if analysisflags.Fix {
+		// Don't print the diagnostics,
+		// but apply all fixes from the root actions.
+
+		// Convert results to the form needed by ApplyFixes.
+		fixActions := make([]analysisflags.FixAction, len(results))
+		for i, res := range results {
+			fixActions[i] = analysisflags.FixAction{
+				Name:         res.a.Name,
+				FileSet:      fset,
+				ReadFileFunc: os.ReadFile,
+				Diagnostics:  res.diagnostics,
+			}
+		}
+		if err := analysisflags.ApplyFixes(fixActions, false); err != nil {
+			// Fail when applying fixes failed.
+			log.Print(err)
+			exit = 1
+		}
+		// Don't proceed to print text/JSON,
+		// and don't report an error
+		// just because there were diagnostics.
+		return
+	}
+
+	// Keep consistent with analogous logic in
+	// printDiagnostics in ../internal/checker/checker.go.
+
+	if analysisflags.JSON {
+		// JSON output
+		tree := make(analysisflags.JSONTree)
+		for _, res := range results {
+			tree.Add(fset, id, res.a.Name, res.diagnostics, res.err)
+		}
+		tree.Print(os.Stdout) // ignore error
+	} else {
+		// plain text
+		for _, res := range results {
+			if res.err != nil {
+				log.Println(res.err)
+				exit = 1
+			}
+		}
+		for _, res := range results {
+			for _, diag := range res.diagnostics {
+				analysisflags.PrintPlain(os.Stderr, fset, analysisflags.Context, diag)
+				exit = 1
+			}
+		}
+	}
+	return
+}
+
 type factImporter = func(pkgPath string) ([]byte, error)

 // These four hook variables are a proof of concept of a future
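
For context, a unitchecker-based tool is a tiny main package; its flags, including the new -fix and -diff, are parsed inside Main. A minimal sketch, typically invoked as go vet -vettool=$(which myvet):

package main

import (
	"golang.org/x/tools/go/analysis/passes/printf"
	"golang.org/x/tools/go/analysis/unitchecker"
)

func main() {
	// Main parses the config file and flags, runs the analyzers,
	// and exits; with -fix it applies fixes via ApplyFixes above.
	unitchecker.Main(printf.Analyzer)
}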

src/cmd/vendor/golang.org/x/tools/internal/diff/diff.go (generated, vendored, new file, 177 lines)

@@ -0,0 +1,177 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package diff computes differences between text files or strings.
package diff
import (
"fmt"
"slices"
"sort"
"strings"
)
// An Edit describes the replacement of a portion of a text file.
type Edit struct {
Start, End int // byte offsets of the region to replace
New string // the replacement
}
func (e Edit) String() string {
return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New)
}
// Apply applies a sequence of edits to the src buffer and returns the
// result. Edits are applied in order of start offset; edits with the
// same start offset are applied in the order they were provided.
//
// Apply returns an error if any edit is out of bounds,
// or if any pair of edits is overlapping.
func Apply(src string, edits []Edit) (string, error) {
edits, size, err := validate(src, edits)
if err != nil {
return "", err
}
// Apply edits.
out := make([]byte, 0, size)
lastEnd := 0
for _, edit := range edits {
if lastEnd < edit.Start {
out = append(out, src[lastEnd:edit.Start]...)
}
out = append(out, edit.New...)
lastEnd = edit.End
}
out = append(out, src[lastEnd:]...)
if len(out) != size {
panic("wrong size")
}
return string(out), nil
}
// ApplyBytes is like Apply, but it accepts a byte slice.
// The result is always a new array.
func ApplyBytes(src []byte, edits []Edit) ([]byte, error) {
res, err := Apply(string(src), edits)
return []byte(res), err
}
// validate checks that edits are consistent with src,
// and returns the size of the patched output.
// It may return a different slice.
func validate(src string, edits []Edit) ([]Edit, int, error) {
if !sort.IsSorted(editsSort(edits)) {
edits = slices.Clone(edits)
SortEdits(edits)
}
// Check validity of edits and compute final size.
size := len(src)
lastEnd := 0
for _, edit := range edits {
if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) {
return nil, 0, fmt.Errorf("diff has out-of-bounds edits")
}
if edit.Start < lastEnd {
return nil, 0, fmt.Errorf("diff has overlapping edits")
}
size += len(edit.New) + edit.Start - edit.End
lastEnd = edit.End
}
return edits, size, nil
}
// SortEdits orders a slice of Edits by (start, end) offset.
// This ordering puts insertions (end = start) before deletions
// (end > start) at the same point, but uses a stable sort to preserve
// the order of multiple insertions at the same point.
// (Apply detects multiple deletions at the same point as an error.)
func SortEdits(edits []Edit) {
sort.Stable(editsSort(edits))
}
type editsSort []Edit
func (a editsSort) Len() int { return len(a) }
func (a editsSort) Less(i, j int) bool {
if cmp := a[i].Start - a[j].Start; cmp != 0 {
return cmp < 0
}
return a[i].End < a[j].End
}
func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
// lineEdits expands and merges a sequence of edits so that each
// resulting edit replaces one or more complete lines.
// See Apply for preconditions.
func lineEdits(src string, edits []Edit) ([]Edit, error) {
edits, _, err := validate(src, edits)
if err != nil {
return nil, err
}
// Do all deletions begin and end at the start of a line,
// and all insertions end with a newline?
// (This is merely a fast path.)
for _, edit := range edits {
if edit.Start >= len(src) || // insertion at EOF
edit.Start > 0 && src[edit.Start-1] != '\n' || // not at line start
edit.End > 0 && src[edit.End-1] != '\n' || // not at line start
edit.New != "" && edit.New[len(edit.New)-1] != '\n' { // partial insert
goto expand // slow path
}
}
return edits, nil // aligned
expand:
if len(edits) == 0 {
return edits, nil // no edits (unreachable due to fast path)
}
expanded := make([]Edit, 0, len(edits)) // a guess
prev := edits[0]
// TODO(adonovan): opt: start from the first misaligned edit.
// TODO(adonovan): opt: avoid quadratic cost of string += string.
for _, edit := range edits[1:] {
between := src[prev.End:edit.Start]
if !strings.Contains(between, "\n") {
// overlapping lines: combine with previous edit.
prev.New += between + edit.New
prev.End = edit.End
} else {
// non-overlapping lines: flush previous edit.
expanded = append(expanded, expandEdit(prev, src))
prev = edit
}
}
return append(expanded, expandEdit(prev, src)), nil // flush final edit
}
// expandEdit returns edit expanded to complete whole lines.
func expandEdit(edit Edit, src string) Edit {
// Expand start left to start of line.
// (delta is the zero-based column number of start.)
start := edit.Start
if delta := start - 1 - strings.LastIndex(src[:start], "\n"); delta > 0 {
edit.Start -= delta
edit.New = src[start-delta:start] + edit.New
}
// Expand end right to end of line.
end := edit.End
if end > 0 && src[end-1] != '\n' ||
edit.New != "" && edit.New[len(edit.New)-1] != '\n' {
if nl := strings.IndexByte(src[end:], '\n'); nl < 0 {
edit.End = len(src) // extend to EOF
} else {
edit.End = end + nl + 1 // extend beyond \n
}
}
edit.New += src[end:edit.End]
return edit
}
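
A small self-contained sketch of the Edit/Apply API above (the diff package is internal to x/tools, so this compiles only within that module):

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/internal/diff"
)

func main() {
	src := "hello, world\n"
	edits := []diff.Edit{{Start: 7, End: 12, New: "Go"}} // replace "world"
	patched, err := diff.Apply(src, edits)
	if err != nil {
		log.Fatal(err) // out-of-bounds or overlapping edits
	}
	fmt.Print(patched) // "hello, Go\n"
}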


@@ -0,0 +1,179 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lcs
import (
"log"
"sort"
)
// lcs is a longest common subsequence
type lcs []diag
// A diag is a piece of the edit graph where A[X+i] == B[Y+i], for 0<=i<Len.
// All computed diagonals are parts of a longest common subsequence.
type diag struct {
X, Y int
Len int
}
// sort sorts in place, by lowest X, and if tied, inversely by Len
func (l lcs) sort() lcs {
sort.Slice(l, func(i, j int) bool {
if l[i].X != l[j].X {
return l[i].X < l[j].X
}
return l[i].Len > l[j].Len
})
return l
}
// validate that the elements of the lcs do not overlap
// (can only happen when the two-sided algorithm ends early)
// expects the lcs to be sorted
func (l lcs) valid() bool {
for i := 1; i < len(l); i++ {
if l[i-1].X+l[i-1].Len > l[i].X {
return false
}
if l[i-1].Y+l[i-1].Len > l[i].Y {
return false
}
}
return true
}
// repair overlapping lcs
// only called if two-sided stops early
func (l lcs) fix() lcs {
// from the set of diagonals in l, find a maximal non-conflicting set
// this problem may be NP-complete, but we use a greedy heuristic,
// which is quadratic, but with a better data structure, could be D log D.
// independent is not enough: {0,3,1} and {3,0,2} can't both occur in an lcs
// which has to have monotone x and y
if len(l) == 0 {
return nil
}
sort.Slice(l, func(i, j int) bool { return l[i].Len > l[j].Len })
tmp := make(lcs, 0, len(l))
tmp = append(tmp, l[0])
for i := 1; i < len(l); i++ {
var dir direction
nxt := l[i]
for _, in := range tmp {
if dir, nxt = overlap(in, nxt); dir == empty || dir == bad {
break
}
}
if nxt.Len > 0 && dir != bad {
tmp = append(tmp, nxt)
}
}
tmp.sort()
if false && !tmp.valid() { // debug checking
log.Fatalf("here %d", len(tmp))
}
return tmp
}
type direction int
const (
empty direction = iota // diag is empty (so not in lcs)
leftdown // proposed diag is acceptably to the left and below
rightup // proposed diag is acceptably to the right and above
bad // proposed diag is inconsistent with the lcs so far
)
// overlap trims the proposed diag prop so it doesn't overlap with
// the existing diag that has already been added to the lcs.
func overlap(exist, prop diag) (direction, diag) {
if prop.X <= exist.X && exist.X < prop.X+prop.Len {
// remove the end of prop where it overlaps with the X end of exist
delta := prop.X + prop.Len - exist.X
prop.Len -= delta
if prop.Len <= 0 {
return empty, prop
}
}
if exist.X <= prop.X && prop.X < exist.X+exist.Len {
// remove the beginning of prop where it overlaps with exist
delta := exist.X + exist.Len - prop.X
prop.Len -= delta
if prop.Len <= 0 {
return empty, prop
}
prop.X += delta
prop.Y += delta
}
if prop.Y <= exist.Y && exist.Y < prop.Y+prop.Len {
// remove the end of prop that overlaps (in Y) with exist
delta := prop.Y + prop.Len - exist.Y
prop.Len -= delta
if prop.Len <= 0 {
return empty, prop
}
}
if exist.Y <= prop.Y && prop.Y < exist.Y+exist.Len {
// remove the beginning of prop that overlaps with exist
delta := exist.Y + exist.Len - prop.Y
prop.Len -= delta
if prop.Len <= 0 {
return empty, prop
}
prop.X += delta // no test reaches this code
prop.Y += delta
}
if prop.X+prop.Len <= exist.X && prop.Y+prop.Len <= exist.Y {
return leftdown, prop
}
if exist.X+exist.Len <= prop.X && exist.Y+exist.Len <= prop.Y {
return rightup, prop
}
// prop can't be in an lcs that contains exist
return bad, prop
}
// manipulating Diag and lcs
// prepend a diagonal (x,y)-(x+1,y+1) segment either to an empty lcs
// or to its first Diag. prepend is only called to extend diagonals
// in the backward direction.
func (lcs lcs) prepend(x, y int) lcs {
if len(lcs) > 0 {
d := &lcs[0]
if int(d.X) == x+1 && int(d.Y) == y+1 {
// extend the diagonal down and to the left
d.X, d.Y = int(x), int(y)
d.Len++
return lcs
}
}
r := diag{X: int(x), Y: int(y), Len: 1}
lcs = append([]diag{r}, lcs...)
return lcs
}
// append appends a diagonal, or extends the existing one.
// by adding the edge (x,y)-(x+1,y+1). append is only called
// to extend diagonals in the forward direction.
func (lcs lcs) append(x, y int) lcs {
if len(lcs) > 0 {
last := &lcs[len(lcs)-1]
// Expand last element if adjoining.
if last.X+last.Len == x && last.Y+last.Len == y {
last.Len++
return lcs
}
}
return append(lcs, diag{X: x, Y: y, Len: 1})
}
// enforce constraint on d, k
func ok(d, k int) bool {
return d >= 0 && -d <= k && k <= d
}
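
To illustrate, append coalesces adjoining match edges into one maximal diagonal; a package-internal fragment:

var l lcs
l = l.append(3, 3) // new diagonal {X:3 Y:3 Len:1}
l = l.append(4, 4) // adjoining, so it extends: {X:3 Y:3 Len:2}
fmt.Println(l)     // [{3 3 2}]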


@@ -0,0 +1,156 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package lcs contains code to find longest-common-subsequences
// (and diffs).
package lcs
/*
Compute longest-common-subsequences of two slices A, B using
algorithms from Myers' paper. A longest-common-subsequence
(LCS from now on) of A and B is a maximal set of lexically increasing
pairs of subscripts (x,y) with A[x]==B[y]. There may be many LCS, but
they all have the same length. An LCS determines a sequence of edits
that changes A into B.
The key concept is the edit graph of A and B.
If A has length N and B has length M, then the edit graph has
vertices v[i][j] for 0 <= i <= N, 0 <= j <= M. There is a
horizontal edge from v[i][j] to v[i+1][j] whenever both are in
the graph, and a vertical edge from v[i][j] to v[i][j+1] similarly.
When A[i] == B[j] there is a diagonal edge from v[i][j] to v[i+1][j+1].
A path in the graph between (0,0) and (N,M) determines a sequence
of edits converting A into B: each horizontal edge corresponds to removing
an element of A, and each vertical edge corresponds to inserting an
element of B.
A vertex (x,y) is on (forward) diagonal k if x-y=k. A path in the graph
is of length D if it has D non-diagonal edges. The algorithms generate
forward paths (in which at least one of x,y increases at each edge),
or backward paths (in which at least one of x,y decreases at each edge),
or a combination. (Note that the orientation is the traditional mathematical one,
with the origin in the lower-left corner.)
Here is the edit graph for A:"aabbaa", B:"aacaba". (I know the diagonals look weird.)
------- ------- ------- ------- ------- -------
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- ------- ------- ------- -------
b | | | ___/ | ___/ | | |
------- ------- ------- ------- ------- -------
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- ------- ------- ------- -------
c | | | | | | |
------- ------- ------- ------- ------- -------
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- ------- ------- ------- -------
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- ------- ------- ------- -------
a a b b a a
The algorithm labels a vertex (x,y) with D,k if it is on diagonal k and at
the end of a maximal path of length D. (Because x-y=k it suffices to remember
only the x coordinate of the vertex.)
The forward algorithm: Find the longest diagonal starting at (0,0) and
label its end with D=0,k=0. From that vertex take a vertical step and
then follow the longest diagonal (up and to the right), and label that vertex
with D=1,k=-1. From the D=0,k=0 point take a horizontal step and then follow
the longest diagonal (up and to the right) and label that vertex
D=1,k=1. In the same way, having labelled all the D vertices,
from a vertex labelled D,k find two vertices
tentatively labelled D+1,k-1 and D+1,k+1. There may be two on the same
diagonal, in which case take the one with the larger x.
Eventually the path gets to (N,M), and the diagonals on it are the LCS.
Here is the edit graph with the ends of D-paths labelled. (So, for instance,
0/2,2 indicates that x=2,y=2 is labelled with 0, as it should be, since the first
step is to go up the longest diagonal from (0,0).)
A:"aabbaa", B:"aacaba"
------- ------- -------(3/3,6)------- -------(3/5,6)-------(4/6,6)
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- -------(2/3,5)------- ------- -------
b | | | ___/ | ___/ | | |
------- ------- ------- ------- -------(3/5,4)-------
a | ___/ | ___/ | | | ___/ | ___/ |
------- -------(1/2,3)-------(2/3,3)------- ------- -------
c | | | | | | |
------- -------(0/2,2)-------(1/3,2)-------(2/4,2)-------(3/5,2)-------(4/6,2)
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- ------- ------- ------- -------
a | ___/ | ___/ | | | ___/ | ___/ |
------- ------- ------- ------- ------- -------
a a b b a a
The 4-path is reconstructed starting at (4/6,6), horizontal to (3/5,6), diagonal to (3,4), vertical
to (2/3,3), horizontal to (1/2,3), vertical to (0/2,2), and diagonal to (0,0). As expected,
there are 4 non-diagonal steps, and the diagonals form an LCS.
There is a symmetric backward algorithm, which gives (backwards labels are prefixed with a colon):
A:"aabbaa", B:"aacaba"
-------- -------- -------- -------- -------- --------
a | ____/ | ____/ | | | ____/ | ____/ |
-------- -------- -------- -------- --------(:0/5,5)--------
b | | | ____/ | ____/ | | |
-------- -------- --------(:1/3,4)-------- -------- --------
a | ____/ | ____/ | | | ____/ | ____/ |
(:3/0,3)--------(:2/1,3)-------- --------(:2/3,3)--------(:1/4,3)-------- --------
c | | | | | | |
-------- -------- --------(:3/3,2)--------(:2/4,2)-------- --------
a | ____/ | ____/ | | | ____/ | ____/ |
(:3/0,1)-------- -------- -------- --------(:3/4,1)-------- --------
a | ____/ | ____/ | | | ____/ | ____/ |
(:4/0,0)-------- -------- -------- --------(:4/4,0)-------- --------
a a b b a a
Neither of these is ideal for use in an editor, where it is undesirable to send very long diffs to the
front end. It's tricky to decide exactly what 'very long diffs' means, as "replace A by B" is very short.
We want to control how big D can be, by stopping when it gets too large. The forward algorithm then
privileges common prefixes, and the backward algorithm privileges common suffixes. Either is an undesirable
asymmetry.
Fortunately there is a two-sided algorithm, implied by results in Myers' paper. Here's what the labels in
the edit graph look like.
A:"aabbaa", B:"aacaba"
--------- --------- --------- --------- --------- ---------
a | ____/ | ____/ | | | ____/ | ____/ |
--------- --------- --------- (2/3,5) --------- --------- (:0/5,5)---------
b | | | ____/ | ____/ | | |
--------- --------- --------- (:1/3,4)--------- --------- ---------
a | ____/ | ____/ | | | ____/ | ____/ |
--------- (:2/1,3)--------- (1/2,3) ---------(2:2/3,3)--------- (:1/4,3)--------- ---------
c | | | | | | |
--------- --------- (0/2,2) --------- (1/3,2) ---------(2:2/4,2)--------- ---------
a | ____/ | ____/ | | | ____/ | ____/ |
--------- --------- --------- --------- --------- ---------
a | ____/ | ____/ | | | ____/ | ____/ |
--------- --------- --------- --------- --------- ---------
a a b b a a
The algorithm stopped when it saw the backwards 2-path ending at (1,3) and the forwards 2-path ending at (3,5). The criterion
is a backwards path ending at (u,v) and a forward path ending at (x,y), where u <= x and the two points are on the same
diagonal. (Here the edit graph has a diagonal, but the criterion is x-y=u-v.) Myers proves there is a forward
2-path from (0,0) to (1,3), and that together with the backwards 2-path ending at (1,3) gives the expected 4-path.
Unfortunately the forward path has to be constructed by another run of the forward algorithm; it can't be found from the
computed labels. That is the worst case. Had the code noticed (x,y)=(u,v)=(3,3) the whole path could be reconstructed
from the edit graph. The implementation looks for a number of special cases to try to avoid computing an extra forward path.
If the two-sided algorithm has to stop early (because D has become too large) it will have found a forward LCS and a
backwards LCS. Ideally these go with disjoint prefixes and suffixes of A and B, but disjointedness may fail and the two
computed LCS may conflict. (An easy example is where A is a suffix of B, and shares a short prefix. The backwards LCS
is all of A, and the forward LCS is a prefix of A.) The algorithm combines the two
to form a best-effort LCS. In the worst case the forward partial LCS may have to
be recomputed.
*/
/* Eugene Myers paper is titled
"An O(ND) Difference Algorithm and Its Variations"
and can be found at
http://www.xmailserver.org/diff2.pdf
(There is a generic implementation of the algorithm in the repository with git hash
b9ad7e4ade3a686d608e44475390ad428e60e7fc)
*/
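
The worked A:"aabbaa", B:"aacaba" example can be reproduced with the package's exported entry point; a fragment:

// Each Diff says: replace A[Start:End] by B[ReplStart:ReplEnd].
for _, d := range lcs.DiffStrings("aabbaa", "aacaba") {
	fmt.Printf("replace A[%d:%d] by B[%d:%d]\n", d.Start, d.End, d.ReplStart, d.ReplEnd)
}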


@@ -0,0 +1,33 @@
#!/bin/bash
#
# Copyright 2022 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
#
# Creates a zip file containing all numbered versions
# of the commit history of a large source file, for use
# as input data for the tests of the diff algorithm.
#
# Run script from root of the x/tools repo.
set -eu
# WARNING: This script will overwrite any uncommitted changes to $file.
# The largest real source file in the x/tools repo.
# file=internal/golang/completion/completion.go
# file=internal/golang/diagnostics.go
file=internal/protocol/tsprotocol.go
tmp=$(mktemp -d)
git log $file |
awk '/^commit / {print $2}' |
nl -ba -nrz |
while read n hash; do
git checkout --quiet $hash $file
cp -f $file $tmp/$n
done
(cd $tmp && zip -q - *) > testdata.zip
rm -fr $tmp
git restore --staged $file
git restore $file
echo "Created testdata.zip"


@@ -0,0 +1,55 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lcs
import (
"fmt"
)
// For each D, vec[D] has length D+1,
// and the label for (D, k) is stored in vec[D][(D+k)/2].
type label struct {
vec [][]int
}
// Temporary checking DO NOT COMMIT true TO PRODUCTION CODE
const debug = false
// debugging. check that the (d,k) pair is valid
// (that is, -d<=k<=d and d+k even)
func checkDK(D, k int) {
if k >= -D && k <= D && (D+k)%2 == 0 {
return
}
panic(fmt.Sprintf("out of range, d=%d,k=%d", D, k))
}
func (t *label) set(D, k, x int) {
if debug {
checkDK(D, k)
}
for len(t.vec) <= D {
t.vec = append(t.vec, nil)
}
if t.vec[D] == nil {
t.vec[D] = make([]int, D+1)
}
t.vec[D][(D+k)/2] = x // known that D+k is even
}
func (t *label) get(d, k int) int {
if debug {
checkDK(d, k)
}
return int(t.vec[d][(d+k)/2])
}
func newtriang(limit int) label {
if limit < 100 {
// Preallocate if limit is not large.
return label{vec: make([][]int, limit)}
}
return label{}
}
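
Because D+k is always even, a row of D+1 slots suffices; a package-internal fragment showing the (D,k) mapping:

// For D=3 the valid k are -3, -1, 1, 3, stored at
// columns (D+k)/2 = 0, 1, 2, 3 of vec[3].
var t label
t.set(3, -1, 42)
fmt.Println(t.get(3, -1)) // 42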


@@ -0,0 +1,478 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lcs
// TODO(adonovan): remove unclear references to "old" in this package.
import (
"fmt"
)
// A Diff is a replacement of a portion of A by a portion of B.
type Diff struct {
Start, End int // offsets of portion to delete in A
ReplStart, ReplEnd int // offset of replacement text in B
}
// DiffStrings returns the differences between two strings.
// It does not respect rune boundaries.
func DiffStrings(a, b string) []Diff { return diff(stringSeqs{a, b}) }
// DiffBytes returns the differences between two byte sequences.
// It does not respect rune boundaries.
func DiffBytes(a, b []byte) []Diff { return diff(bytesSeqs{a, b}) }
// DiffRunes returns the differences between two rune sequences.
func DiffRunes(a, b []rune) []Diff { return diff(runesSeqs{a, b}) }
func diff(seqs sequences) []Diff {
// A limit on how deeply the LCS algorithm should search. The value is just a guess.
const maxDiffs = 100
diff, _ := compute(seqs, twosided, maxDiffs/2)
return diff
}
// compute computes the list of differences between two sequences,
// along with the LCS. It is exercised directly by tests.
// The algorithm is one of {forward, backward, twosided}.
func compute(seqs sequences, algo func(*editGraph) lcs, limit int) ([]Diff, lcs) {
if limit <= 0 {
limit = 1 << 25 // effectively infinity
}
alen, blen := seqs.lengths()
g := &editGraph{
seqs: seqs,
vf: newtriang(limit),
vb: newtriang(limit),
limit: limit,
ux: alen,
uy: blen,
delta: alen - blen,
}
lcs := algo(g)
diffs := lcs.toDiffs(alen, blen)
return diffs, lcs
}
// editGraph carries the information for computing the lcs of two sequences.
type editGraph struct {
seqs sequences
vf, vb label // forward and backward labels
limit int // maximal value of D
// the bounding rectangle of the current edit graph
lx, ly, ux, uy int
delta int // common subexpression: (ux-lx)-(uy-ly)
}
// toDiffs converts an LCS to a list of diffs.
func (lcs lcs) toDiffs(alen, blen int) []Diff {
var diffs []Diff
var pa, pb int // offsets in a, b
for _, l := range lcs {
if pa < l.X || pb < l.Y {
diffs = append(diffs, Diff{pa, l.X, pb, l.Y})
}
pa = l.X + l.Len
pb = l.Y + l.Len
}
if pa < alen || pb < blen {
diffs = append(diffs, Diff{pa, alen, pb, blen})
}
return diffs
}
// --- FORWARD ---
// fdone decides if the forward path has reached the upper right
// corner of the rectangle. If so, it also returns the computed lcs.
func (e *editGraph) fdone(D, k int) (bool, lcs) {
// x, y, k are relative to the rectangle
x := e.vf.get(D, k)
y := x - k
if x == e.ux && y == e.uy {
return true, e.forwardlcs(D, k)
}
return false, nil
}
// run the forward algorithm, until success or up to the limit on D.
func forward(e *editGraph) lcs {
e.setForward(0, 0, e.lx)
if ok, ans := e.fdone(0, 0); ok {
return ans
}
// from D to D+1
for D := range e.limit {
e.setForward(D+1, -(D + 1), e.getForward(D, -D))
if ok, ans := e.fdone(D+1, -(D + 1)); ok {
return ans
}
e.setForward(D+1, D+1, e.getForward(D, D)+1)
if ok, ans := e.fdone(D+1, D+1); ok {
return ans
}
for k := -D + 1; k <= D-1; k += 2 {
// these are tricky and easy to get backwards
lookv := e.lookForward(k, e.getForward(D, k-1)+1)
lookh := e.lookForward(k, e.getForward(D, k+1))
if lookv > lookh {
e.setForward(D+1, k, lookv)
} else {
e.setForward(D+1, k, lookh)
}
if ok, ans := e.fdone(D+1, k); ok {
return ans
}
}
}
// D is too large
// find the D path with maximal x+y inside the rectangle and
// use that to compute the found part of the lcs
kmax := -e.limit - 1
diagmax := -1
for k := -e.limit; k <= e.limit; k += 2 {
x := e.getForward(e.limit, k)
y := x - k
if x+y > diagmax && x <= e.ux && y <= e.uy {
diagmax, kmax = x+y, k
}
}
return e.forwardlcs(e.limit, kmax)
}
// recover the lcs by backtracking from the farthest point reached
func (e *editGraph) forwardlcs(D, k int) lcs {
var ans lcs
for x := e.getForward(D, k); x != 0 || x-k != 0; {
if ok(D-1, k-1) && x-1 == e.getForward(D-1, k-1) {
// if (x-1,y) is labelled D-1, x--,D--,k--,continue
D, k, x = D-1, k-1, x-1
continue
} else if ok(D-1, k+1) && x == e.getForward(D-1, k+1) {
// if (x,y-1) is labelled D-1, x, D--,k++, continue
D, k = D-1, k+1
continue
}
// if (x-1,y-1)--(x,y) is a diagonal, prepend,x--,y--, continue
y := x - k
ans = ans.prepend(x+e.lx-1, y+e.ly-1)
x--
}
return ans
}
// start at (x,y), go up the diagonal as far as possible,
// and label the result with d
func (e *editGraph) lookForward(k, relx int) int {
rely := relx - k
x, y := relx+e.lx, rely+e.ly
if x < e.ux && y < e.uy {
x += e.seqs.commonPrefixLen(x, e.ux, y, e.uy)
}
return x
}
func (e *editGraph) setForward(d, k, relx int) {
x := e.lookForward(k, relx)
e.vf.set(d, k, x-e.lx)
}
func (e *editGraph) getForward(d, k int) int {
x := e.vf.get(d, k)
return x
}
// --- BACKWARD ---
// bdone decides if the backward path has reached the lower left corner
func (e *editGraph) bdone(D, k int) (bool, lcs) {
// x, y, k are relative to the rectangle
x := e.vb.get(D, k)
y := x - (k + e.delta)
if x == 0 && y == 0 {
return true, e.backwardlcs(D, k)
}
return false, nil
}
// run the backward algorithm, until success or up to the limit on D.
// (used only by tests)
func backward(e *editGraph) lcs {
e.setBackward(0, 0, e.ux)
if ok, ans := e.bdone(0, 0); ok {
return ans
}
// from D to D+1
for D := range e.limit {
e.setBackward(D+1, -(D + 1), e.getBackward(D, -D)-1)
if ok, ans := e.bdone(D+1, -(D + 1)); ok {
return ans
}
e.setBackward(D+1, D+1, e.getBackward(D, D))
if ok, ans := e.bdone(D+1, D+1); ok {
return ans
}
for k := -D + 1; k <= D-1; k += 2 {
// these are tricky and easy to get wrong
lookv := e.lookBackward(k, e.getBackward(D, k-1))
lookh := e.lookBackward(k, e.getBackward(D, k+1)-1)
if lookv < lookh {
e.setBackward(D+1, k, lookv)
} else {
e.setBackward(D+1, k, lookh)
}
if ok, ans := e.bdone(D+1, k); ok {
return ans
}
}
}
// D is too large
// find the D path with minimal x+y inside the rectangle and
// use that to compute the part of the lcs found
kmax := -e.limit - 1
diagmin := 1 << 25
for k := -e.limit; k <= e.limit; k += 2 {
x := e.getBackward(e.limit, k)
y := x - (k + e.delta)
if x+y < diagmin && x >= 0 && y >= 0 {
diagmin, kmax = x+y, k
}
}
if kmax < -e.limit {
panic(fmt.Sprintf("no paths when limit=%d?", e.limit))
}
return e.backwardlcs(e.limit, kmax)
}
// recover the lcs by backtracking
func (e *editGraph) backwardlcs(D, k int) lcs {
var ans lcs
for x := e.getBackward(D, k); x != e.ux || x-(k+e.delta) != e.uy; {
if ok(D-1, k-1) && x == e.getBackward(D-1, k-1) {
// D--, k--, x unchanged
D, k = D-1, k-1
continue
} else if ok(D-1, k+1) && x+1 == e.getBackward(D-1, k+1) {
// D--, k++, x++
D, k, x = D-1, k+1, x+1
continue
}
y := x - (k + e.delta)
ans = ans.append(x+e.lx, y+e.ly)
x++
}
return ans
}
// start at (x,y), go down the diagonal as far as possible,
func (e *editGraph) lookBackward(k, relx int) int {
rely := relx - (k + e.delta) // forward k = k + e.delta
x, y := relx+e.lx, rely+e.ly
if x > 0 && y > 0 {
x -= e.seqs.commonSuffixLen(0, x, 0, y)
}
return x
}
// convert to rectangle, and label the result with d
func (e *editGraph) setBackward(d, k, relx int) {
x := e.lookBackward(k, relx)
e.vb.set(d, k, x-e.lx)
}
func (e *editGraph) getBackward(d, k int) int {
x := e.vb.get(d, k)
return x
}
// --- TWOSIDED ---
func twosided(e *editGraph) lcs {
// The termination condition could be improved, as either the forward
// or backward pass could succeed before Myers' Lemma applies.
// Aside from questions of efficiency (is the extra testing cost-effective)
// this is more likely to matter when e.limit is reached.
e.setForward(0, 0, e.lx)
e.setBackward(0, 0, e.ux)
// from D to D+1
for D := range e.limit {
// just finished a backwards pass, so check
if got, ok := e.twoDone(D, D); ok {
return e.twolcs(D, D, got)
}
// do a forwards pass (D to D+1)
e.setForward(D+1, -(D + 1), e.getForward(D, -D))
e.setForward(D+1, D+1, e.getForward(D, D)+1)
for k := -D + 1; k <= D-1; k += 2 {
// these are tricky and easy to get backwards
lookv := e.lookForward(k, e.getForward(D, k-1)+1)
lookh := e.lookForward(k, e.getForward(D, k+1))
if lookv > lookh {
e.setForward(D+1, k, lookv)
} else {
e.setForward(D+1, k, lookh)
}
}
// just did a forward pass, so check
if got, ok := e.twoDone(D+1, D); ok {
return e.twolcs(D+1, D, got)
}
// do a backward pass, D to D+1
e.setBackward(D+1, -(D + 1), e.getBackward(D, -D)-1)
e.setBackward(D+1, D+1, e.getBackward(D, D))
for k := -D + 1; k <= D-1; k += 2 {
// these are tricky and easy to get wrong
lookv := e.lookBackward(k, e.getBackward(D, k-1))
lookh := e.lookBackward(k, e.getBackward(D, k+1)-1)
if lookv < lookh {
e.setBackward(D+1, k, lookv)
} else {
e.setBackward(D+1, k, lookh)
}
}
}
// D too large. combine a forward and backward partial lcs
// first, a forward one
kmax := -e.limit - 1
diagmax := -1
for k := -e.limit; k <= e.limit; k += 2 {
x := e.getForward(e.limit, k)
y := x - k
if x+y > diagmax && x <= e.ux && y <= e.uy {
diagmax, kmax = x+y, k
}
}
if kmax < -e.limit {
panic(fmt.Sprintf("no forward paths when limit=%d?", e.limit))
}
lcs := e.forwardlcs(e.limit, kmax)
// now a backward one
// find the D path with minimal x+y inside the rectangle and
// use that to compute the lcs
diagmin := 1 << 25 // infinity
for k := -e.limit; k <= e.limit; k += 2 {
x := e.getBackward(e.limit, k)
y := x - (k + e.delta)
if x+y < diagmin && x >= 0 && y >= 0 {
diagmin, kmax = x+y, k
}
}
if kmax < -e.limit {
panic(fmt.Sprintf("no backward paths when limit=%d?", e.limit))
}
lcs = append(lcs, e.backwardlcs(e.limit, kmax)...)
// These may overlap (e.forwardlcs and e.backwardlcs return sorted lcs)
ans := lcs.fix()
return ans
}
// Does Myers' Lemma apply?
func (e *editGraph) twoDone(df, db int) (int, bool) {
if (df+db+e.delta)%2 != 0 {
return 0, false // diagonals cannot overlap
}
kmin := max(-df, -db+e.delta)
kmax := db + e.delta
if df < kmax {
kmax = df
}
for k := kmin; k <= kmax; k += 2 {
x := e.vf.get(df, k)
u := e.vb.get(db, k-e.delta)
if u <= x {
// is it worth looking at all the other k?
for l := k; l <= kmax; l += 2 {
x := e.vf.get(df, l)
y := x - l
u := e.vb.get(db, l-e.delta)
v := u - l
if x == u || u == 0 || v == 0 || y == e.uy || x == e.ux {
return l, true
}
}
return k, true
}
}
return 0, false
}
func (e *editGraph) twolcs(df, db, kf int) lcs {
// db==df || db+1==df
x := e.vf.get(df, kf)
y := x - kf
kb := kf - e.delta
u := e.vb.get(db, kb)
v := u - kf
// Myers proved there is a df-path from (0,0) to (u,v)
// and a db-path from (x,y) to (N,M).
// In the first case the overall path is the forward path
// to (u,v) followed by the backward path to (N,M).
// In the second case the path is the backward path to (x,y)
// followed by the forward path to (x,y) from (0,0).
// Look for some special cases to avoid computing either of these paths.
if x == u {
// "babaab" "cccaba"
// already patched together
lcs := e.forwardlcs(df, kf)
lcs = append(lcs, e.backwardlcs(db, kb)...)
return lcs.sort()
}
// is (u-1,v) or (u,v-1) labelled df-1?
// if so, that forward df-1-path plus a horizontal or vertical edge
// is the df-path to (u,v), then plus the db-path to (N,M)
if u > 0 && ok(df-1, u-1-v) && e.vf.get(df-1, u-1-v) == u-1 {
// "aabbab" "cbcabc"
lcs := e.forwardlcs(df-1, u-1-v)
lcs = append(lcs, e.backwardlcs(db, kb)...)
return lcs.sort()
}
if v > 0 && ok(df-1, (u-(v-1))) && e.vf.get(df-1, u-(v-1)) == u {
// "abaabb" "bcacab"
lcs := e.forwardlcs(df-1, u-(v-1))
lcs = append(lcs, e.backwardlcs(db, kb)...)
return lcs.sort()
}
// The path can't possibly contribute to the lcs because it
// is all horizontal or vertical edges
if u == 0 || v == 0 || x == e.ux || y == e.uy {
// "abaabb" "abaaaa"
if u == 0 || v == 0 {
return e.backwardlcs(db, kb)
}
return e.forwardlcs(df, kf)
}
// is (x+1,y) or (x,y+1) labelled db-1?
if x+1 <= e.ux && ok(db-1, x+1-y-e.delta) && e.vb.get(db-1, x+1-y-e.delta) == x+1 {
// "bababb" "baaabb"
lcs := e.backwardlcs(db-1, kb+1)
lcs = append(lcs, e.forwardlcs(df, kf)...)
return lcs.sort()
}
if y+1 <= e.uy && ok(db-1, x-(y+1)-e.delta) && e.vb.get(db-1, x-(y+1)-e.delta) == x {
// "abbbaa" "cabacc"
lcs := e.backwardlcs(db-1, kb-1)
lcs = append(lcs, e.forwardlcs(df, kf)...)
return lcs.sort()
}
// need to compute another path
// "aabbaa" "aacaba"
lcs := e.backwardlcs(db, kb)
oldx, oldy := e.ux, e.uy
e.ux = u
e.uy = v
lcs = append(lcs, forward(e)...)
e.ux, e.uy = oldx, oldy
return lcs.sort()
}
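
As noted above, compute is exercised directly by tests; a package-internal fragment:

// Run the two-sided algorithm with a small limit on D.
diffs, lcs := compute(stringSeqs{"aabbaa", "aacaba"}, twosided, 10)
fmt.Println(lcs.valid(), diffs) // diffs describe how to edit A into B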


@@ -0,0 +1,113 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lcs
// This file defines the abstract sequence over which the LCS algorithm operates.
// sequences abstracts a pair of sequences, A and B.
type sequences interface {
lengths() (int, int) // len(A), len(B)
commonPrefixLen(ai, aj, bi, bj int) int // len(commonPrefix(A[ai:aj], B[bi:bj]))
commonSuffixLen(ai, aj, bi, bj int) int // len(commonSuffix(A[ai:aj], B[bi:bj]))
}
type stringSeqs struct{ a, b string }
func (s stringSeqs) lengths() (int, int) { return len(s.a), len(s.b) }
func (s stringSeqs) commonPrefixLen(ai, aj, bi, bj int) int {
return commonPrefixLenString(s.a[ai:aj], s.b[bi:bj])
}
func (s stringSeqs) commonSuffixLen(ai, aj, bi, bj int) int {
return commonSuffixLenString(s.a[ai:aj], s.b[bi:bj])
}
// The explicit capacity in s[i:j:j] leads to more efficient code.
type bytesSeqs struct{ a, b []byte }
func (s bytesSeqs) lengths() (int, int) { return len(s.a), len(s.b) }
func (s bytesSeqs) commonPrefixLen(ai, aj, bi, bj int) int {
return commonPrefixLenBytes(s.a[ai:aj:aj], s.b[bi:bj:bj])
}
func (s bytesSeqs) commonSuffixLen(ai, aj, bi, bj int) int {
return commonSuffixLenBytes(s.a[ai:aj:aj], s.b[bi:bj:bj])
}
type runesSeqs struct{ a, b []rune }
func (s runesSeqs) lengths() (int, int) { return len(s.a), len(s.b) }
func (s runesSeqs) commonPrefixLen(ai, aj, bi, bj int) int {
return commonPrefixLenRunes(s.a[ai:aj:aj], s.b[bi:bj:bj])
}
func (s runesSeqs) commonSuffixLen(ai, aj, bi, bj int) int {
return commonSuffixLenRunes(s.a[ai:aj:aj], s.b[bi:bj:bj])
}
// TODO(adonovan): optimize these functions using ideas from:
// - https://go.dev/cl/408116 common.go
// - https://go.dev/cl/421435 xor_generic.go
// TODO(adonovan): factor using generics when available,
// but measure performance impact.
// commonPrefixLen* returns the length of the common prefix of a and b.
func commonPrefixLenBytes(a, b []byte) int {
n := min(len(a), len(b))
i := 0
for i < n && a[i] == b[i] {
i++
}
return i
}
func commonPrefixLenRunes(a, b []rune) int {
n := min(len(a), len(b))
i := 0
for i < n && a[i] == b[i] {
i++
}
return i
}
func commonPrefixLenString(a, b string) int {
n := min(len(a), len(b))
i := 0
for i < n && a[i] == b[i] {
i++
}
return i
}
// commonSuffixLen* returns the length of the common suffix of a and b.
func commonSuffixLenBytes(a, b []byte) int {
n := min(len(a), len(b))
i := 0
for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
i++
}
return i
}
func commonSuffixLenRunes(a, b []rune) int {
n := min(len(a), len(b))
i := 0
for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
i++
}
return i
}
func commonSuffixLenString(a, b string) int {
n := min(len(a), len(b))
i := 0
for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
i++
}
return i
}
func min(x, y int) int {
if x < y {
return x
} else {
return y
}
}
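
These helpers behave as their names suggest; a package-internal fragment:

fmt.Println(commonPrefixLenString("aabbaa", "aacaba")) // 2 ("aa")
fmt.Println(commonSuffixLenString("aabbaa", "aacaba")) // 1 ("a")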


@@ -0,0 +1,81 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package diff
import (
"slices"
)
// Merge merges two valid, ordered lists of edits.
// It returns nil, false if there was a conflict.
//
// If corresponding edits in x and y are identical,
// they are coalesced in the result.
//
// If x and y both provide different insertions at the same point,
// the insertions from x will be first in the result.
//
// TODO(adonovan): this algorithm could be improved, for example by
// working harder to coalesce non-identical edits that share a common
// deletion or common prefix of insertion (see the tests).
// Survey the academic literature for insights.
func Merge(x, y []Edit) ([]Edit, bool) {
// Make a defensive (premature) copy of the arrays.
x = slices.Clone(x)
y = slices.Clone(y)
var merged []Edit
add := func(edit Edit) {
merged = append(merged, edit)
}
var xi, yi int
for xi < len(x) && yi < len(y) {
px := &x[xi]
py := &y[yi]
if *px == *py {
// x and y are identical: coalesce.
add(*px)
xi++
yi++
} else if px.End <= py.Start {
// x is entirely before y,
// or an insertion at start of y.
add(*px)
xi++
} else if py.End <= px.Start {
// y is entirely before x,
// or an insertion at start of x.
add(*py)
yi++
} else if px.Start < py.Start {
// x is partly before y:
// split it into a deletion and an edit.
add(Edit{px.Start, py.Start, ""})
px.Start = py.Start
} else if py.Start < px.Start {
// y is partly before x:
// split it into a deletion and an edit.
add(Edit{py.Start, px.Start, ""})
py.Start = px.Start
} else {
// x and y are unequal non-insertions
// at the same point: conflict.
return nil, false
}
}
for ; xi < len(x); xi++ {
add(x[xi])
}
for ; yi < len(y); yi++ {
add(y[yi])
}
return merged, true
}
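
Merge is the three-way step that ApplyFixes relies on: identical edits coalesce, and unequal overlapping edits conflict. A fragment:

ins := diff.Edit{Start: 0, End: 0, New: "import \"a\"\n"}
merged, ok := diff.Merge([]diff.Edit{ins}, []diff.Edit{ins})
fmt.Println(ok, len(merged)) // true 1: identical insertions coalesced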


@@ -0,0 +1,99 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package diff
import (
"bytes"
"unicode/utf8"
"golang.org/x/tools/internal/diff/lcs"
)
// Strings computes the differences between two strings.
// The resulting edits respect rune boundaries.
func Strings(before, after string) []Edit {
if before == after {
return nil // common case
}
if isASCII(before) && isASCII(after) {
// TODO(adonovan): opt: specialize diffASCII for strings.
return diffASCII([]byte(before), []byte(after))
}
return diffRunes([]rune(before), []rune(after))
}
// Bytes computes the differences between two byte slices.
// The resulting edits respect rune boundaries.
func Bytes(before, after []byte) []Edit {
if bytes.Equal(before, after) {
return nil // common case
}
if isASCII(before) && isASCII(after) {
return diffASCII(before, after)
}
return diffRunes(runes(before), runes(after))
}
func diffASCII(before, after []byte) []Edit {
diffs := lcs.DiffBytes(before, after)
// Convert from LCS diffs.
res := make([]Edit, len(diffs))
for i, d := range diffs {
res[i] = Edit{d.Start, d.End, string(after[d.ReplStart:d.ReplEnd])}
}
return res
}
func diffRunes(before, after []rune) []Edit {
diffs := lcs.DiffRunes(before, after)
// The diffs returned by the lcs package use indexes
// into whatever slice was passed in.
// Convert rune offsets to byte offsets.
res := make([]Edit, len(diffs))
lastEnd := 0
utf8Len := 0
for i, d := range diffs {
utf8Len += runesLen(before[lastEnd:d.Start]) // text between edits
start := utf8Len
utf8Len += runesLen(before[d.Start:d.End]) // text deleted by this edit
res[i] = Edit{start, utf8Len, string(after[d.ReplStart:d.ReplEnd])}
lastEnd = d.End
}
return res
}
// runes is like []rune(string(bytes)) without the duplicate allocation.
func runes(bytes []byte) []rune {
n := utf8.RuneCount(bytes)
runes := make([]rune, n)
for i := range n {
r, sz := utf8.DecodeRune(bytes)
bytes = bytes[sz:]
runes[i] = r
}
return runes
}
// runesLen returns the length in bytes of the UTF-8 encoding of runes.
func runesLen(runes []rune) (len int) {
for _, r := range runes {
len += utf8.RuneLen(r)
}
return len
}
// isASCII reports whether s contains only ASCII.
func isASCII[S string | []byte](s S) bool {
for i := 0; i < len(s); i++ {
if s[i] >= utf8.RuneSelf {
return false
}
}
return true
}
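
A usage note (an editorial sketch, not upstream code): the edits returned by Strings and Bytes are expressed as byte offsets into before, even when the diff itself was computed over runes.

package diff

import "fmt"

// exampleStrings is a hypothetical illustration; not part of the package.
func exampleStrings() {
	edits := Strings("héllo", "hello")
	for _, e := range edits {
		// Offsets are byte positions in "héllo": 'é' occupies bytes [1,3).
		fmt.Printf("replace [%d,%d) with %q\n", e.Start, e.End, e.New)
	}
}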

View file

@ -0,0 +1,251 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package diff
import (
"fmt"
"log"
"strings"
)
// DefaultContextLines is the number of unchanged lines of surrounding
// context displayed by Unified. Use ToUnified to specify a different value.
const DefaultContextLines = 3
// Unified returns a unified diff of the old and new strings.
// The old and new labels are the names of the old and new files.
// If the strings are equal, it returns the empty string.
func Unified(oldLabel, newLabel, old, new string) string {
edits := Strings(old, new)
unified, err := ToUnified(oldLabel, newLabel, old, edits, DefaultContextLines)
if err != nil {
// Can't happen: edits are consistent.
log.Fatalf("internal error in diff.Unified: %v", err)
}
return unified
}
// ToUnified applies the edits to content and returns a unified diff,
// with contextLines lines of (unchanged) context around each diff hunk.
// The old and new labels are the names of the content and result files.
// It returns an error if the edits are inconsistent; see ApplyEdits.
func ToUnified(oldLabel, newLabel, content string, edits []Edit, contextLines int) (string, error) {
u, err := toUnified(oldLabel, newLabel, content, edits, contextLines)
if err != nil {
return "", err
}
return u.String(), nil
}
// unified represents a set of edits as a unified diff.
type unified struct {
// from is the name of the original file.
from string
// to is the name of the modified file.
to string
// hunks is the set of edit hunks needed to transform the file content.
hunks []*hunk
}
// hunk represents a contiguous set of line edits to apply.
type hunk struct {
// The line in the original source where the hunk starts.
fromLine int
// The line in the modified source where the hunk starts.
toLine int
// The set of line-based edits to apply.
lines []line
}
// line represents a single line operation to apply as part of a hunk.
type line struct {
// kind is the type of line this represents: deletion, insertion, or copy.
kind opKind
// content is the content of this line.
// For deletion it is the line being removed, for all others it is the line
// to put in the output.
content string
}
// opKind is used to denote the type of operation a line represents.
type opKind int
const (
// opDelete is the operation kind for a line that is present in the input
// but not in the output.
opDelete opKind = iota
// opInsert is the operation kind for a line that is new in the output.
opInsert
// opEqual is the operation kind for a line that is the same in the input and
// output, often used to provide context around edited lines.
opEqual
)
// String returns a human-readable representation of an opKind. It is not
// intended for machine processing.
func (k opKind) String() string {
switch k {
case opDelete:
return "delete"
case opInsert:
return "insert"
case opEqual:
return "equal"
default:
panic("unknown operation kind")
}
}
// toUnified takes file contents and a sequence of edits, and calculates
// a unified diff that represents those edits.
func toUnified(fromName, toName string, content string, edits []Edit, contextLines int) (unified, error) {
gap := contextLines * 2
u := unified{
from: fromName,
to: toName,
}
if len(edits) == 0 {
return u, nil
}
var err error
edits, err = lineEdits(content, edits) // expand to whole lines
if err != nil {
return u, err
}
lines := splitLines(content)
var h *hunk
last := 0
toLine := 0
for _, edit := range edits {
// Compute the zero-based line numbers of the edit start and end.
// TODO(adonovan): opt: compute incrementally, avoid O(n^2).
start := strings.Count(content[:edit.Start], "\n")
end := strings.Count(content[:edit.End], "\n")
if edit.End == len(content) && len(content) > 0 && content[len(content)-1] != '\n' {
end++ // EOF counts as an implicit newline
}
switch {
case h != nil && start == last:
// direct extension
case h != nil && start <= last+gap:
// within range of previous lines, add the joiners
addEqualLines(h, lines, last, start)
default:
// need to start a new hunk
if h != nil {
// add the edge to the previous hunk
addEqualLines(h, lines, last, last+contextLines)
u.hunks = append(u.hunks, h)
}
toLine += start - last
h = &hunk{
fromLine: start + 1,
toLine: toLine + 1,
}
// add the edge to the new hunk
delta := addEqualLines(h, lines, start-contextLines, start)
h.fromLine -= delta
h.toLine -= delta
}
last = start
for i := start; i < end; i++ {
h.lines = append(h.lines, line{kind: opDelete, content: lines[i]})
last++
}
if edit.New != "" {
for _, content := range splitLines(edit.New) {
h.lines = append(h.lines, line{kind: opInsert, content: content})
toLine++
}
}
}
if h != nil {
// add the edge to the final hunk
addEqualLines(h, lines, last, last+contextLines)
u.hunks = append(u.hunks, h)
}
return u, nil
}
func splitLines(text string) []string {
lines := strings.SplitAfter(text, "\n")
if lines[len(lines)-1] == "" {
lines = lines[:len(lines)-1]
}
return lines
}
func addEqualLines(h *hunk, lines []string, start, end int) int {
delta := 0
for i := start; i < end; i++ {
if i < 0 {
continue
}
if i >= len(lines) {
return delta
}
h.lines = append(h.lines, line{kind: opEqual, content: lines[i]})
delta++
}
return delta
}
// String converts a unified diff to the standard textual form for that diff.
// The output of this function can be passed to tools like patch.
func (u unified) String() string {
if len(u.hunks) == 0 {
return ""
}
b := new(strings.Builder)
fmt.Fprintf(b, "--- %s\n", u.from)
fmt.Fprintf(b, "+++ %s\n", u.to)
for _, hunk := range u.hunks {
fromCount, toCount := 0, 0
for _, l := range hunk.lines {
switch l.kind {
case opDelete:
fromCount++
case opInsert:
toCount++
default:
fromCount++
toCount++
}
}
fmt.Fprint(b, "@@")
if fromCount > 1 {
fmt.Fprintf(b, " -%d,%d", hunk.fromLine, fromCount)
} else if hunk.fromLine == 1 && fromCount == 0 {
// Match odd GNU diff -u behavior adding to empty file.
fmt.Fprintf(b, " -0,0")
} else {
fmt.Fprintf(b, " -%d", hunk.fromLine)
}
if toCount > 1 {
fmt.Fprintf(b, " +%d,%d", hunk.toLine, toCount)
} else if hunk.toLine == 1 && toCount == 0 {
// Match odd GNU diff -u behavior adding to empty file.
fmt.Fprintf(b, " +0,0")
} else {
fmt.Fprintf(b, " +%d", hunk.toLine)
}
fmt.Fprint(b, " @@\n")
for _, l := range hunk.lines {
switch l.kind {
case opDelete:
fmt.Fprintf(b, "-%s", l.content)
case opInsert:
fmt.Fprintf(b, "+%s", l.content)
default:
fmt.Fprintf(b, " %s", l.content)
}
if !strings.HasSuffix(l.content, "\n") {
fmt.Fprintf(b, "\n\\ No newline at end of file\n")
}
}
}
return b.String()
}
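
A short hypothetical sketch of the output shape (the file labels are invented):

package diff

import "fmt"

// exampleUnified is a hypothetical illustration; not part of the package.
func exampleUnified() {
	before := "a\nb\nc\n"
	after := "a\nB\nc\n"
	fmt.Print(Unified("old.txt", "new.txt", before, after))
	// Output has the standard shape, with DefaultContextLines of context:
	//   --- old.txt
	//   +++ new.txt
	//   @@ -1,3 +1,3 @@
	//    a
	//   -b
	//   +B
	//    c
}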

View file

@ -12,348 +12,354 @@ type pkginfo struct {
} }
var deps = [...]pkginfo{ var deps = [...]pkginfo{
{"archive/tar", "\x03j\x03E5\x01\v\x01#\x01\x01\x02\x05\n\x02\x01\x02\x02\v"}, {"archive/tar", "\x03k\x03E;\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
{"archive/zip", "\x02\x04`\a\x16\x0205\x01+\x05\x01\x11\x03\x02\r\x04"}, {"archive/zip", "\x02\x04a\a\x03\x12\x021;\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
{"bufio", "\x03j}F\x13"}, {"bufio", "\x03k\x83\x01D\x14"},
{"bytes", "m+R\x03\fH\x02\x02"}, {"bytes", "n*Y\x03\fG\x02\x02"},
{"cmp", ""}, {"cmp", ""},
{"compress/bzip2", "\x02\x02\xe6\x01C"}, {"compress/bzip2", "\x02\x02\xed\x01A"},
{"compress/flate", "\x02k\x03z\r\x025\x01\x03"}, {"compress/flate", "\x02l\x03\x80\x01\f\x033\x01\x03"},
{"compress/gzip", "\x02\x04`\a\x03\x15eU"}, {"compress/gzip", "\x02\x04a\a\x03\x14lT"},
{"compress/lzw", "\x02k\x03z"}, {"compress/lzw", "\x02l\x03\x80\x01"},
{"compress/zlib", "\x02\x04`\a\x03\x13\x01f"}, {"compress/zlib", "\x02\x04a\a\x03\x12\x01m"},
{"container/heap", "\xae\x02"}, {"container/heap", "\xb3\x02"},
{"container/list", ""}, {"container/list", ""},
{"container/ring", ""}, {"container/ring", ""},
{"context", "m\\i\x01\f"}, {"context", "n\\m\x01\r"},
{"crypto", "\x83\x01gE"}, {"crypto", "\x83\x01nC"},
{"crypto/aes", "\x10\n\a\x8e\x02"}, {"crypto/aes", "\x10\n\a\x93\x02"},
{"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1c,Q"}, {"crypto/cipher", "\x03\x1e\x01\x01\x1e\x11\x1c+X"},
{"crypto/des", "\x10\x13\x1d-,\x96\x01\x03"}, {"crypto/des", "\x10\x13\x1e-+\x9b\x01\x03"},
{"crypto/dsa", "@\x04)}\x0e"}, {"crypto/dsa", "A\x04)\x83\x01\r"},
{"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1c}"}, {"crypto/ecdh", "\x03\v\f\x0e\x04\x15\x04\r\x1c\x83\x01"},
{"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1c}\x0e\x04L\x01"}, {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\a\v\x05\x01\x04\f\x01\x1c\x83\x01\r\x05K\x01"},
{"crypto/ed25519", "\x0e\x1c\x16\n\a\x1c}E"}, {"crypto/ed25519", "\x0e\x1c\x11\x06\n\a\x1c\x83\x01C"},
{"crypto/elliptic", "0=}\x0e:"}, {"crypto/elliptic", "0>\x83\x01\r9"},
{"crypto/fips140", " \x05\x90\x01"}, {"crypto/fips140", " \x05"},
{"crypto/hkdf", "-\x12\x01-\x16"}, {"crypto/hkdf", "-\x13\x01-\x15"},
{"crypto/hmac", "\x1a\x14\x11\x01\x112"}, {"crypto/hmac", "\x1a\x14\x12\x01\x111"},
{"crypto/internal/boring", "\x0e\x02\rf"}, {"crypto/internal/boring", "\x0e\x02\rf"},
{"crypto/internal/boring/bbig", "\x1a\xde\x01M"}, {"crypto/internal/boring/bbig", "\x1a\xe4\x01M"},
{"crypto/internal/boring/bcache", "\xb3\x02\x12"}, {"crypto/internal/boring/bcache", "\xb8\x02\x13"},
{"crypto/internal/boring/sig", ""}, {"crypto/internal/boring/sig", ""},
{"crypto/internal/cryptotest", "\x03\r\n)\x0e\x19\x06\x13\x12#\a\t\x11\x11\x11\x1b\x01\f\r\x05\n"}, {"crypto/internal/cryptotest", "\x03\r\n\x06$\x0e\x19\x06\x12\x12 \x04\a\t\x16\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
{"crypto/internal/entropy", "E"}, {"crypto/internal/entropy", "F"},
{"crypto/internal/fips140", ">/}9\r\x15"}, {"crypto/internal/fips140", "?/\x15\xa7\x01\v\x16"},
{"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05*\x8c\x016"}, {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x05\x01\x01\x05*\x92\x014"},
{"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06*\x8a\x01"}, {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x05\x01\x06*\x8f\x01"},
{"crypto/internal/fips140/alias", "\xc5\x02"}, {"crypto/internal/fips140/alias", "\xcb\x02"},
{"crypto/internal/fips140/bigmod", "%\x17\x01\x06*\x8c\x01"}, {"crypto/internal/fips140/bigmod", "%\x18\x01\x06*\x92\x01"},
{"crypto/internal/fips140/check", " \x0e\x06\b\x02\xac\x01["}, {"crypto/internal/fips140/check", " \x0e\x06\t\x02\xb2\x01Z"},
{"crypto/internal/fips140/check/checktest", "%\xfe\x01\""}, {"crypto/internal/fips140/check/checktest", "%\x85\x02!"},
{"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01(}\x0f9"}, {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x05\b\x01(\x83\x01\x0f7"},
{"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f1}\x0f9"}, {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\r1\x83\x01\x0f7"},
{"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x067}H"}, {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x068\x15nF"},
{"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v7\xc2\x01\x03"}, {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v8\xc6\x01\x03"},
{"crypto/internal/fips140/edwards25519", "%\a\f\x041\x8c\x019"}, {"crypto/internal/fips140/edwards25519", "%\a\f\x051\x92\x017"},
{"crypto/internal/fips140/edwards25519/field", "%\x13\x041\x8c\x01"}, {"crypto/internal/fips140/edwards25519/field", "%\x13\x051\x92\x01"},
{"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x069"}, {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x06:\x15"},
{"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x017"}, {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x018\x15"},
{"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x041"}, {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x051"},
{"crypto/internal/fips140/nistec", "%\f\a\x041\x8c\x01*\x0f\x13"}, {"crypto/internal/fips140/nistec", "%\f\a\x051\x92\x01*\r\x14"},
{"crypto/internal/fips140/nistec/fiat", "%\x135\x8c\x01"}, {"crypto/internal/fips140/nistec/fiat", "%\x136\x92\x01"},
{"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x069"}, {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x06:\x15"},
{"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x025}H"}, {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x026\x15nF"},
{"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06*\x8c\x01"}, {"crypto/internal/fips140/sha256", "\x03\x1d\x1d\x01\x06*\x15}"},
{"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x010\x8c\x01L"}, {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x05\x010\x92\x01K"},
{"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06*\x8c\x01"}, {"crypto/internal/fips140/sha512", "\x03\x1d\x1d\x01\x06*\x15}"},
{"crypto/internal/fips140/ssh", " \x05"}, {"crypto/internal/fips140/ssh", "%^"},
{"crypto/internal/fips140/subtle", "#"}, {"crypto/internal/fips140/subtle", "#\x1a\xc3\x01"},
{"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x027"}, {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x028\x15"},
{"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\b1"}, {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\t1\x15"},
{"crypto/internal/fips140cache", "\xaa\x02\r&"},
{"crypto/internal/fips140deps", ""}, {"crypto/internal/fips140deps", ""},
{"crypto/internal/fips140deps/byteorder", "\x99\x01"}, {"crypto/internal/fips140deps/byteorder", "\x99\x01"},
{"crypto/internal/fips140deps/cpu", "\xad\x01\a"}, {"crypto/internal/fips140deps/cpu", "\xae\x01\a"},
{"crypto/internal/fips140deps/godebug", "\xb5\x01"}, {"crypto/internal/fips140deps/godebug", "\xb6\x01"},
{"crypto/internal/fips140hash", "5\x1a4\xc2\x01"}, {"crypto/internal/fips140hash", "5\x1b3\xc8\x01"},
{"crypto/internal/fips140only", "'\r\x01\x01M25"}, {"crypto/internal/fips140only", "'\r\x01\x01M3;"},
{"crypto/internal/fips140test", ""}, {"crypto/internal/fips140test", ""},
{"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d#,`N"}, {"crypto/internal/hpke", "\x0e\x01\x01\x03\x053#+gM"},
{"crypto/internal/impl", "\xb0\x02"}, {"crypto/internal/impl", "\xb5\x02"},
{"crypto/internal/randutil", "\xea\x01\x12"}, {"crypto/internal/randutil", "\xf1\x01\x12"},
{"crypto/internal/sysrand", "mi!\x1f\r\x0f\x01\x01\v\x06"}, {"crypto/internal/sysrand", "nn! \r\r\x01\x01\f\x06"},
{"crypto/internal/sysrand/internal/seccomp", "m"}, {"crypto/internal/sysrand/internal/seccomp", "n"},
{"crypto/md5", "\x0e2-\x16\x16`"}, {"crypto/md5", "\x0e3-\x15\x16g"},
{"crypto/mlkem", "/"}, {"crypto/mlkem", "/"},
{"crypto/pbkdf2", "2\r\x01-\x16"}, {"crypto/pbkdf2", "2\x0e\x01-\x15"},
{"crypto/rand", "\x1a\x06\a\x19\x04\x01(}\x0eM"}, {"crypto/rand", "\x1a\x06\a\x1a\x04\x01(\x83\x01\rM"},
{"crypto/rc4", "#\x1d-\xc2\x01"}, {"crypto/rc4", "#\x1e-\xc6\x01"},
{"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1c\x03\x1325\r\x01"}, {"crypto/rsa", "\x0e\f\x01\t\x0f\r\x01\x04\x06\a\x1c\x03\x123;\f\x01"},
{"crypto/sha1", "\x0e\f&-\x16\x16\x14L"}, {"crypto/sha1", "\x0e\f'\x03*\x15\x16\x15R"},
{"crypto/sha256", "\x0e\f\x1aO"}, {"crypto/sha256", "\x0e\f\x1aO"},
{"crypto/sha3", "\x0e'N\xc2\x01"}, {"crypto/sha3", "\x0e'N\xc8\x01"},
{"crypto/sha512", "\x0e\f\x1cM"}, {"crypto/sha512", "\x0e\f\x1cM"},
{"crypto/subtle", "8\x96\x01U"}, {"crypto/subtle", "8\x9b\x01W"},
{"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x14\b5\x16\x16\r\n\x01\x01\x01\x02\x01\f\x06\x02\x01"}, {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\a\x01\r\n\x01\t\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x12\x16\x15\b;\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
{"crypto/tls/internal/fips140tls", " \x93\x02"}, {"crypto/tls/internal/fips140tls", "\x17\xa1\x02"},
{"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x032\x01\x02\t\x01\x01\x01\a\x10\x05\x01\x06\x02\x05\f\x01\x02\r\x02\x01\x01\x02\x03\x01"}, {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x012\x05\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x038\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\x02\x05\n\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
{"crypto/x509/pkix", "c\x06\a\x88\x01G"}, {"crypto/x509/pkix", "d\x06\a\x8d\x01G"},
{"database/sql", "\x03\nJ\x16\x03z\f\x06\"\x05\n\x02\x03\x01\f\x02\x02\x02"}, {"database/sql", "\x03\nK\x16\x03\x80\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
{"database/sql/driver", "\r`\x03\xae\x01\x11\x10"}, {"database/sql/driver", "\ra\x03\xb4\x01\x0f\x11"},
{"debug/buildinfo", "\x03W\x02\x01\x01\b\a\x03`\x18\x02\x01+\x0f "}, {"debug/buildinfo", "\x03X\x02\x01\x01\b\a\x03e\x19\x02\x01+\x0f\x1f"},
{"debug/dwarf", "\x03c\a\x03z1\x13\x01\x01"}, {"debug/dwarf", "\x03d\a\x03\x80\x011\x11\x01\x01"},
{"debug/elf", "\x03\x06P\r\a\x03`\x19\x01,\x19\x01\x15"}, {"debug/elf", "\x03\x06Q\r\a\x03e\x1a\x01,\x17\x01\x16"},
{"debug/gosym", "\x03c\n\xbe\x01\x01\x01\x02"}, {"debug/gosym", "\x03d\n\xc2\x01\x01\x01\x02"},
{"debug/macho", "\x03\x06P\r\n`\x1a,\x19\x01"}, {"debug/macho", "\x03\x06Q\r\ne\x1b,\x17\x01"},
{"debug/pe", "\x03\x06P\r\a\x03`\x1a,\x19\x01\x15"}, {"debug/pe", "\x03\x06Q\r\a\x03e\x1b,\x17\x01\x16"},
{"debug/plan9obj", "f\a\x03`\x1a,"}, {"debug/plan9obj", "g\a\x03e\x1b,"},
{"embed", "m+:\x18\x01T"}, {"embed", "n*@\x19\x01S"},
{"embed/internal/embedtest", ""}, {"embed/internal/embedtest", ""},
{"encoding", ""}, {"encoding", ""},
{"encoding/ascii85", "\xea\x01E"}, {"encoding/ascii85", "\xf1\x01C"},
{"encoding/asn1", "\x03j\x03\x87\x01\x01&\x0f\x02\x01\x0f\x03\x01"}, {"encoding/asn1", "\x03k\x03\x8c\x01\x01'\r\x02\x01\x10\x03\x01"},
{"encoding/base32", "\xea\x01C\x02"}, {"encoding/base32", "\xf1\x01A\x02"},
{"encoding/base64", "\x99\x01QC\x02"}, {"encoding/base64", "\x99\x01XA\x02"},
{"encoding/binary", "m}\r'\x0f\x05"}, {"encoding/binary", "n\x83\x01\f(\r\x05"},
{"encoding/csv", "\x02\x01j\x03zF\x11\x02"}, {"encoding/csv", "\x02\x01k\x03\x80\x01D\x12\x02"},
{"encoding/gob", "\x02_\x05\a\x03`\x1a\f\x01\x02\x1d\b\x14\x01\x0e\x02"}, {"encoding/gob", "\x02`\x05\a\x03e\x1b\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
{"encoding/hex", "m\x03zC\x03"}, {"encoding/hex", "n\x03\x80\x01A\x03"},
{"encoding/json", "\x03\x01]\x04\b\x03z\r'\x0f\x02\x01\x02\x0f\x01\x01\x02"}, {"encoding/json", "\x03\x01^\x04\b\x03\x80\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
{"encoding/pem", "\x03b\b}C\x03"}, {"encoding/pem", "\x03c\b\x83\x01A\x03"},
{"encoding/xml", "\x02\x01^\f\x03z4\x05\f\x01\x02\x0f\x02"}, {"encoding/xml", "\x02\x01_\f\x03\x80\x014\x05\n\x01\x02\x10\x02"},
{"errors", "\xc9\x01|"}, {"errors", "\xca\x01\x81\x01"},
{"expvar", "jK9\t\n\x15\r\n\x02\x03\x01\x10"}, {"expvar", "kK?\b\v\x15\r\b\x02\x03\x01\x11"},
{"flag", "a\f\x03z,\b\x05\n\x02\x01\x0f"}, {"flag", "b\f\x03\x80\x01,\b\x05\b\x02\x01\x10"},
{"fmt", "mE8\r\x1f\b\x0f\x02\x03\x11"}, {"fmt", "nE>\f \b\r\x02\x03\x12"},
{"go/ast", "\x03\x01l\x0f\x01j\x03)\b\x0f\x02\x01"}, {"go/ast", "\x03\x01m\x0e\x01q\x03)\b\r\x02\x01"},
{"go/ast/internal/tests", ""}, {"go/build", "\x02\x01k\x03\x01\x02\x02\a\x02\x01\x17\x1f\x04\x02\t\x19\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
{"go/build", "\x02\x01j\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x12\x01+\x01\x04\x01\a\n\x02\x01\x11\x02\x02"}, {"go/build/constraint", "n\xc6\x01\x01\x12\x02"},
{"go/build/constraint", "m\xc2\x01\x01\x11\x02"}, {"go/constant", "q\x0f}\x01\x024\x01\x02\x12"},
{"go/constant", "p\x10w\x01\x016\x01\x02\x11"}, {"go/doc", "\x04m\x01\x05\t>31\x10\x02\x01\x12\x02"},
{"go/doc", "\x04l\x01\x06\t=-1\x12\x02\x01\x11\x02"}, {"go/doc/comment", "\x03n\xc1\x01\x01\x01\x01\x12\x02"},
{"go/doc/comment", "\x03m\xbd\x01\x01\x01\x01\x11\x02"}, {"go/format", "\x03n\x01\v\x01\x02qD"},
{"go/format", "\x03m\x01\f\x01\x02jF"}, {"go/importer", "s\a\x01\x01\x04\x01p9"},
{"go/importer", "s\a\x01\x01\x04\x01i9"}, {"go/internal/gccgoimporter", "\x02\x01X\x13\x03\x04\v\x01n\x02,\x01\x05\x11\x01\f\b"},
{"go/internal/gccgoimporter", "\x02\x01W\x13\x03\x05\v\x01g\x02,\x01\x05\x13\x01\v\b"}, {"go/internal/gcimporter", "\x02o\x0f\x010\x05\x0e-,\x15\x03\x02"},
{"go/internal/gcimporter", "\x02n\x10\x01/\x05\x0e',\x17\x03\x02"}, {"go/internal/srcimporter", "q\x01\x01\n\x03\x01p,\x01\x05\x12\x02\x14"},
{"go/internal/srcimporter", "p\x01\x02\n\x03\x01i,\x01\x05\x14\x02\x13"}, {"go/parser", "\x03k\x03\x01\x02\v\x01q\x01+\x06\x12"},
{"go/parser", "\x03j\x03\x01\x03\v\x01j\x01+\x06\x14"}, {"go/printer", "q\x01\x02\x03\tq\f \x15\x02\x01\x02\v\x05\x02"},
{"go/printer", "p\x01\x03\x03\tj\r\x1f\x17\x02\x01\x02\n\x05\x02"}, {"go/scanner", "\x03n\x0fq2\x10\x01\x13\x02"},
{"go/scanner", "\x03m\x10j2\x12\x01\x12\x02"}, {"go/token", "\x04m\x83\x01>\x02\x03\x01\x0f\x02"},
{"go/token", "\x04l\xbd\x01\x02\x03\x01\x0e\x02"}, {"go/types", "\x03\x01\x06d\x03\x01\x03\b\x03\x02\x15\x1f\x061\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
{"go/types", "\x03\x01\x06c\x03\x01\x04\b\x03\x02\x15\x1e\x06+\x04\x03\n%\a\n\x01\x01\x01\x02\x01\x0e\x02\x02"}, {"go/version", "\xbb\x01z"},
{"go/version", "\xba\x01v"}, {"hash", "\xf1\x01"},
{"hash", "\xea\x01"}, {"hash/adler32", "n\x15\x16"},
{"hash/adler32", "m\x16\x16"}, {"hash/crc32", "n\x15\x16\x15\x89\x01\x01\x13"},
{"hash/crc32", "m\x16\x16\x14\x85\x01\x01\x12"}, {"hash/crc64", "n\x15\x16\x9e\x01"},
{"hash/crc64", "m\x16\x16\x99\x01"}, {"hash/fnv", "n\x15\x16g"},
{"hash/fnv", "m\x16\x16`"}, {"hash/maphash", "\x83\x01\x11!\x03\x93\x01"},
{"hash/maphash", "\x94\x01\x05\x1b\x03@N"}, {"html", "\xb5\x02\x02\x12"},
{"html", "\xb0\x02\x02\x11"}, {"html/template", "\x03h\x06\x18-;\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
{"html/template", "\x03g\x06\x19,5\x01\v \x05\x01\x02\x03\x0e\x01\x02\v\x01\x03\x02"}, {"image", "\x02l\x1ee\x0f4\x03\x01"},
{"image", "\x02k\x1f^\x0f6\x03\x01"},
{"image/color", ""}, {"image/color", ""},
{"image/color/palette", "\x8c\x01"}, {"image/color/palette", "\x8c\x01"},
{"image/draw", "\x8b\x01\x01\x04"}, {"image/draw", "\x8b\x01\x01\x04"},
{"image/gif", "\x02\x01\x05e\x03\x1b\x01\x01\x01\vQ"}, {"image/gif", "\x02\x01\x05f\x03\x1a\x01\x01\x01\vX"},
{"image/internal/imageutil", "\x8b\x01"}, {"image/internal/imageutil", "\x8b\x01"},
{"image/jpeg", "\x02k\x1e\x01\x04Z"}, {"image/jpeg", "\x02l\x1d\x01\x04a"},
{"image/png", "\x02\a]\n\x13\x02\x06\x01^E"}, {"image/png", "\x02\a^\n\x12\x02\x06\x01eC"},
{"index/suffixarray", "\x03c\a}\r*\f\x01"}, {"index/suffixarray", "\x03d\a\x83\x01\f+\n\x01"},
{"internal/abi", "\xb4\x01\x91\x01"}, {"internal/abi", "\xb5\x01\x96\x01"},
{"internal/asan", "\xc5\x02"}, {"internal/asan", "\xcb\x02"},
{"internal/bisect", "\xa3\x02\x0f\x01"}, {"internal/bisect", "\xaa\x02\r\x01"},
{"internal/buildcfg", "pG_\x06\x02\x05\f\x01"}, {"internal/buildcfg", "qGe\x06\x02\x05\n\x01"},
{"internal/bytealg", "\xad\x01\x98\x01"}, {"internal/bytealg", "\xae\x01\x9d\x01"},
{"internal/byteorder", ""}, {"internal/byteorder", ""},
{"internal/cfg", ""}, {"internal/cfg", ""},
{"internal/chacha8rand", "\x99\x01\x1b\x91\x01"}, {"internal/cgrouptest", "q[Q\x06\x0f\x02\x01\x04\x01"},
{"internal/chacha8rand", "\x99\x01\x15\a\x96\x01"},
{"internal/copyright", ""}, {"internal/copyright", ""},
{"internal/coverage", ""}, {"internal/coverage", ""},
{"internal/coverage/calloc", ""}, {"internal/coverage/calloc", ""},
{"internal/coverage/cfile", "j\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x01\x1f,\x06\a\f\x01\x03\f\x06"}, {"internal/coverage/cfile", "k\x06\x16\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x02$,\x06\a\n\x01\x03\r\x06"},
{"internal/coverage/cformat", "\x04l-\x04I\f7\x01\x02\f"}, {"internal/coverage/cformat", "\x04m-\x04O\v6\x01\x02\r"},
{"internal/coverage/cmerge", "p-Z"}, {"internal/coverage/cmerge", "q-_"},
{"internal/coverage/decodecounter", "f\n-\v\x02@,\x19\x16"}, {"internal/coverage/decodecounter", "g\n-\v\x02F,\x17\x17"},
{"internal/coverage/decodemeta", "\x02d\n\x17\x16\v\x02@,"}, {"internal/coverage/decodemeta", "\x02e\n\x16\x17\v\x02F,"},
{"internal/coverage/encodecounter", "\x02d\n-\f\x01\x02>\f \x17"}, {"internal/coverage/encodecounter", "\x02e\n-\f\x01\x02D\v!\x15"},
{"internal/coverage/encodemeta", "\x02\x01c\n\x13\x04\x16\r\x02>,/"}, {"internal/coverage/encodemeta", "\x02\x01d\n\x12\x04\x17\r\x02D,."},
{"internal/coverage/pods", "\x04l-y\x06\x05\f\x02\x01"}, {"internal/coverage/pods", "\x04m-\x7f\x06\x05\n\x02\x01"},
{"internal/coverage/rtcov", "\xc5\x02"}, {"internal/coverage/rtcov", "\xcb\x02"},
{"internal/coverage/slicereader", "f\nz["}, {"internal/coverage/slicereader", "g\n\x80\x01Z"},
{"internal/coverage/slicewriter", "pz"}, {"internal/coverage/slicewriter", "q\x80\x01"},
{"internal/coverage/stringtab", "p8\x04>"}, {"internal/coverage/stringtab", "q8\x04D"},
{"internal/coverage/test", ""}, {"internal/coverage/test", ""},
{"internal/coverage/uleb128", ""}, {"internal/coverage/uleb128", ""},
{"internal/cpu", "\xc5\x02"}, {"internal/cpu", "\xcb\x02"},
{"internal/dag", "\x04l\xbd\x01\x03"}, {"internal/dag", "\x04m\xc1\x01\x03"},
{"internal/diff", "\x03m\xbe\x01\x02"}, {"internal/diff", "\x03n\xc2\x01\x02"},
{"internal/exportdata", "\x02\x01j\x03\x03]\x1a,\x01\x05\x13\x01\x02"}, {"internal/exportdata", "\x02\x01k\x03\x02c\x1b,\x01\x05\x11\x01\x02"},
{"internal/filepathlite", "m+:\x19B"}, {"internal/filepathlite", "n*@\x1a@"},
{"internal/fmtsort", "\x04\x9a\x02\x0f"}, {"internal/fmtsort", "\x04\xa1\x02\r"},
{"internal/fuzz", "\x03\nA\x18\x04\x03\x03\x01\f\x0355\r\x02\x1d\x01\x05\x02\x05\f\x01\x02\x01\x01\v\x04\x02"}, {"internal/fuzz", "\x03\nB\x18\x04\x03\x03\x01\v\x036;\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
{"internal/goarch", ""}, {"internal/goarch", ""},
{"internal/godebug", "\x96\x01 |\x01\x12"}, {"internal/godebug", "\x96\x01!\x80\x01\x01\x13"},
{"internal/godebugs", ""}, {"internal/godebugs", ""},
{"internal/goexperiment", ""}, {"internal/goexperiment", ""},
{"internal/goos", ""}, {"internal/goos", ""},
{"internal/goroot", "\x96\x02\x01\x05\x14\x02"}, {"internal/goroot", "\x9d\x02\x01\x05\x12\x02"},
{"internal/gover", "\x04"}, {"internal/gover", "\x04"},
{"internal/goversion", ""}, {"internal/goversion", ""},
{"internal/itoa", ""}, {"internal/itoa", ""},
{"internal/lazyregexp", "\x96\x02\v\x0f\x02"}, {"internal/lazyregexp", "\x9d\x02\v\r\x02"},
{"internal/lazytemplate", "\xea\x01,\x1a\x02\v"}, {"internal/lazytemplate", "\xf1\x01,\x18\x02\f"},
{"internal/msan", "\xc5\x02"}, {"internal/msan", "\xcb\x02"},
{"internal/nettrace", ""}, {"internal/nettrace", ""},
{"internal/obscuretestdata", "e\x85\x01,"}, {"internal/obscuretestdata", "f\x8b\x01,"},
{"internal/oserror", "m"}, {"internal/oserror", "n"},
{"internal/pkgbits", "\x03K\x18\a\x03\x05\vj\x0e\x1e\r\f\x01"}, {"internal/pkgbits", "\x03L\x18\a\x03\x04\vq\r\x1f\r\n\x01"},
{"internal/platform", ""}, {"internal/platform", ""},
{"internal/poll", "mO\x1a\x149\x0f\x01\x01\v\x06"}, {"internal/poll", "nO\x1f\x159\r\x01\x01\f\x06"},
{"internal/profile", "\x03\x04f\x03z7\r\x01\x01\x0f"}, {"internal/profile", "\x03\x04g\x03\x80\x017\v\x01\x01\x10"},
{"internal/profilerecord", ""}, {"internal/profilerecord", ""},
{"internal/race", "\x94\x01\xb1\x01"}, {"internal/race", "\x94\x01\xb7\x01"},
{"internal/reflectlite", "\x94\x01 3<\""}, {"internal/reflectlite", "\x94\x01!9<!"},
{"internal/runtime/atomic", "\xc5\x02"}, {"internal/runtime/atomic", "\xb5\x01\x96\x01"},
{"internal/runtime/exithook", "\xca\x01{"}, {"internal/runtime/cgroup", "\x98\x01:\x02w"},
{"internal/runtime/maps", "\x94\x01\x01\x1f\v\t\x05\x01w"}, {"internal/runtime/exithook", "\xcb\x01\x80\x01"},
{"internal/runtime/math", "\xb4\x01"}, {"internal/runtime/gc", "\xb5\x01"},
{"internal/runtime/sys", "\xb4\x01\x04"}, {"internal/runtime/maps", "\x94\x01\x01 \v\t\a\x03x"},
{"internal/runtime/syscall", "\xc5\x02"}, {"internal/runtime/math", "\xb5\x01"},
{"internal/saferio", "\xea\x01["}, {"internal/runtime/startlinetest", ""},
{"internal/singleflight", "\xb2\x02"}, {"internal/runtime/strconv", "\xd0\x01"},
{"internal/stringslite", "\x98\x01\xad\x01"}, {"internal/runtime/sys", "\xb5\x01\x04"},
{"internal/sync", "\x94\x01 \x14k\x12"}, {"internal/runtime/syscall", "\xb5\x01\x96\x01"},
{"internal/synctest", "\xc5\x02"}, {"internal/runtime/wasitest", ""},
{"internal/syscall/execenv", "\xb4\x02"}, {"internal/saferio", "\xf1\x01Z"},
{"internal/syscall/unix", "\xa3\x02\x10\x01\x11"}, {"internal/singleflight", "\xb7\x02"},
{"internal/sysinfo", "\x02\x01\xaa\x01=,\x1a\x02"}, {"internal/stringslite", "\x98\x01\xb3\x01"},
{"internal/sync", "\x94\x01!\x14o\x13"},
{"internal/synctest", "\x94\x01\xb7\x01"},
{"internal/syscall/execenv", "\xb9\x02"},
{"internal/syscall/unix", "\xaa\x02\x0e\x01\x12"},
{"internal/sysinfo", "\x02\x01\xab\x01C,\x18\x02"},
{"internal/syslist", ""}, {"internal/syslist", ""},
{"internal/testenv", "\x03\n`\x02\x01*\x1a\x10'+\x01\x05\a\f\x01\x02\x02\x01\n"}, {"internal/testenv", "\x03\na\x02\x01)\x1b\x10-+\x01\x05\a\n\x01\x02\x02\x01\v"},
{"internal/testlog", "\xb2\x02\x01\x12"}, {"internal/testhash", "\x03\x80\x01n\x118\v"},
{"internal/testpty", "m\x03\xa6\x01"}, {"internal/testlog", "\xb7\x02\x01\x13"},
{"internal/trace", "\x02\x01\x01\x06\\\a\x03n\x03\x03\x06\x03\n6\x01\x02\x0f\x06"}, {"internal/testpty", "n\x03\xac\x01"},
{"internal/trace/internal/testgen", "\x03c\nl\x03\x02\x03\x011\v\x0f"}, {"internal/trace", "\x02\x01\x01\x06]\a\x03t\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
{"internal/trace/internal/tracev1", "\x03\x01b\a\x03t\x06\r6\x01"}, {"internal/trace/internal/testgen", "\x03d\nr\x03\x02\x03\x011\v\r\x10"},
{"internal/trace/raw", "\x02d\nq\x03\x06E\x01\x11"}, {"internal/trace/internal/tracev1", "\x03\x01c\a\x03z\x06\f5\x01"},
{"internal/trace/testtrace", "\x02\x01j\x03l\x03\x06\x057\f\x02\x01"}, {"internal/trace/raw", "\x02e\nw\x03\x06C\x01\x12"},
{"internal/trace/testtrace", "\x02\x01k\x03r\x03\x05\x01\x057\n\x02\x01"},
{"internal/trace/tracev2", ""}, {"internal/trace/tracev2", ""},
{"internal/trace/traceviewer", "\x02]\v\x06\x1a<\x16\a\a\x04\t\n\x15\x01\x05\a\f\x01\x02\r"}, {"internal/trace/traceviewer", "\x02^\v\x06\x19=\x1c\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
{"internal/trace/traceviewer/format", ""}, {"internal/trace/traceviewer/format", ""},
{"internal/trace/version", "pq\t"}, {"internal/trace/version", "qw\t"},
{"internal/txtar", "\x03m\xa6\x01\x1a"}, {"internal/txtar", "\x03n\xac\x01\x18"},
{"internal/types/errors", "\xaf\x02"}, {"internal/types/errors", "\xb4\x02"},
{"internal/unsafeheader", "\xc5\x02"}, {"internal/unsafeheader", "\xcb\x02"},
{"internal/xcoff", "Y\r\a\x03`\x1a,\x19\x01"}, {"internal/xcoff", "Z\r\a\x03e\x1b,\x17\x01"},
{"internal/zstd", "f\a\x03z\x0f"}, {"internal/zstd", "g\a\x03\x80\x01\x0f"},
{"io", "m\xc5\x01"}, {"io", "n\xc9\x01"},
{"io/fs", "m+*(1\x12\x12\x04"}, {"io/fs", "n*+.1\x10\x13\x04"},
{"io/ioutil", "\xea\x01\x01+\x17\x03"}, {"io/ioutil", "\xf1\x01\x01+\x15\x03"},
{"iter", "\xc8\x01[\""}, {"iter", "\xc9\x01a!"},
{"log", "pz\x05'\r\x0f\x01\f"}, {"log", "q\x80\x01\x05'\r\r\x01\r"},
{"log/internal", ""}, {"log/internal", ""},
{"log/slog", "\x03\nT\t\x03\x03z\x04\x01\x02\x02\x04'\x05\n\x02\x01\x02\x01\f\x02\x02\x02"}, {"log/slog", "\x03\nU\t\x03\x03\x80\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
{"log/slog/internal", ""}, {"log/slog/internal", ""},
{"log/slog/internal/benchmarks", "\r`\x03z\x06\x03<\x10"}, {"log/slog/internal/benchmarks", "\ra\x03\x80\x01\x06\x03:\x11"},
{"log/slog/internal/buffer", "\xb2\x02"}, {"log/slog/internal/buffer", "\xb7\x02"},
{"log/slog/internal/slogtest", "\xf0\x01"}, {"log/syslog", "n\x03\x84\x01\x12\x16\x18\x02\x0e"},
{"log/syslog", "m\x03~\x12\x16\x1a\x02\r"}, {"maps", "\xf4\x01W"},
{"maps", "\xed\x01X"}, {"math", "\xae\x01RK"},
{"math", "\xad\x01LL"}, {"math/big", "\x03k\x03(\x15C\f\x03\x020\x02\x01\x02\x14"},
{"math/big", "\x03j\x03)\x14=\r\x02\x024\x01\x02\x13"}, {"math/big/internal/asmgen", "\x03\x01m\x8f\x012\x03"},
{"math/bits", "\xc5\x02"}, {"math/bits", "\xcb\x02"},
{"math/cmplx", "\xf7\x01\x02"}, {"math/cmplx", "\xfd\x01\x03"},
{"math/rand", "\xb5\x01B;\x01\x12"}, {"math/rand", "\xb6\x01G:\x01\x13"},
{"math/rand/v2", "m,\x02\\\x02L"}, {"math/rand/v2", "n+\x03a\x03K"},
{"mime", "\x02\x01b\b\x03z\f \x17\x03\x02\x0f\x02"}, {"mime", "\x02\x01c\b\x03\x80\x01\v!\x15\x03\x02\x10\x02"},
{"mime/multipart", "\x02\x01G#\x03E5\f\x01\x06\x02\x15\x02\x06\x11\x02\x01\x15"}, {"mime/multipart", "\x02\x01H#\x03E;\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
{"mime/quotedprintable", "\x02\x01mz"}, {"mime/quotedprintable", "\x02\x01n\x80\x01"},
{"net", "\x04\t`+\x1d\a\x04\x05\f\x01\x04\x14\x01%\x06\r\n\x05\x01\x01\v\x06\a"}, {"net", "\x04\ta*\x1e\a\x04\x05\x11\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
{"net/http", "\x02\x01\x04\x04\x02=\b\x13\x01\a\x03E5\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\n\x01\x01\x01\x02\x01\x01\v\x02\x02\x02\b\x01\x01\x01"}, {"net/http", "\x02\x01\x04\x04\x02>\b\x13\x01\a\x03E;\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
{"net/http/cgi", "\x02P\x1b\x03z\x04\b\n\x01\x13\x01\x01\x01\x04\x01\x05\x02\n\x02\x01\x0f\x0e"}, {"net/http/cgi", "\x02Q\x1b\x03\x80\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
{"net/http/cookiejar", "\x04i\x03\x90\x01\x01\b\f\x18\x03\x02\r\x04"}, {"net/http/cookiejar", "\x04j\x03\x96\x01\x01\b\f\x16\x03\x02\x0e\x04"},
{"net/http/fcgi", "\x02\x01\nY\a\x03z\x16\x01\x01\x14\x1a\x02\r"}, {"net/http/fcgi", "\x02\x01\nZ\a\x03\x80\x01\x16\x01\x01\x14\x18\x02\x0e"},
{"net/http/httptest", "\x02\x01\nE\x02\x1b\x01z\x04\x12\x01\n\t\x02\x19\x01\x02\r\x0e"}, {"net/http/httptest", "\x02\x01\nF\x02\x1b\x01\x80\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
{"net/http/httptrace", "\rEn@\x14\n!"}, {"net/http/httptrace", "\rFnF\x14\n "},
{"net/http/httputil", "\x02\x01\n`\x03z\x04\x0f\x03\x01\x05\x02\x01\v\x01\x1b\x02\r\x0e"}, {"net/http/httputil", "\x02\x01\na\x03\x80\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x0e\x0e"},
{"net/http/internal", "\x02\x01j\x03z"}, {"net/http/internal", "\x02\x01k\x03\x80\x01"},
{"net/http/internal/ascii", "\xb0\x02\x11"}, {"net/http/internal/ascii", "\xb5\x02\x12"},
{"net/http/internal/httpcommon", "\r`\x03\x96\x01\x0e\x01\x19\x01\x01\x02\x1b\x02"}, {"net/http/internal/httpcommon", "\ra\x03\x9c\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
{"net/http/internal/testcert", "\xb0\x02"}, {"net/http/internal/testcert", "\xb5\x02"},
{"net/http/pprof", "\x02\x01\nc\x19,\x11$\x04\x13\x14\x01\r\x06\x03\x01\x02\x01\x0f"}, {"net/http/pprof", "\x02\x01\nd\x18-\x11*\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
{"net/internal/cgotest", ""}, {"net/internal/cgotest", ""},
{"net/internal/socktest", "p\xc2\x01\x02"}, {"net/internal/socktest", "q\xc6\x01\x02"},
{"net/mail", "\x02k\x03z\x04\x0f\x03\x14\x1c\x02\r\x04"}, {"net/mail", "\x02l\x03\x80\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
{"net/netip", "\x04i+\x01#;\x026\x15"}, {"net/netip", "\x04j*\x01$@\x034\x16"},
{"net/rpc", "\x02f\x05\x03\x10\n`\x04\x12\x01\x1d\x0f\x03\x02"}, {"net/rpc", "\x02g\x05\x03\x0f\ng\x04\x12\x01\x1d\r\x03\x02"},
{"net/rpc/jsonrpc", "j\x03\x03z\x16\x11!"}, {"net/rpc/jsonrpc", "k\x03\x03\x80\x01\x16\x11\x1f"},
{"net/smtp", "\x19.\v\x13\b\x03z\x16\x14\x1c"}, {"net/smtp", "\x19/\v\x13\b\x03\x80\x01\x16\x14\x1a"},
{"net/textproto", "\x02\x01j\x03z\r\t/\x01\x02\x13"}, {"net/textproto", "\x02\x01k\x03\x80\x01\f\n-\x01\x02\x14"},
{"net/url", "m\x03\x86\x01%\x12\x02\x01\x15"}, {"net/url", "n\x03\x8b\x01&\x10\x02\x01\x16"},
{"os", "m+\x01\x18\x03\b\t\r\x03\x01\x04\x10\x018\n\x05\x01\x01\v\x06"}, {"os", "n*\x01\x19\x03\b\t\x12\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
{"os/exec", "\x03\n`H \x01\x14\x01+\x06\a\f\x01\x04\v"}, {"os/exec", "\x03\naH%\x01\x15\x01+\x06\a\n\x01\x04\f"},
{"os/exec/internal/fdtest", "\xb4\x02"}, {"os/exec/internal/fdtest", "\xb9\x02"},
{"os/signal", "\r\x89\x02\x17\x05\x02"}, {"os/signal", "\r\x90\x02\x15\x05\x02"},
{"os/user", "\x02\x01j\x03z,\r\f\x01\x02"}, {"os/user", "\x02\x01k\x03\x80\x01,\r\n\x01\x02"},
{"path", "m+\xab\x01"}, {"path", "n*\xb1\x01"},
{"path/filepath", "m+\x19:+\r\n\x03\x04\x0f"}, {"path/filepath", "n*\x1a@+\r\b\x03\x04\x10"},
{"plugin", "m"}, {"plugin", "n"},
{"reflect", "m'\x04\x1c\b\f\x04\x02\x19\x10,\f\x03\x0f\x02\x02"}, {"reflect", "n&\x04\x1d\b\f\x06\x04\x1b\x06\t-\n\x03\x10\x02\x02"},
{"reflect/internal/example1", ""}, {"reflect/internal/example1", ""},
{"reflect/internal/example2", ""}, {"reflect/internal/example2", ""},
{"regexp", "\x03\xe7\x018\v\x02\x01\x02\x0f\x02"}, {"regexp", "\x03\xee\x018\t\x02\x01\x02\x10\x02"},
{"regexp/syntax", "\xad\x02\x01\x01\x01\x11\x02"}, {"regexp/syntax", "\xb2\x02\x01\x01\x01\x02\x10\x02"},
{"runtime", "\x94\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x03\x0fd"}, {"runtime", "\x94\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x02\x01\x01\x04\x10c"},
{"runtime/coverage", "\x9f\x01K"}, {"runtime/coverage", "\xa0\x01Q"},
{"runtime/debug", "pUQ\r\n\x02\x01\x0f\x06"}, {"runtime/debug", "qUW\r\b\x02\x01\x10\x06"},
{"runtime/internal/startlinetest", ""}, {"runtime/metrics", "\xb7\x01F-!"},
{"runtime/internal/wasitest", ""}, {"runtime/pprof", "\x02\x01\x01\x03\x06Z\a\x03#4)\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
{"runtime/metrics", "\xb6\x01A,\""}, {"runtime/race", "\xb0\x02"},
{"runtime/pprof", "\x02\x01\x01\x03\x06Y\a\x03$3#\r\x1f\r\n\x01\x01\x01\x02\x02\b\x03\x06"},
{"runtime/race", "\xab\x02"},
{"runtime/race/internal/amd64v1", ""}, {"runtime/race/internal/amd64v1", ""},
{"runtime/trace", "\rcz9\x0f\x01\x12"}, {"runtime/trace", "\ra\x03w\t9\b\x05\x01\r\x06"},
{"slices", "\x04\xe9\x01\fL"}, {"slices", "\x04\xf0\x01\fK"},
{"sort", "\xc9\x0104"}, {"sort", "\xca\x0162"},
{"strconv", "m+:%\x02J"}, {"strconv", "n*@%\x03I"},
{"strings", "m'\x04:\x18\x03\f9\x0f\x02\x02"}, {"strings", "n&\x04@\x19\x03\f7\x10\x02\x02"},
{"structs", ""}, {"structs", ""},
{"sync", "\xc8\x01\vP\x10\x12"}, {"sync", "\xc9\x01\x10\x01P\x0e\x13"},
{"sync/atomic", "\xc5\x02"}, {"sync/atomic", "\xcb\x02"},
{"syscall", "m(\x03\x01\x1b\b\x03\x03\x06\aT\n\x05\x01\x12"}, {"syscall", "n'\x03\x01\x1c\b\x03\x03\x06\vV\b\x05\x01\x13"},
{"testing", "\x03\n`\x02\x01X\x0f\x13\r\x04\x1b\x06\x02\x05\x02\a\x01\x02\x01\x02\x01\f\x02\x02\x02"}, {"testing", "\x03\na\x02\x01X\x14\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x02\x02"},
{"testing/fstest", "m\x03z\x01\v%\x12\x03\b\a"}, {"testing/fstest", "n\x03\x80\x01\x01\n&\x10\x03\b\b"},
{"testing/internal/testdeps", "\x02\v\xa6\x01'\x10,\x03\x05\x03\b\a\x02\r"}, {"testing/internal/testdeps", "\x02\v\xa7\x01-\x10,\x03\x05\x03\x06\a\x02\x0e"},
{"testing/iotest", "\x03j\x03z\x04"}, {"testing/iotest", "\x03k\x03\x80\x01\x04"},
{"testing/quick", "o\x01\x87\x01\x04#\x12\x0f"}, {"testing/quick", "p\x01\x8c\x01\x05#\x10\x10"},
{"testing/slogtest", "\r`\x03\x80\x01.\x05\x12\n"}, {"testing/slogtest", "\ra\x03\x86\x01.\x05\x10\v"},
{"text/scanner", "\x03mz,+\x02"}, {"testing/synctest", "\xda\x01`\x11"},
{"text/tabwriter", "pzY"}, {"text/scanner", "\x03n\x80\x01,*\x02"},
{"text/template", "m\x03B8\x01\v\x1f\x01\x05\x01\x02\x05\r\x02\f\x03\x02"}, {"text/tabwriter", "q\x80\x01X"},
{"text/template/parse", "\x03m\xb3\x01\f\x01\x11\x02"}, {"text/template", "n\x03B>\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
{"time", "m+\x1d\x1d'*\x0f\x02\x11"}, {"text/template/parse", "\x03n\xb9\x01\n\x01\x12\x02"},
{"time/tzdata", "m\xc7\x01\x11"}, {"time", "n*\x1e\"(*\r\x02\x12"},
{"time/tzdata", "n\xcb\x01\x12"},
{"unicode", ""}, {"unicode", ""},
{"unicode/utf16", ""}, {"unicode/utf16", ""},
{"unicode/utf8", ""}, {"unicode/utf8", ""},
{"unique", "\x94\x01>\x01P\x0f\x13\x12"}, {"unique", "\x94\x01!#\x01Q\r\x01\x13\x12"},
{"unsafe", ""}, {"unsafe", ""},
{"vendor/golang.org/x/crypto/chacha20", "\x10V\a\x8c\x01*'"}, {"vendor/golang.org/x/crypto/chacha20", "\x10W\a\x92\x01*&"},
{"vendor/golang.org/x/crypto/chacha20poly1305", "\x10V\a\xd9\x01\x04\x01\a"}, {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10W\a\xde\x01\x04\x01\a"},
{"vendor/golang.org/x/crypto/cryptobyte", "c\n\x03\x88\x01&!\n"}, {"vendor/golang.org/x/crypto/cryptobyte", "d\n\x03\x8d\x01' \n"},
{"vendor/golang.org/x/crypto/cryptobyte/asn1", ""}, {"vendor/golang.org/x/crypto/cryptobyte/asn1", ""},
{"vendor/golang.org/x/crypto/internal/alias", "\xc5\x02"}, {"vendor/golang.org/x/crypto/internal/alias", "\xcb\x02"},
{"vendor/golang.org/x/crypto/internal/poly1305", "Q\x15\x93\x01"}, {"vendor/golang.org/x/crypto/internal/poly1305", "R\x15\x99\x01"},
{"vendor/golang.org/x/net/dns/dnsmessage", "m"}, {"vendor/golang.org/x/net/dns/dnsmessage", "n"},
{"vendor/golang.org/x/net/http/httpguts", "\x80\x02\x14\x1c\x13\r"}, {"vendor/golang.org/x/net/http/httpguts", "\x87\x02\x14\x1a\x14\r"},
{"vendor/golang.org/x/net/http/httpproxy", "m\x03\x90\x01\x15\x01\x1a\x13\r"}, {"vendor/golang.org/x/net/http/httpproxy", "n\x03\x96\x01\x10\x05\x01\x18\x14\r"},
{"vendor/golang.org/x/net/http2/hpack", "\x03j\x03zH"}, {"vendor/golang.org/x/net/http2/hpack", "\x03k\x03\x80\x01F"},
{"vendor/golang.org/x/net/idna", "p\x87\x019\x13\x10\x02\x01"}, {"vendor/golang.org/x/net/idna", "q\x8c\x018\x14\x10\x02\x01"},
{"vendor/golang.org/x/net/nettest", "\x03c\a\x03z\x11\x05\x16\x01\f\f\x01\x02\x02\x01\n"}, {"vendor/golang.org/x/net/nettest", "\x03d\a\x03\x80\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
{"vendor/golang.org/x/sys/cpu", "\x96\x02\r\f\x01\x15"}, {"vendor/golang.org/x/sys/cpu", "\x9d\x02\r\n\x01\x16"},
{"vendor/golang.org/x/text/secure/bidirule", "m\xd6\x01\x11\x01"}, {"vendor/golang.org/x/text/secure/bidirule", "n\xdb\x01\x11\x01"},
{"vendor/golang.org/x/text/transform", "\x03j}Y"}, {"vendor/golang.org/x/text/transform", "\x03k\x83\x01X"},
{"vendor/golang.org/x/text/unicode/bidi", "\x03\be~@\x15"}, {"vendor/golang.org/x/text/unicode/bidi", "\x03\bf\x84\x01>\x16"},
{"vendor/golang.org/x/text/unicode/norm", "f\nzH\x11\x11"}, {"vendor/golang.org/x/text/unicode/norm", "g\n\x80\x01F\x12\x11"},
{"weak", "\x94\x01\x8f\x01\""}, {"weak", "\x94\x01\x96\x01!"},
} }

View file

@ -502,6 +502,7 @@ var PackageSymbols = map[string][]Symbol{
{"MD4", Const, 0, ""}, {"MD4", Const, 0, ""},
{"MD5", Const, 0, ""}, {"MD5", Const, 0, ""},
{"MD5SHA1", Const, 0, ""}, {"MD5SHA1", Const, 0, ""},
{"MessageSigner", Type, 25, ""},
{"PrivateKey", Type, 0, ""}, {"PrivateKey", Type, 0, ""},
{"PublicKey", Type, 2, ""}, {"PublicKey", Type, 2, ""},
{"RIPEMD160", Const, 0, ""}, {"RIPEMD160", Const, 0, ""},
@ -517,6 +518,7 @@ var PackageSymbols = map[string][]Symbol{
{"SHA512", Const, 0, ""}, {"SHA512", Const, 0, ""},
{"SHA512_224", Const, 5, ""}, {"SHA512_224", Const, 5, ""},
{"SHA512_256", Const, 5, ""}, {"SHA512_256", Const, 5, ""},
{"SignMessage", Func, 25, "func(signer Signer, rand io.Reader, msg []byte, opts SignerOpts) (signature []byte, err error)"},
{"Signer", Type, 4, ""}, {"Signer", Type, 4, ""},
{"SignerOpts", Type, 4, ""}, {"SignerOpts", Type, 4, ""},
}, },
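
These manifest rows record the new Go 1.25 crypto.MessageSigner/SignMessage API. A hypothetical sketch using the recorded signature follows; the ed25519 pairing and the crypto.Hash(0) ("no prehash") convention are assumptions of this note, not facts from the diff.

package main

import (
	"crypto"
	"crypto/ed25519"
	"crypto/rand"
	"fmt"
)

func main() {
	_, priv, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}
	// Assumption: crypto.Hash(0) requests signing the message directly.
	sig, err := crypto.SignMessage(priv, rand.Reader, []byte("hello"), crypto.Hash(0))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%x\n", sig)
}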
@ -600,10 +602,12 @@ var PackageSymbols = map[string][]Symbol{
{"X25519", Func, 20, "func() Curve"}, {"X25519", Func, 20, "func() Curve"},
}, },
"crypto/ecdsa": { "crypto/ecdsa": {
{"(*PrivateKey).Bytes", Method, 25, ""},
{"(*PrivateKey).ECDH", Method, 20, ""}, {"(*PrivateKey).ECDH", Method, 20, ""},
{"(*PrivateKey).Equal", Method, 15, ""}, {"(*PrivateKey).Equal", Method, 15, ""},
{"(*PrivateKey).Public", Method, 4, ""}, {"(*PrivateKey).Public", Method, 4, ""},
{"(*PrivateKey).Sign", Method, 4, ""}, {"(*PrivateKey).Sign", Method, 4, ""},
{"(*PublicKey).Bytes", Method, 25, ""},
{"(*PublicKey).ECDH", Method, 20, ""}, {"(*PublicKey).ECDH", Method, 20, ""},
{"(*PublicKey).Equal", Method, 15, ""}, {"(*PublicKey).Equal", Method, 15, ""},
{"(PrivateKey).Add", Method, 0, ""}, {"(PrivateKey).Add", Method, 0, ""},
@ -619,6 +623,8 @@ var PackageSymbols = map[string][]Symbol{
{"(PublicKey).ScalarBaseMult", Method, 0, ""}, {"(PublicKey).ScalarBaseMult", Method, 0, ""},
{"(PublicKey).ScalarMult", Method, 0, ""}, {"(PublicKey).ScalarMult", Method, 0, ""},
{"GenerateKey", Func, 0, "func(c elliptic.Curve, rand io.Reader) (*PrivateKey, error)"}, {"GenerateKey", Func, 0, "func(c elliptic.Curve, rand io.Reader) (*PrivateKey, error)"},
{"ParseRawPrivateKey", Func, 25, "func(curve elliptic.Curve, data []byte) (*PrivateKey, error)"},
{"ParseUncompressedPublicKey", Func, 25, "func(curve elliptic.Curve, data []byte) (*PublicKey, error)"},
{"PrivateKey", Type, 0, ""}, {"PrivateKey", Type, 0, ""},
{"PrivateKey.D", Field, 0, ""}, {"PrivateKey.D", Field, 0, ""},
{"PrivateKey.PublicKey", Field, 0, ""}, {"PrivateKey.PublicKey", Field, 0, ""},
@ -815,6 +821,7 @@ var PackageSymbols = map[string][]Symbol{
"crypto/sha3": { "crypto/sha3": {
{"(*SHA3).AppendBinary", Method, 24, ""}, {"(*SHA3).AppendBinary", Method, 24, ""},
{"(*SHA3).BlockSize", Method, 24, ""}, {"(*SHA3).BlockSize", Method, 24, ""},
{"(*SHA3).Clone", Method, 25, ""},
{"(*SHA3).MarshalBinary", Method, 24, ""}, {"(*SHA3).MarshalBinary", Method, 24, ""},
{"(*SHA3).Reset", Method, 24, ""}, {"(*SHA3).Reset", Method, 24, ""},
{"(*SHA3).Size", Method, 24, ""}, {"(*SHA3).Size", Method, 24, ""},
@ -967,6 +974,7 @@ var PackageSymbols = map[string][]Symbol{
{"Config.GetCertificate", Field, 4, ""}, {"Config.GetCertificate", Field, 4, ""},
{"Config.GetClientCertificate", Field, 8, ""}, {"Config.GetClientCertificate", Field, 8, ""},
{"Config.GetConfigForClient", Field, 8, ""}, {"Config.GetConfigForClient", Field, 8, ""},
{"Config.GetEncryptedClientHelloKeys", Field, 25, ""},
{"Config.InsecureSkipVerify", Field, 0, ""}, {"Config.InsecureSkipVerify", Field, 0, ""},
{"Config.KeyLogWriter", Field, 8, ""}, {"Config.KeyLogWriter", Field, 8, ""},
{"Config.MaxVersion", Field, 2, ""}, {"Config.MaxVersion", Field, 2, ""},
@ -5463,6 +5471,7 @@ var PackageSymbols = map[string][]Symbol{
{"ParenExpr.X", Field, 0, ""}, {"ParenExpr.X", Field, 0, ""},
{"Pkg", Const, 0, ""}, {"Pkg", Const, 0, ""},
{"Preorder", Func, 23, "func(root Node) iter.Seq[Node]"}, {"Preorder", Func, 23, "func(root Node) iter.Seq[Node]"},
{"PreorderStack", Func, 25, "func(root Node, stack []Node, f func(n Node, stack []Node) bool)"},
{"Print", Func, 0, "func(fset *token.FileSet, x any) error"}, {"Print", Func, 0, "func(fset *token.FileSet, x any) error"},
{"RECV", Const, 0, ""}, {"RECV", Const, 0, ""},
{"RangeStmt", Type, 0, ""}, {"RangeStmt", Type, 0, ""},
@ -5933,6 +5942,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*File).SetLines", Method, 0, ""}, {"(*File).SetLines", Method, 0, ""},
{"(*File).SetLinesForContent", Method, 0, ""}, {"(*File).SetLinesForContent", Method, 0, ""},
{"(*File).Size", Method, 0, ""}, {"(*File).Size", Method, 0, ""},
{"(*FileSet).AddExistingFiles", Method, 25, ""},
{"(*FileSet).AddFile", Method, 0, ""}, {"(*FileSet).AddFile", Method, 0, ""},
{"(*FileSet).Base", Method, 0, ""}, {"(*FileSet).Base", Method, 0, ""},
{"(*FileSet).File", Method, 0, ""}, {"(*FileSet).File", Method, 0, ""},
@ -6382,7 +6392,7 @@ var PackageSymbols = map[string][]Symbol{
{"Label", Type, 5, ""}, {"Label", Type, 5, ""},
{"LocalVar", Const, 25, ""}, {"LocalVar", Const, 25, ""},
{"LookupFieldOrMethod", Func, 5, "func(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool)"}, {"LookupFieldOrMethod", Func, 5, "func(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool)"},
{"LookupSelection", Func, 25, ""}, {"LookupSelection", Func, 25, "func(T Type, addressable bool, pkg *Package, name string) (Selection, bool)"},
{"Map", Type, 5, ""}, {"Map", Type, 5, ""},
{"MethodExpr", Const, 5, ""}, {"MethodExpr", Const, 5, ""},
{"MethodSet", Type, 5, ""}, {"MethodSet", Type, 5, ""},
@ -6490,9 +6500,11 @@ var PackageSymbols = map[string][]Symbol{
{"Lang", Func, 22, "func(x string) string"}, {"Lang", Func, 22, "func(x string) string"},
}, },
"hash": { "hash": {
{"Cloner", Type, 25, ""},
{"Hash", Type, 0, ""}, {"Hash", Type, 0, ""},
{"Hash32", Type, 0, ""}, {"Hash32", Type, 0, ""},
{"Hash64", Type, 0, ""}, {"Hash64", Type, 0, ""},
{"XOF", Type, 25, ""},
}, },
"hash/adler32": { "hash/adler32": {
{"Checksum", Func, 0, "func(data []byte) uint32"}, {"Checksum", Func, 0, "func(data []byte) uint32"},
@ -6533,6 +6545,7 @@ var PackageSymbols = map[string][]Symbol{
}, },
"hash/maphash": { "hash/maphash": {
{"(*Hash).BlockSize", Method, 14, ""}, {"(*Hash).BlockSize", Method, 14, ""},
{"(*Hash).Clone", Method, 25, ""},
{"(*Hash).Reset", Method, 14, ""}, {"(*Hash).Reset", Method, 14, ""},
{"(*Hash).Seed", Method, 14, ""}, {"(*Hash).Seed", Method, 14, ""},
{"(*Hash).SetSeed", Method, 14, ""}, {"(*Hash).SetSeed", Method, 14, ""},
@ -7133,7 +7146,7 @@ var PackageSymbols = map[string][]Symbol{
{"FormatFileInfo", Func, 21, "func(info FileInfo) string"}, {"FormatFileInfo", Func, 21, "func(info FileInfo) string"},
{"Glob", Func, 16, "func(fsys FS, pattern string) (matches []string, err error)"}, {"Glob", Func, 16, "func(fsys FS, pattern string) (matches []string, err error)"},
{"GlobFS", Type, 16, ""}, {"GlobFS", Type, 16, ""},
{"Lstat", Func, 25, ""}, {"Lstat", Func, 25, "func(fsys FS, name string) (FileInfo, error)"},
{"ModeAppend", Const, 16, ""}, {"ModeAppend", Const, 16, ""},
{"ModeCharDevice", Const, 16, ""}, {"ModeCharDevice", Const, 16, ""},
{"ModeDevice", Const, 16, ""}, {"ModeDevice", Const, 16, ""},
@ -7158,7 +7171,7 @@ var PackageSymbols = map[string][]Symbol{
{"ReadDirFile", Type, 16, ""}, {"ReadDirFile", Type, 16, ""},
{"ReadFile", Func, 16, "func(fsys FS, name string) ([]byte, error)"}, {"ReadFile", Func, 16, "func(fsys FS, name string) ([]byte, error)"},
{"ReadFileFS", Type, 16, ""}, {"ReadFileFS", Type, 16, ""},
{"ReadLink", Func, 25, ""}, {"ReadLink", Func, 25, "func(fsys FS, name string) (string, error)"},
{"ReadLinkFS", Type, 25, ""}, {"ReadLinkFS", Type, 25, ""},
{"SkipAll", Var, 20, ""}, {"SkipAll", Var, 20, ""},
{"SkipDir", Var, 16, ""}, {"SkipDir", Var, 16, ""},
@ -7275,6 +7288,7 @@ var PackageSymbols = map[string][]Symbol{
{"(Record).Attrs", Method, 21, ""}, {"(Record).Attrs", Method, 21, ""},
{"(Record).Clone", Method, 21, ""}, {"(Record).Clone", Method, 21, ""},
{"(Record).NumAttrs", Method, 21, ""}, {"(Record).NumAttrs", Method, 21, ""},
{"(Record).Source", Method, 25, ""},
{"(Value).Any", Method, 21, ""}, {"(Value).Any", Method, 21, ""},
{"(Value).Bool", Method, 21, ""}, {"(Value).Bool", Method, 21, ""},
{"(Value).Duration", Method, 21, ""}, {"(Value).Duration", Method, 21, ""},
@ -7306,6 +7320,7 @@ var PackageSymbols = map[string][]Symbol{
{"Float64", Func, 21, "func(key string, v float64) Attr"}, {"Float64", Func, 21, "func(key string, v float64) Attr"},
{"Float64Value", Func, 21, "func(v float64) Value"}, {"Float64Value", Func, 21, "func(v float64) Value"},
{"Group", Func, 21, "func(key string, args ...any) Attr"}, {"Group", Func, 21, "func(key string, args ...any) Attr"},
{"GroupAttrs", Func, 25, "func(key string, attrs ...Attr) Attr"},
{"GroupValue", Func, 21, "func(as ...Attr) Value"}, {"GroupValue", Func, 21, "func(as ...Attr) Value"},
{"Handler", Type, 21, ""}, {"Handler", Type, 21, ""},
{"HandlerOptions", Type, 21, ""}, {"HandlerOptions", Type, 21, ""},
@ -7916,7 +7931,7 @@ var PackageSymbols = map[string][]Symbol{
{"(*Writer).WriteField", Method, 0, ""}, {"(*Writer).WriteField", Method, 0, ""},
{"ErrMessageTooLarge", Var, 9, ""}, {"ErrMessageTooLarge", Var, 9, ""},
{"File", Type, 0, ""}, {"File", Type, 0, ""},
{"FileContentDisposition", Func, 25, ""}, {"FileContentDisposition", Func, 25, "func(fieldname string, filename string) string"},
{"FileHeader", Type, 0, ""}, {"FileHeader", Type, 0, ""},
{"FileHeader.Filename", Field, 0, ""}, {"FileHeader.Filename", Field, 0, ""},
{"FileHeader.Header", Field, 0, ""}, {"FileHeader.Header", Field, 0, ""},
@ -8294,6 +8309,11 @@ var PackageSymbols = map[string][]Symbol{
{"(*Client).PostForm", Method, 0, ""}, {"(*Client).PostForm", Method, 0, ""},
{"(*Cookie).String", Method, 0, ""}, {"(*Cookie).String", Method, 0, ""},
{"(*Cookie).Valid", Method, 18, ""}, {"(*Cookie).Valid", Method, 18, ""},
{"(*CrossOriginProtection).AddInsecureBypassPattern", Method, 25, ""},
{"(*CrossOriginProtection).AddTrustedOrigin", Method, 25, ""},
{"(*CrossOriginProtection).Check", Method, 25, ""},
{"(*CrossOriginProtection).Handler", Method, 25, ""},
{"(*CrossOriginProtection).SetDenyHandler", Method, 25, ""},
{"(*MaxBytesError).Error", Method, 19, ""}, {"(*MaxBytesError).Error", Method, 19, ""},
{"(*ProtocolError).Error", Method, 0, ""}, {"(*ProtocolError).Error", Method, 0, ""},
{"(*ProtocolError).Is", Method, 21, ""}, {"(*ProtocolError).Is", Method, 21, ""},
@ -8388,6 +8408,7 @@ var PackageSymbols = map[string][]Symbol{
{"Cookie.Unparsed", Field, 0, ""}, {"Cookie.Unparsed", Field, 0, ""},
{"Cookie.Value", Field, 0, ""}, {"Cookie.Value", Field, 0, ""},
{"CookieJar", Type, 0, ""}, {"CookieJar", Type, 0, ""},
{"CrossOriginProtection", Type, 25, ""},
{"DefaultClient", Var, 0, ""}, {"DefaultClient", Var, 0, ""},
{"DefaultMaxHeaderBytes", Const, 0, ""}, {"DefaultMaxHeaderBytes", Const, 0, ""},
{"DefaultMaxIdleConnsPerHost", Const, 0, ""}, {"DefaultMaxIdleConnsPerHost", Const, 0, ""},
@ -8460,6 +8481,7 @@ var PackageSymbols = map[string][]Symbol{
{"MethodPost", Const, 6, ""}, {"MethodPost", Const, 6, ""},
{"MethodPut", Const, 6, ""}, {"MethodPut", Const, 6, ""},
{"MethodTrace", Const, 6, ""}, {"MethodTrace", Const, 6, ""},
{"NewCrossOriginProtection", Func, 25, "func() *CrossOriginProtection"},
{"NewFileTransport", Func, 0, "func(fs FileSystem) RoundTripper"}, {"NewFileTransport", Func, 0, "func(fs FileSystem) RoundTripper"},
{"NewFileTransportFS", Func, 22, "func(fsys fs.FS) RoundTripper"}, {"NewFileTransportFS", Func, 22, "func(fsys fs.FS) RoundTripper"},
{"NewRequest", Func, 0, "func(method string, url string, body io.Reader) (*Request, error)"}, {"NewRequest", Func, 0, "func(method string, url string, body io.Reader) (*Request, error)"},
@ -9174,15 +9196,19 @@ var PackageSymbols = map[string][]Symbol{
{"(*Root).Link", Method, 25, ""}, {"(*Root).Link", Method, 25, ""},
{"(*Root).Lstat", Method, 24, ""}, {"(*Root).Lstat", Method, 24, ""},
{"(*Root).Mkdir", Method, 24, ""}, {"(*Root).Mkdir", Method, 24, ""},
{"(*Root).MkdirAll", Method, 25, ""},
{"(*Root).Name", Method, 24, ""}, {"(*Root).Name", Method, 24, ""},
{"(*Root).Open", Method, 24, ""}, {"(*Root).Open", Method, 24, ""},
{"(*Root).OpenFile", Method, 24, ""}, {"(*Root).OpenFile", Method, 24, ""},
{"(*Root).OpenRoot", Method, 24, ""}, {"(*Root).OpenRoot", Method, 24, ""},
{"(*Root).ReadFile", Method, 25, ""},
{"(*Root).Readlink", Method, 25, ""}, {"(*Root).Readlink", Method, 25, ""},
{"(*Root).Remove", Method, 24, ""}, {"(*Root).Remove", Method, 24, ""},
{"(*Root).RemoveAll", Method, 25, ""},
{"(*Root).Rename", Method, 25, ""}, {"(*Root).Rename", Method, 25, ""},
{"(*Root).Stat", Method, 24, ""}, {"(*Root).Stat", Method, 24, ""},
{"(*Root).Symlink", Method, 25, ""}, {"(*Root).Symlink", Method, 25, ""},
{"(*Root).WriteFile", Method, 25, ""},
{"(*SyscallError).Error", Method, 0, ""}, {"(*SyscallError).Error", Method, 0, ""},
{"(*SyscallError).Timeout", Method, 10, ""}, {"(*SyscallError).Timeout", Method, 10, ""},
{"(*SyscallError).Unwrap", Method, 13, ""}, {"(*SyscallError).Unwrap", Method, 13, ""},
@ -9623,6 +9649,7 @@ var PackageSymbols = map[string][]Symbol{
{"StructTag", Type, 0, ""}, {"StructTag", Type, 0, ""},
{"Swapper", Func, 8, "func(slice any) func(i int, j int)"}, {"Swapper", Func, 8, "func(slice any) func(i int, j int)"},
{"Type", Type, 0, ""}, {"Type", Type, 0, ""},
{"TypeAssert", Func, 25, "func[T any](v Value) (T, bool)"},
{"TypeFor", Func, 22, "func[T any]() Type"}, {"TypeFor", Func, 22, "func[T any]() Type"},
{"TypeOf", Func, 0, "func(i any) Type"}, {"TypeOf", Func, 0, "func(i any) Type"},
{"Uint", Const, 0, ""}, {"Uint", Const, 0, ""},
@ -9909,6 +9936,7 @@ var PackageSymbols = map[string][]Symbol{
{"SetBlockProfileRate", Func, 1, "func(rate int)"}, {"SetBlockProfileRate", Func, 1, "func(rate int)"},
{"SetCPUProfileRate", Func, 0, "func(hz int)"}, {"SetCPUProfileRate", Func, 0, "func(hz int)"},
{"SetCgoTraceback", Func, 7, "func(version int, traceback unsafe.Pointer, context unsafe.Pointer, symbolizer unsafe.Pointer)"}, {"SetCgoTraceback", Func, 7, "func(version int, traceback unsafe.Pointer, context unsafe.Pointer, symbolizer unsafe.Pointer)"},
{"SetDefaultGOMAXPROCS", Func, 25, "func()"},
{"SetFinalizer", Func, 0, "func(obj any, finalizer any)"}, {"SetFinalizer", Func, 0, "func(obj any, finalizer any)"},
{"SetMutexProfileFraction", Func, 8, "func(rate int) int"}, {"SetMutexProfileFraction", Func, 8, "func(rate int) int"},
{"Stack", Func, 0, "func(buf []byte, all bool) int"}, {"Stack", Func, 0, "func(buf []byte, all bool) int"},
@@ -10021,11 +10049,20 @@ var PackageSymbols = map[string][]Symbol{
 {"WriteHeapProfile", Func, 0, "func(w io.Writer) error"},
 },
 "runtime/trace": {
+{"(*FlightRecorder).Enabled", Method, 25, ""},
+{"(*FlightRecorder).Start", Method, 25, ""},
+{"(*FlightRecorder).Stop", Method, 25, ""},
+{"(*FlightRecorder).WriteTo", Method, 25, ""},
 {"(*Region).End", Method, 11, ""},
 {"(*Task).End", Method, 11, ""},
+{"FlightRecorder", Type, 25, ""},
+{"FlightRecorderConfig", Type, 25, ""},
+{"FlightRecorderConfig.MaxBytes", Field, 25, ""},
+{"FlightRecorderConfig.MinAge", Field, 25, ""},
 {"IsEnabled", Func, 11, "func() bool"},
 {"Log", Func, 11, "func(ctx context.Context, category string, message string)"},
 {"Logf", Func, 11, "func(ctx context.Context, category string, format string, args ...any)"},
+{"NewFlightRecorder", Func, 25, "func(cfg FlightRecorderConfig) *FlightRecorder"},
 {"NewTask", Func, 11, "func(pctx context.Context, taskType string) (ctx context.Context, task *Task)"},
 {"Region", Type, 11, ""},
 {"Start", Func, 5, "func(w io.Writer) error"},
@@ -16642,6 +16679,7 @@ var PackageSymbols = map[string][]Symbol{
 {"ValueOf", Func, 0, ""},
 },
 "testing": {
+{"(*B).Attr", Method, 25, ""},
 {"(*B).Chdir", Method, 24, ""},
 {"(*B).Cleanup", Method, 14, ""},
 {"(*B).Context", Method, 24, ""},
@@ -16658,6 +16696,7 @@ var PackageSymbols = map[string][]Symbol{
 {"(*B).Logf", Method, 0, ""},
 {"(*B).Loop", Method, 24, ""},
 {"(*B).Name", Method, 8, ""},
+{"(*B).Output", Method, 25, ""},
 {"(*B).ReportAllocs", Method, 1, ""},
 {"(*B).ReportMetric", Method, 13, ""},
 {"(*B).ResetTimer", Method, 0, ""},
@@ -16674,6 +16713,7 @@ var PackageSymbols = map[string][]Symbol{
 {"(*B).StopTimer", Method, 0, ""},
 {"(*B).TempDir", Method, 15, ""},
 {"(*F).Add", Method, 18, ""},
+{"(*F).Attr", Method, 25, ""},
 {"(*F).Chdir", Method, 24, ""},
 {"(*F).Cleanup", Method, 18, ""},
 {"(*F).Context", Method, 24, ""},
@@ -16689,6 +16729,7 @@ var PackageSymbols = map[string][]Symbol{
 {"(*F).Log", Method, 18, ""},
 {"(*F).Logf", Method, 18, ""},
 {"(*F).Name", Method, 18, ""},
+{"(*F).Output", Method, 25, ""},
 {"(*F).Setenv", Method, 18, ""},
 {"(*F).Skip", Method, 18, ""},
 {"(*F).SkipNow", Method, 18, ""},
@@ -16697,6 +16738,7 @@ var PackageSymbols = map[string][]Symbol{
 {"(*F).TempDir", Method, 18, ""},
 {"(*M).Run", Method, 4, ""},
 {"(*PB).Next", Method, 3, ""},
+{"(*T).Attr", Method, 25, ""},
 {"(*T).Chdir", Method, 24, ""},
 {"(*T).Cleanup", Method, 14, ""},
 {"(*T).Context", Method, 24, ""},
@@ -16712,6 +16754,7 @@ var PackageSymbols = map[string][]Symbol{
 {"(*T).Log", Method, 0, ""},
 {"(*T).Logf", Method, 0, ""},
 {"(*T).Name", Method, 8, ""},
+{"(*T).Output", Method, 25, ""},
 {"(*T).Parallel", Method, 0, ""},
 {"(*T).Run", Method, 7, ""},
 {"(*T).Setenv", Method, 17, ""},
@@ -16834,6 +16877,10 @@ var PackageSymbols = map[string][]Symbol{
 {"Run", Func, 22, "func(t *testing.T, newHandler func(*testing.T) slog.Handler, result func(*testing.T) map[string]any)"},
 {"TestHandler", Func, 21, "func(h slog.Handler, results func() []map[string]any) error"},
 },
+"testing/synctest": {
+{"Test", Func, 25, "func(t *testing.T, f func(*testing.T))"},
+{"Wait", Func, 25, "func()"},
+},
 "text/scanner": {
 {"(*Position).IsValid", Method, 0, ""},
 {"(*Scanner).Init", Method, 0, ""},
@@ -17347,6 +17394,7 @@ var PackageSymbols = map[string][]Symbol{
 {"CaseRange.Lo", Field, 0, ""},
 {"CaseRanges", Var, 0, ""},
 {"Categories", Var, 0, ""},
+{"CategoryAliases", Var, 25, ""},
 {"Caucasian_Albanian", Var, 4, ""},
 {"Cc", Var, 0, ""},
 {"Cf", Var, 0, ""},
@@ -17354,6 +17402,7 @@ var PackageSymbols = map[string][]Symbol{
 {"Cham", Var, 0, ""},
 {"Cherokee", Var, 0, ""},
 {"Chorasmian", Var, 16, ""},
+{"Cn", Var, 25, ""},
 {"Co", Var, 0, ""},
 {"Common", Var, 0, ""},
 {"Coptic", Var, 0, ""},
@@ -17432,6 +17481,7 @@ var PackageSymbols = map[string][]Symbol{
 {"Khojki", Var, 4, ""},
 {"Khudawadi", Var, 4, ""},
 {"L", Var, 0, ""},
+{"LC", Var, 25, ""},
 {"Lao", Var, 0, ""},
 {"Latin", Var, 0, ""},
 {"Lepcha", Var, 0, ""},

View file

@@ -73,8 +73,8 @@ golang.org/x/text/internal/tag
 golang.org/x/text/language
 golang.org/x/text/transform
 golang.org/x/text/unicode/norm
-# golang.org/x/tools v0.36.1-0.20250808220315-8866876b956f
-## explicit; go 1.23.0
+# golang.org/x/tools v0.36.1-0.20250904192731-a09a2fba1c08
+## explicit; go 1.24.0
 golang.org/x/tools/cmd/bisect
 golang.org/x/tools/cover
 golang.org/x/tools/go/analysis
@@ -128,6 +128,8 @@ golang.org/x/tools/internal/analysisinternal
 golang.org/x/tools/internal/analysisinternal/typeindex
 golang.org/x/tools/internal/astutil
 golang.org/x/tools/internal/bisect
+golang.org/x/tools/internal/diff
+golang.org/x/tools/internal/diff/lcs
 golang.org/x/tools/internal/facts
 golang.org/x/tools/internal/fmtstr
 golang.org/x/tools/internal/stdlib