diff --git a/cmd/file2fuzz/main.go b/cmd/file2fuzz/main.go
new file mode 100644
index 00000000000..f0c8939ad0b
--- /dev/null
+++ b/cmd/file2fuzz/main.go
@@ -0,0 +1,132 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// file2fuzz converts binary files, such as those used by go-fuzz, to the Go
+// fuzzing corpus format.
+//
+// Usage:
+//
+//	file2fuzz [-o output] [input...]
+//
+// The default behavior is to read input from stdin and write the converted
+// output to stdout. If any positional arguments are provided, stdin is ignored
+// and the arguments are assumed to be input files to convert.
+//
+// The -o flag provides a path to write output files to. If only one positional
+// argument is specified, it may be a file path or an existing directory; if there
+// are multiple inputs, it must be a directory. If a directory is provided,
+// the name of each output file will be the SHA-256 hash of its contents.
+//
+package main
+
+import (
+	"crypto/sha256"
+	"errors"
+	"flag"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"log"
+	"os"
+	"path/filepath"
+)
+
+// encVersion1 is version 1 Go fuzzer corpus encoding.
+var encVersion1 = "go test fuzz v1"
+
+func encodeByteSlice(b []byte) []byte {
+	return []byte(fmt.Sprintf("%s\n[]byte(%q)", encVersion1, b))
+}
+
+func usage() {
+	fmt.Fprintf(os.Stderr, "usage: file2fuzz [-o output] [input...]\nconverts files to Go fuzzer corpus format\n")
+	fmt.Fprintf(os.Stderr, "\tinput: files to convert\n")
+	fmt.Fprintf(os.Stderr, "\t-o: where to write converted file(s)\n")
+	os.Exit(2)
+}
+func dirWriter(dir string) func([]byte) error {
+	return func(b []byte) error {
+		sum := fmt.Sprintf("%x", sha256.Sum256(b))
+		name := filepath.Join(dir, sum)
+		if err := os.MkdirAll(dir, 0777); err != nil {
+			return err
+		}
+		if err := ioutil.WriteFile(name, b, 0666); err != nil {
+			os.Remove(name)
+			return err
+		}
+		return nil
+	}
+}
+
+func convert(inputArgs []string, outputArg string) error {
+	var input []io.Reader
+	if args := inputArgs; len(args) == 0 {
+		input = []io.Reader{os.Stdin}
+	} else {
+		for _, a := range args {
+			f, err := os.Open(a)
+			if err != nil {
+				return fmt.Errorf("unable to open %q: %s", a, err)
+			}
+			defer f.Close()
+			if fi, err := f.Stat(); err != nil {
+				return fmt.Errorf("unable to stat %q: %s", a, err)
+			} else if fi.IsDir() {
+				return fmt.Errorf("%q is a directory, not a file", a)
+			}
+			input = append(input, f)
+		}
+	}
+
+	var output func([]byte) error
+	if outputArg == "" {
+		if len(inputArgs) > 1 {
+			return errors.New("-o required with multiple input files")
+		}
+		output = func(b []byte) error {
+			_, err := os.Stdout.Write(b)
+			return err
+		}
+	} else {
+		if len(inputArgs) > 1 {
+			output = dirWriter(outputArg)
+		} else {
+			if fi, err := os.Stat(outputArg); err != nil && !os.IsNotExist(err) {
+				return fmt.Errorf("unable to open %q for writing: %s", outputArg, err)
+			} else if err == nil && fi.IsDir() {
+				output = dirWriter(outputArg)
+			} else {
+				output = func(b []byte) error {
+					return ioutil.WriteFile(outputArg, b, 0666)
+				}
+			}
+		}
+	}
+
+	for _, f := range input {
+		b, err := ioutil.ReadAll(f)
+		if err != nil {
+			return fmt.Errorf("unable to read input: %s", err)
+		}
+		if err := output(encodeByteSlice(b)); err != nil {
+			return fmt.Errorf("unable to write output: %s", err)
+		}
+	}
+
+	return nil
+}
+
+func main() {
+	log.SetFlags(0)
+	log.SetPrefix("file2fuzz: ")
+
+	output := flag.String("o", "", "where to write converted file(s)")
+	flag.Usage = usage
+	flag.Parse()
+
+	if err := convert(flag.Args(), *output); err != nil {
+		log.Fatal(err)
+	}
+}
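Not part of the CL: a minimal sketch of the corpus encoding produced by encodeByteSlice and the directory naming used by dirWriter above. Note that the SHA-256 file name is computed over the encoded bytes, not over the raw input.

```go
package main

import (
	"crypto/sha256"
	"fmt"
)

func main() {
	raw := []byte("hello")

	// The v1 corpus encoding: a version header followed by a quoted []byte literal.
	encoded := []byte(fmt.Sprintf("go test fuzz v1\n[]byte(%q)", raw))
	fmt.Printf("%s\n", encoded)

	// When -o names a directory, dirWriter derives the file name from the
	// SHA-256 of the encoded contents (not of the raw input).
	fmt.Printf("%x\n", sha256.Sum256(encoded))
}
```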
diff --git a/cmd/file2fuzz/main_test.go b/cmd/file2fuzz/main_test.go
new file mode 100644
index 00000000000..55d824cf9e5
--- /dev/null
+++ b/cmd/file2fuzz/main_test.go
@@ -0,0 +1,181 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"sync"
+	"testing"
+)
+
+// The setup for this test is mostly cribbed from x/exp/txtar.
+
+var buildBin struct {
+	once sync.Once
+	name string
+	err  error
+}
+
+func binPath(t *testing.T) string {
+	t.Helper()
+	if _, err := exec.LookPath("go"); err != nil {
+		t.Skipf("cannot build file2fuzz binary: %v", err)
+	}
+
+	buildBin.once.Do(func() {
+		exe, err := ioutil.TempFile("", "file2fuzz-*.exe")
+		if err != nil {
+			buildBin.err = err
+			return
+		}
+		exe.Close()
+		buildBin.name = exe.Name()
+
+		cmd := exec.Command("go", "build", "-o", buildBin.name, ".")
+		out, err := cmd.CombinedOutput()
+		if err != nil {
+			buildBin.err = fmt.Errorf("%s: %v\n%s", strings.Join(cmd.Args, " "), err, out)
+		}
+	})
+
+	if buildBin.err != nil {
+		if runtime.GOOS == "android" {
+			t.Skipf("skipping test after failing to build file2fuzz binary: go_android_exec may have failed to copy needed dependencies (see https://golang.org/issue/37088)")
+		}
+		t.Fatal(buildBin.err)
+	}
+	return buildBin.name
+}
+
+func TestMain(m *testing.M) {
+	code := m.Run()
+	if buildBin.name != "" {
+		os.Remove(buildBin.name)
+	}
+	os.Exit(code)
+}
+
+func file2fuzz(t *testing.T, dir string, args []string, stdin string) (string, bool) {
+	t.Helper()
+	cmd := exec.Command(binPath(t), args...)
+	cmd.Dir = dir
+	if stdin != "" {
+		cmd.Stdin = strings.NewReader(stdin)
+	}
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		return string(out), true
+	}
+	return string(out), false
+}
+
+func TestFile2Fuzz(t *testing.T) {
+	type file struct {
+		name    string
+		dir     bool
+		content string
+	}
+	tests := []struct {
+		name           string
+		args           []string
+		stdin          string
+		inputFiles     []file
+		expectedStdout string
+		expectedFiles  []file
+		expectedError  string
+	}{
+		{
+			name:           "stdin, stdout",
+			stdin:          "hello",
+			expectedStdout: "go test fuzz v1\n[]byte(\"hello\")",
+		},
+		{
+			name:          "stdin, output file",
+			stdin:         "hello",
+			args:          []string{"-o", "output"},
+			expectedFiles: []file{{name: "output", content: "go test fuzz v1\n[]byte(\"hello\")"}},
+		},
+		{
+			name:          "stdin, output directory",
+			stdin:         "hello",
+			args:          []string{"-o", "output"},
+			inputFiles:    []file{{name: "output", dir: true}},
+			expectedFiles: []file{{name: "output/ffc7b87a0377262d4f77926bd235551d78e6037bbe970d81ec39ac1d95542f7b", content: "go test fuzz v1\n[]byte(\"hello\")"}},
+		},
+		{
+			name:          "input file, output file",
+			args:          []string{"-o", "output", "input"},
+			inputFiles:    []file{{name: "input", content: "hello"}},
+			expectedFiles: []file{{name: "output", content: "go test fuzz v1\n[]byte(\"hello\")"}},
+		},
+		{
+			name:          "input file, output directory",
+			args:          []string{"-o", "output", "input"},
+			inputFiles:    []file{{name: "output", dir: true}, {name: "input", content: "hello"}},
+			expectedFiles: []file{{name: "output/ffc7b87a0377262d4f77926bd235551d78e6037bbe970d81ec39ac1d95542f7b", content: "go test fuzz v1\n[]byte(\"hello\")"}},
+		},
+		{
+			name:       "input files, output directory",
+			args:       []string{"-o", "output", "input", "input-2"},
+			inputFiles: []file{{name: "output", dir: true}, {name: "input", content: "hello"}, {name: "input-2", content: "hello :)"}},
+			expectedFiles: []file{
+				{name: "output/ffc7b87a0377262d4f77926bd235551d78e6037bbe970d81ec39ac1d95542f7b", content: "go test fuzz v1\n[]byte(\"hello\")"},
+				{name: "output/28059db30ce420ff65b2c29b749804c69c601aeca21b3cbf0644244ff080d7a5", content: "go test fuzz v1\n[]byte(\"hello :)\")"},
+			},
+		},
+		{
+			name:          "input files, no output",
+			args:          []string{"input", "input-2"},
+			inputFiles:    []file{{name: "output", dir: true}, {name: "input", content: "hello"}, {name: "input-2", content: "hello :)"}},
+			expectedError: "file2fuzz: -o required with multiple input files\n",
+		},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			tmp, err := ioutil.TempDir(os.TempDir(), "file2fuzz")
+			if err != nil {
+				t.Fatalf("ioutil.TempDir failed: %s", err)
+			}
+			defer os.RemoveAll(tmp)
+			for _, f := range tc.inputFiles {
+				if f.dir {
+					if err := os.Mkdir(filepath.Join(tmp, f.name), 0777); err != nil {
+						t.Fatalf("failed to create test directory: %s", err)
+					}
+				} else {
+					if err := ioutil.WriteFile(filepath.Join(tmp, f.name), []byte(f.content), 0666); err != nil {
+						t.Fatalf("failed to create test input file: %s", err)
+					}
+				}
+			}
+
+			out, failed := file2fuzz(t, tmp, tc.args, tc.stdin)
+			if failed && tc.expectedError == "" {
+				t.Fatalf("file2fuzz failed unexpectedly: %s", out)
+			} else if failed && out != tc.expectedError {
+				t.Fatalf("file2fuzz returned unexpected error: got %q, want %q", out, tc.expectedError)
+			}
+			if !failed && out != tc.expectedStdout {
+				t.Fatalf("file2fuzz unexpected stdout: got %q, want %q", out, tc.expectedStdout)
+			}
+
+			for _, f := range tc.expectedFiles {
+				c, err := ioutil.ReadFile(filepath.Join(tmp, f.name))
+				if err != nil {
+					t.Fatalf("failed to read expected output file %q: %s", f.name, err)
+				}
+				if string(c) != f.content {
+					t.Fatalf("expected output file %q contains unexpected content: got %s, want %s", f.name, string(c), f.content)
+				}
+			}
+		})
+	}
+}
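For context (not part of the CL): converted corpus entries are consumed by native Go fuzz targets. The package and target names below are hypothetical; a file produced by file2fuzz for this target would typically live under testdata/fuzz/FuzzParse/ in the package being tested.

```go
package parse_test

import "testing"

// FuzzParse is a hypothetical fuzz target whose seed corpus could be populated
// with files converted by file2fuzz. Each corpus entry decodes to the single
// []byte argument of the fuzz function.
func FuzzParse(f *testing.F) {
	f.Add([]byte("hello")) // inline seed, equivalent to a corpus file for "hello"
	f.Fuzz(func(t *testing.T, data []byte) {
		_ = len(data) // exercise the code under test with data
	})
}
```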
diff --git a/copyright/copyright.go b/copyright/copyright.go
index a20d6239cae..4a04d132a54 100644
--- a/copyright/copyright.go
+++ b/copyright/copyright.go
@@ -2,6 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.18
+// +build go1.18
+
 // Package copyright checks that files have the correct copyright notices.
 package copyright
 
@@ -9,8 +12,8 @@ import (
 	"go/ast"
 	"go/parser"
 	"go/token"
+	"io/fs"
 	"io/ioutil"
-	"os"
 	"path/filepath"
 	"regexp"
 	"strings"
@@ -18,13 +21,18 @@ import (
 
 func checkCopyright(dir string) ([]string, error) {
 	var files []string
-	err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
+	err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
 		if err != nil {
 			return err
 		}
-		if info.IsDir() {
+		if d.IsDir() {
 			// Skip directories like ".git".
-			if strings.HasPrefix(info.Name(), ".") {
+			if strings.HasPrefix(d.Name(), ".") {
+				return filepath.SkipDir
+			}
+			// Skip any directory that starts with an underscore, as the go
+			// command would.
+			if strings.HasPrefix(d.Name(), "_") {
 				return filepath.SkipDir
 			}
 			return nil
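A standalone sketch (assumptions: walking the current directory and printing .go files) of the filepath.WalkDir traversal with the dot- and underscore-directory skipping introduced above.

```go
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
	"strings"
)

func main() {
	// Walk the tree rooted at ".", skipping dot- and underscore-prefixed
	// directories the same way checkCopyright now does with filepath.WalkDir.
	err := filepath.WalkDir(".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() && path != "." &&
			(strings.HasPrefix(d.Name(), ".") || strings.HasPrefix(d.Name(), "_")) {
			return filepath.SkipDir // do not descend into .git, _obj, etc.
		}
		if !d.IsDir() && strings.HasSuffix(d.Name(), ".go") {
			fmt.Println(path)
		}
		return nil
	})
	if err != nil {
		fmt.Println("walk error:", err)
	}
}
```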
diff --git a/copyright/copyright_test.go b/copyright/copyright_test.go
index bfab43ca01c..1d63147a19a 100644
--- a/copyright/copyright_test.go
+++ b/copyright/copyright_test.go
@@ -2,6 +2,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+//go:build go1.18
+// +build go1.18
+
 package copyright
 
 import (
diff --git a/go.mod b/go.mod
index 69c5e749264..ff8184facb2 100644
--- a/go.mod
+++ b/go.mod
@@ -3,10 +3,11 @@ module golang.org/x/tools
 go 1.17
 
 require (
-	github.com/yuin/goldmark v1.3.5
+	github.com/yuin/goldmark v1.4.0
 	golang.org/x/mod v0.4.2
-	golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4
+	golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d
 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
-	golang.org/x/sys v0.0.0-20210510120138-977fb7262007
+	golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e
+	golang.org/x/text v0.3.6
 	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
 )
diff --git a/go.sum b/go.sum
index 9dde34b5d0a..b4edbe6be0a 100644
--- a/go.sum
+++ b/go.sum
@@ -1,25 +1,28 @@
-github.com/yuin/goldmark v1.3.5 h1:dPmz1Snjq0kmkz159iL7S6WzdahUTHnHB5M56WFVifs=
-github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.0 h1:OtISOGfH6sOWa1/qXqqAiOIAO6Z5J3AEAE18WAq6BiQ=
+github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0=
-golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI=
+golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE=
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go
index eb0016b18f1..7b82d0b6ddb 100644
--- a/go/analysis/passes/asmdecl/asmdecl.go
+++ b/go/analysis/passes/asmdecl/asmdecl.go
@@ -51,6 +51,11 @@ type asmArch struct {
 	bigEndian bool
 	stack     string
 	lr        bool
+	// retRegs is a list of registers for return values in the register ABI (ABIInternal).
+	// For now, as we only check whether we write to any result, we only need to
+	// include the first integer register and the first floating-point register. Accessing
+	// any of them counts as writing to a result.
+	retRegs []string
 	// calculated during initialization
 	sizes    types.Sizes
 	intSize  int
@@ -79,8 +84,8 @@ type asmVar struct {
 var (
 	asmArch386      = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false}
 	asmArchArm      = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true}
-	asmArchArm64    = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true}
-	asmArchAmd64    = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false}
+	asmArchArm64    = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}}
+	asmArchAmd64    = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}}
 	asmArchMips     = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true}
 	asmArchMipsLE   = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
 	asmArchMips64   = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
@@ -137,7 +142,7 @@ var (
 	asmSP        = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`)
 	asmOpcode    = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`)
 	ppc64Suff    = re(`([BHWD])(ZU|Z|U|BR)?$`)
-	abiSuff      = re(`^(.+)<ABI.+>$`)
+	abiSuff      = re(`^(.+)<(ABI.+)>$`)
 )
 
 func run(pass *analysis.Pass) (interface{}, error) {
@@ -185,6 +190,7 @@ Files:
 		var (
 			fn                 *asmFunc
 			fnName             string
+			abi                string
 			localSize, argSize int
 			wroteSP            bool
 			noframe            bool
@@ -195,18 +201,22 @@ Files:
 		flushRet := func() {
 			if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
 				v := fn.vars["ret"]
+				resultStr := fmt.Sprintf("%d-byte ret+%d(FP)", v.size, v.off)
+				if abi == "ABIInternal" {
+					resultStr = "result register"
+				}
 				for _, line := range retLine {
-					pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %d-byte ret+%d(FP)", arch, fnName, v.size, v.off)
+					pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %s", arch, fnName, resultStr)
 				}
 			}
 			retLine = nil
 		}
-		trimABI := func(fnName string) string {
+		trimABI := func(fnName string) (string, string) {
 			m := abiSuff.FindStringSubmatch(fnName)
 			if m != nil {
-				return m[1]
+				return m[1], m[2]
 			}
-			return fnName
+			return fnName, ""
 		}
 		for lineno, line := range lines {
 			lineno++
@@ -273,11 +283,12 @@ Files:
 						// log.Printf("%s:%d: [%s] cannot check cross-package assembly function: %s is in package %s", fname, lineno, arch, fnName, pkgPath)
 						fn = nil
 						fnName = ""
+						abi = ""
 						continue
 					}
 				}
 				// Trim off optional ABI selector.
-				fnName := trimABI(fnName)
+				fnName, abi = trimABI(fnName)
 				flag := m[3]
 				fn = knownFunc[fnName][arch]
 				if fn != nil {
@@ -305,6 +316,7 @@ Files:
 				flushRet()
 				fn = nil
 				fnName = ""
+				abi = ""
 				continue
 			}
 
@@ -335,6 +347,15 @@ Files:
 				haveRetArg = true
 			}
 
+			if abi == "ABIInternal" && !haveRetArg {
+				for _, reg := range archDef.retRegs {
+					if strings.Contains(line, reg) {
+						haveRetArg = true
+						break
+					}
+				}
+			}
+
 			for _, m := range asmSP.FindAllStringSubmatch(line, -1) {
 				if m[3] != archDef.stack || wroteSP || noframe {
 					continue
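For illustration (not part of the CL), the effect of the second capture group added to abiSuff: trimABI now yields both the base symbol name and its ABI selector, which flushRet uses to choose between the FP-offset wording and the new "result register" wording.

```go
package main

import (
	"fmt"
	"regexp"
)

var abiSuff = regexp.MustCompile(`^(.+)<(ABI.+)>$`)

// trimABI mirrors the updated helper: it strips an optional <ABI...> selector
// and reports which ABI, if any, was named.
func trimABI(fnName string) (name, abi string) {
	if m := abiSuff.FindStringSubmatch(fnName); m != nil {
		return m[1], m[2]
	}
	return fnName, ""
}

func main() {
	fmt.Println(trimABI("·returnABIInternal<ABIInternal>")) // ·returnABIInternal ABIInternal
	fmt.Println(trimABI("·retjmp"))                         // ·retjmp  (no ABI selector)
}
```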
diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm.go b/go/analysis/passes/asmdecl/testdata/src/a/asm.go
index 6bcfb2f3a61..1413b74696f 100644
--- a/go/analysis/passes/asmdecl/testdata/src/a/asm.go
+++ b/go/analysis/passes/asmdecl/testdata/src/a/asm.go
@@ -52,4 +52,7 @@ func pickStableABI(x int)
 func pickInternalABI(x int)
 func pickFutureABI(x int)
 
+func returnABIInternal() int
+func returnmissingABIInternal() int
+
 func retjmp() int
diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm1.s b/go/analysis/passes/asmdecl/testdata/src/a/asm1.s
index 8c43223524d..c3ef9f40fec 100644
--- a/go/analysis/passes/asmdecl/testdata/src/a/asm1.s
+++ b/go/analysis/passes/asmdecl/testdata/src/a/asm1.s
@@ -346,6 +346,14 @@ TEXT ·pickFutureABI<ABISomethingNotyetInvented>(SB), NOSPLIT, $32
 	MOVQ	x+0(FP), AX
 	RET
 
+// writing to result in ABIInternal function
+TEXT ·returnABIInternal<ABIInternal>(SB), NOSPLIT, $32
+	MOVQ	$123, AX
+	RET
+TEXT ·returnmissingABIInternal<ABIInternal>(SB), NOSPLIT, $32
+	MOVQ	$123, CX
+	RET // want `RET without writing to result register`
+
 // return jump
 TEXT ·retjmp(SB), NOSPLIT, $0-8
 	RET	retjmp1(SB) // It's okay to not write results if there's a tail call.
diff --git a/go/analysis/passes/nilness/nilness.go b/go/analysis/passes/nilness/nilness.go
index f0d2c7edfec..2eb782b4276 100644
--- a/go/analysis/passes/nilness/nilness.go
+++ b/go/analysis/passes/nilness/nilness.go
@@ -135,6 +135,11 @@ func runFunc(pass *analysis.Pass, fn *ssa.Function) {
 				if nilnessOf(stack, instr.X) == isnil {
 					reportf("nilpanic", instr.Pos(), "panic with nil value")
 				}
+			case *ssa.SliceToArrayPointer:
+				nn := nilnessOf(stack, instr.X)
+				if nn == isnil && slice2ArrayPtrLen(instr) > 0 {
+					reportf("conversionpanic", instr.Pos(), "nil slice being cast to an array of len > 0 will always panic")
+				}
 			}
 		}
 
@@ -259,6 +264,26 @@ func nilnessOf(stack []fact, v ssa.Value) nilness {
 		if underlying := nilnessOf(stack, v.X); underlying != unknown {
 			return underlying
 		}
+	case *ssa.SliceToArrayPointer:
+		nn := nilnessOf(stack, v.X)
+		if slice2ArrayPtrLen(v) > 0 {
+			if nn == isnil {
+				// We know that *(*[1]byte)(nil) is going to panic because of the
+				// conversion. So return unknown to the caller, prevent useless
+				// nil deference reporting due to * operator.
+				return unknown
+			}
+			// Otherwise, the conversion will yield a non-nil pointer to array.
+			// Note that the instruction can still panic if array length greater
+			// than slice length. If the value is used by another instruction,
+			// that instruction can assume the panic did not happen when that
+			// instruction is reached.
+			return isnonnil
+		}
+		// In case array length is zero, the conversion result depends on nilness of the slice.
+		if nn != unknown {
+			return nn
+		}
 	}
 
 	// Is value intrinsically nil or non-nil?
@@ -292,6 +317,10 @@ func nilnessOf(stack []fact, v ssa.Value) nilness {
 	return unknown
 }
 
+func slice2ArrayPtrLen(v *ssa.SliceToArrayPointer) int64 {
+	return v.Type().(*types.Pointer).Elem().Underlying().(*types.Array).Len()
+}
+
 // If b ends with an equality comparison, eq returns the operation and
 // its true (equal) and false (not equal) successors.
 func eq(b *ssa.BasicBlock) (op *ssa.BinOp, tsucc, fsucc *ssa.BasicBlock) {
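A small runtime sketch (requires the Go 1.17 slice-to-array-pointer conversion) of the two cases nilnessOf now distinguishes: converting a nil slice to a pointer to a zero-length array yields nil without panicking, while converting it to *[1]int panics, which is what the new conversionpanic report flags statically.

```go
package main

import "fmt"

func main() {
	var s []int // nil slice

	// A conversion to a pointer to a zero-length array never panics; for a nil
	// slice it yields a nil pointer, which is why nilnessOf propagates the
	// slice's nilness in the zero-length case.
	p0 := (*[0]int)(s)
	fmt.Println(p0 == nil) // true

	// Converting a nil (or too-short) slice to *[1]int panics at run time.
	defer func() { fmt.Println("recovered:", recover() != nil) }()
	_ = (*[1]int)(s)
}
```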
diff --git a/go/analysis/passes/nilness/nilness_go117_test.go b/go/analysis/passes/nilness/nilness_go117_test.go
new file mode 100644
index 00000000000..1501f4fc77d
--- /dev/null
+++ b/go/analysis/passes/nilness/nilness_go117_test.go
@@ -0,0 +1,20 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.17
+// +build go1.17
+
+package nilness_test
+
+import (
+	"testing"
+
+	"golang.org/x/tools/go/analysis/analysistest"
+	"golang.org/x/tools/go/analysis/passes/nilness"
+)
+
+func TestNilnessGo117(t *testing.T) {
+	testdata := analysistest.TestData()
+	analysistest.Run(t, testdata, nilness.Analyzer, "b")
+}
diff --git a/go/analysis/passes/nilness/testdata/src/b/b.go b/go/analysis/passes/nilness/testdata/src/b/b.go
new file mode 100644
index 00000000000..d31f6fb9047
--- /dev/null
+++ b/go/analysis/passes/nilness/testdata/src/b/b.go
@@ -0,0 +1,32 @@
+package b
+
+func f() {
+	var s []int
+	t := (*[0]int)(s)
+	_ = *t // want "nil dereference in load"
+	_ = (*[0]int)(s)
+	_ = *(*[0]int)(s) // want "nil dereference in load"
+
+	// these operations panic
+	_ = (*[1]int)(s)  // want "nil slice being cast to an array of len > 0 will always panic"
+	_ = *(*[1]int)(s) // want "nil slice being cast to an array of len > 0 will always panic"
+}
+
+func g() {
+	var s = make([]int, 0)
+	t := (*[0]int)(s)
+	println(*t)
+}
+
+func h() {
+	var s = make([]int, 1)
+	t := (*[1]int)(s)
+	println(*t)
+}
+
+func i(x []int) {
+	a := (*[1]int)(x)
+	if a != nil { // want "tautological condition: non-nil != nil"
+		_ = *a
+	}
+}
diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go
index 6589478af0f..de0369a428b 100644
--- a/go/analysis/passes/printf/printf.go
+++ b/go/analysis/passes/printf/printf.go
@@ -490,7 +490,7 @@ func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func,
 		_, ok = isPrint[strings.ToLower(fn.Name())]
 	}
 	if ok {
-		if fn.Name() == "Errorf" {
+		if fn.FullName() == "fmt.Errorf" {
 			kind = KindErrorf
 		} else if strings.HasSuffix(fn.Name(), "f") {
 			kind = KindPrintf
@@ -590,12 +590,9 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F
 		}
 		if state.verb == 'w' {
 			switch kind {
-			case KindNone, KindPrint:
+			case KindNone, KindPrint, KindPrintf:
 				pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", state.name)
 				return
-			case KindPrintf:
-				pass.Reportf(call.Pos(), "%s call has error-wrapping directive %%w, which is only supported for functions backed by fmt.Errorf", state.name)
-				return
 			}
 			if anyW {
 				pass.Reportf(call.Pos(), "%s call has more than one error-wrapping directive %%w", state.name)
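For reference (not part of the CL), the behavioral difference behind the message change: only fmt.Errorf actually wraps errors, so %w in a Printf-like wrapper is now reported as unsupported rather than redirected to fmt.Errorf.

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	base := errors.New("boom")

	// fmt.Errorf understands %w and produces an error that wraps base.
	wrapped := fmt.Errorf("context: %w", base)
	fmt.Println(errors.Is(wrapped, base)) // true

	// Print-style functions do not wrap; after this change the analyzer
	// reports %w as unsupported for them instead of suggesting fmt.Errorf.
	fmt.Printf("context: %w\n", base)
}
```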
diff --git a/go/analysis/passes/printf/testdata/src/a/a.go b/go/analysis/passes/printf/testdata/src/a/a.go
index e27dd054c34..89ef9ba1742 100644
--- a/go/analysis/passes/printf/testdata/src/a/a.go
+++ b/go/analysis/passes/printf/testdata/src/a/a.go
@@ -327,14 +327,26 @@ func PrintfTests() {
 	dbg("", 1) // no error "call has arguments but no formatting directive"
 
 	// %w
-	_ = fmt.Errorf("%w", err)
-	_ = fmt.Errorf("%#w", err)
-	_ = fmt.Errorf("%[2]w %[1]s", "x", err)
-	_ = fmt.Errorf("%[2]w %[1]s", e, "x") // want `fmt.Errorf format %\[2\]w has arg "x" of wrong type string`
-	_ = fmt.Errorf("%w", "x")             // want `fmt.Errorf format %w has arg "x" of wrong type string`
-	_ = fmt.Errorf("%w %w", err, err)     // want `fmt.Errorf call has more than one error-wrapping directive %w`
-	fmt.Printf("%w", err)                 // want `fmt.Printf call has error-wrapping directive %w`
-	Errorf(0, "%w", err)
+	_ = fmt.Errorf("%w", err)               // OK
+	_ = fmt.Errorf("%#w", err)              // OK
+	_ = fmt.Errorf("%[2]w %[1]s", "x", err) // OK
+	_ = fmt.Errorf("%[2]w %[1]s", e, "x")   // want `fmt.Errorf format %\[2\]w has arg "x" of wrong type string`
+	_ = fmt.Errorf("%w", "x")               // want `fmt.Errorf format %w has arg "x" of wrong type string`
+	_ = fmt.Errorf("%w %w", err, err)       // want `fmt.Errorf call has more than one error-wrapping directive %w`
+	fmt.Printf("%w", err)                   // want `fmt.Printf does not support error-wrapping directive %w`
+	var wt *testing.T
+	wt.Errorf("%w", err)          // want `\(\*testing.common\).Errorf does not support error-wrapping directive %w`
+	wt.Errorf("%[1][3]d x", 1, 2) // want `\(\*testing.common\).Errorf format %\[1\]\[ has unknown verb \[`
+	wt.Errorf("%[1]d x", 1, 2)    // OK
+	// Errorf is a printfWrapper, not an errorfWrapper.
+	Errorf(0, "%w", err) // want `a.Errorf does not support error-wrapping directive %w`
+	// %w should work on fmt.Errorf-based wrappers.
+	var es errorfStruct
+	var eis errorfIntStruct
+	var ess errorfStringStruct
+	es.Errorf("%w", err)           // OK
+	eis.Errorf(0, "%w", err)       // OK
+	ess.Errorf("ERROR", "%w", err) // OK
 }
 
 func someString() string { return "X" }
@@ -379,13 +391,36 @@ func printf(format string, args ...interface{}) { // want printf:"printfWrapper"
 
 // Errorf is used by the test for a case in which the first parameter
 // is not a format string.
-func Errorf(i int, format string, args ...interface{}) { // want Errorf:"errorfWrapper"
-	_ = fmt.Errorf(format, args...)
+func Errorf(i int, format string, args ...interface{}) { // want Errorf:"printfWrapper"
+	fmt.Sprintf(format, args...)
 }
 
 // errorf is used by the test for a case in which the function accepts multiple
 // string parameters before variadic arguments
-func errorf(level, format string, args ...interface{}) { // want errorf:"errorfWrapper"
+func errorf(level, format string, args ...interface{}) { // want errorf:"printfWrapper"
+	fmt.Sprintf(format, args...)
+}
+
+type errorfStruct struct{}
+
+// Errorf is used to test %w works on errorf wrappers.
+func (errorfStruct) Errorf(format string, args ...interface{}) { // want Errorf:"errorfWrapper"
+	_ = fmt.Errorf(format, args...)
+}
+
+type errorfStringStruct struct{}
+
+// Errorf is used by the test for a case in which the function accepts multiple
+// string parameters before variadic arguments
+func (errorfStringStruct) Errorf(level, format string, args ...interface{}) { // want Errorf:"errorfWrapper"
+	_ = fmt.Errorf(format, args...)
+}
+
+type errorfIntStruct struct{}
+
+// Errorf is used by the test for a case in which the first parameter
+// is not a format string.
+func (errorfIntStruct) Errorf(i int, format string, args ...interface{}) { // want Errorf:"errorfWrapper"
 	_ = fmt.Errorf(format, args...)
 }
 
diff --git a/go/analysis/passes/testinggoroutine/testdata/src/a/a.go b/go/analysis/passes/testinggoroutine/testdata/src/a/a.go
index 5fe90417c3b..c8fc91bb29b 100644
--- a/go/analysis/passes/testinggoroutine/testdata/src/a/a.go
+++ b/go/analysis/passes/testinggoroutine/testdata/src/a/a.go
@@ -36,28 +36,43 @@ func TestOKErrorf(t *testing.T) {
 	}
 }
 
-func BenchmarkBadFatalf(b *testing.B) {
+func TestBadFatal(t *testing.T) {
 	var wg sync.WaitGroup
 	defer wg.Wait()
 
-	for i := 0; i < b.N; i++ {
+	for i := 0; i < 2; i++ {
 		wg.Add(1)
 		go func(id int) {
 			defer wg.Done()
-			b.Fatalf("TestFailed: id = %v\n", id) // want "call to .+B.+Fatalf from a non-test goroutine"
+			t.Fatal("TestFailed") // want "call to .+T.+Fatal from a non-test goroutine"
 		}(i)
 	}
 }
 
-func TestBadFatal(t *testing.T) {
+func f(t *testing.T, _ string) {
+	t.Fatal("TestFailed")
+}
+
+func g() {}
+
+func TestBadFatalIssue47470(t *testing.T) {
+	go f(t, "failed test 1") // want "call to .+T.+Fatal from a non-test goroutine"
+
+	g := func(t *testing.T, _ string) {
+		t.Fatal("TestFailed")
+	}
+	go g(t, "failed test 2") // want "call to .+T.+Fatal from a non-test goroutine"
+}
+
+func BenchmarkBadFatalf(b *testing.B) {
 	var wg sync.WaitGroup
 	defer wg.Wait()
 
-	for i := 0; i < 2; i++ {
+	for i := 0; i < b.N; i++ {
 		wg.Add(1)
 		go func(id int) {
 			defer wg.Done()
-			t.Fatal("TestFailed") // want "call to .+T.+Fatal from a non-test goroutine"
+			b.Fatalf("TestFailed: id = %v\n", id) // want "call to .+B.+Fatalf from a non-test goroutine"
 		}(i)
 	}
 }
@@ -259,3 +274,7 @@ func TestWithCustomType(t *testing.T) {
 		}(i)
 	}
 }
+
+func TestIssue48124(t *testing.T) {
+	go h()
+}
diff --git a/go/analysis/passes/testinggoroutine/testdata/src/a/b.go b/go/analysis/passes/testinggoroutine/testdata/src/a/b.go
new file mode 100644
index 00000000000..5e95177f404
--- /dev/null
+++ b/go/analysis/passes/testinggoroutine/testdata/src/a/b.go
@@ -0,0 +1,7 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package a
+
+func h() {}
diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine.go b/go/analysis/passes/testinggoroutine/testinggoroutine.go
index d2b9a5640d9..ce05a56cca3 100644
--- a/go/analysis/passes/testinggoroutine/testinggoroutine.go
+++ b/go/analysis/passes/testinggoroutine/testinggoroutine.go
@@ -119,11 +119,33 @@ func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) {
 	return varTypeName, ok
 }
 
+// goStmtFun returns the ast.Node of a call expression
+// that was invoked as a go statement. Currently, only
+// function literals declared in the same function and
+// static calls within the same package are supported.
+func goStmtFun(goStmt *ast.GoStmt) ast.Node {
+	switch goStmt.Call.Fun.(type) {
+	case *ast.Ident:
+		id := goStmt.Call.Fun.(*ast.Ident)
+		// TODO(cuonglm): improve this once golang/go#48141 resolved.
+		if id.Obj == nil {
+			break
+		}
+		if funDecl, ok := id.Obj.Decl.(ast.Node); ok {
+			return funDecl
+		}
+	case *ast.FuncLit:
+		return goStmt.Call.Fun
+	}
+	return goStmt.Call
+}
+
 // checkGoStmt traverses the goroutine and checks for the
 // use of the forbidden *testing.(B, T) methods.
 func checkGoStmt(pass *analysis.Pass, goStmt *ast.GoStmt) {
+	fn := goStmtFun(goStmt)
 	// Otherwise examine the goroutine to check for the forbidden methods.
-	ast.Inspect(goStmt, func(n ast.Node) bool {
+	ast.Inspect(fn, func(n ast.Node) bool {
 		selExpr, ok := n.(*ast.SelectorExpr)
 		if !ok {
 			return true
@@ -147,7 +169,11 @@ func checkGoStmt(pass *analysis.Pass, goStmt *ast.GoStmt) {
 			return true
 		}
 		if typeName, ok := typeIsTestingDotTOrB(field.Type); ok {
-			pass.ReportRangef(selExpr, "call to (*%s).%s from a non-test goroutine", typeName, selExpr.Sel)
+			var fnRange analysis.Range = goStmt
+			if _, ok := fn.(*ast.FuncLit); ok {
+				fnRange = selExpr
+			}
+			pass.ReportRangef(fnRange, "call to (*%s).%s from a non-test goroutine", typeName, selExpr.Sel)
 		}
 		return true
 	})
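A minimal sketch (hypothetical source text) of the resolution step goStmtFun performs for static calls: the *ast.Ident of a `go f(t)` statement is followed through Obj.Decl to the function declaration in the same file, so the body of f can then be inspected instead of only the go statement itself.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `package p

import "testing"

func f(t *testing.T) { t.Fatal("x") }

func TestX(t *testing.T) { go f(t) }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		stmt, ok := n.(*ast.GoStmt)
		if !ok {
			return true
		}
		// Follow `go f(t)` to the declaration of f in the same file, the same
		// way goStmtFun follows Ident.Obj.Decl for static calls.
		if id, ok := stmt.Call.Fun.(*ast.Ident); ok && id.Obj != nil {
			if decl, ok := id.Obj.Decl.(*ast.FuncDecl); ok {
				fmt.Println("go statement calls", decl.Name.Name) // f
			}
		}
		return true
	})
}
```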
diff --git a/go/ast/astutil/rewrite.go b/go/ast/astutil/rewrite.go
index b949fc84079..5fe75b14c75 100644
--- a/go/ast/astutil/rewrite.go
+++ b/go/ast/astutil/rewrite.go
@@ -439,8 +439,10 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
 		}
 
 	default:
-		if typeparams.IsListExpr(n) {
-			a.applyList(n, "ElemList")
+		if ix := typeparams.GetIndexExprData(n); ix != nil {
+			a.apply(n, "X", nil, ix.X)
+			// *ast.IndexExpr was handled above, so n must be an *ast.MultiIndexExpr.
+			a.applyList(n, "Indices")
 		} else {
 			panic(fmt.Sprintf("Apply: unexpected node type %T", n))
 		}
diff --git a/go/ast/astutil/rewrite_test.go b/go/ast/astutil/rewrite_test.go
index 3a74afa0d55..9d23170a5d6 100644
--- a/go/ast/astutil/rewrite_test.go
+++ b/go/ast/astutil/rewrite_test.go
@@ -205,15 +205,18 @@ type R T[int, string]
 `,
 			want: `package p
 
-type T[P1, P2 any] int32
+type S[P1, P2 any] int32
 
-type R T[int32, string]
+type R S[int32, string]
 `,
 			post: func(c *astutil.Cursor) bool {
 				if ident, ok := c.Node().(*ast.Ident); ok {
 					if ident.Name == "int" {
 						c.Replace(ast.NewIdent("int32"))
 					}
+					if ident.Name == "T" {
+						c.Replace(ast.NewIdent("S"))
+					}
 				}
 				return true
 			},
diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go
index 1b7b1052ecf..6c9e6a576d2 100644
--- a/go/callgraph/vta/graph.go
+++ b/go/callgraph/vta/graph.go
@@ -72,7 +72,7 @@ func (mv mapValue) String() string {
 	return fmt.Sprintf("MapValue(%v)", mv.Type())
 }
 
-// sliceElem node for VTA, modeling reachable slice element types.
+// sliceElem node for VTA, modeling reachable slice and array element types.
 type sliceElem struct {
 	typ types.Type
 }
@@ -346,8 +346,11 @@ func (b *builder) instr(instr ssa.Instruction) {
 		b.rtrn(i)
 	case *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeSlice, *ssa.BinOp,
 		*ssa.Alloc, *ssa.DebugRef, *ssa.Convert, *ssa.Jump, *ssa.If,
-		*ssa.Slice, *ssa.Range, *ssa.RunDefers:
+		*ssa.Slice, *ssa.SliceToArrayPointer, *ssa.Range, *ssa.RunDefers:
 		// No interesting flow here.
+		// Notes on individual instructions:
+		// SliceToArrayPointer: t1 = slice to array pointer *[4]T <- []T (t0)
+		// No interesting flow as sliceArrayElem(t1) == sliceArrayElem(t0).
 		return
 	default:
 		panic(fmt.Sprintf("unsupported instruction %v\n", instr))
diff --git a/go/callgraph/vta/helpers_test.go b/go/callgraph/vta/helpers_test.go
index 4451f579f65..0e00aeb28a5 100644
--- a/go/callgraph/vta/helpers_test.go
+++ b/go/callgraph/vta/helpers_test.go
@@ -5,11 +5,14 @@
 package vta
 
 import (
+	"fmt"
 	"go/ast"
 	"go/parser"
 	"io/ioutil"
+	"sort"
 	"strings"
 
+	"golang.org/x/tools/go/callgraph"
 	"golang.org/x/tools/go/ssa/ssautil"
 
 	"golang.org/x/tools/go/loader"
@@ -81,3 +84,31 @@ func funcName(f *ssa.Function) string {
 	tp := recv.Type().String()
 	return tp[strings.LastIndex(tp, ".")+1:] + "." + f.Name()
 }
+
+// callGraphStr stringifies `g` into a list of strings where
+// each entry is of the form
+//   f: cs1 -> f1, f2, ...; ...; csw -> fx, fy, ...
+// f is a function, cs1, ..., csw are call sites in f, and
+// f1, f2, ..., fx, fy, ... are the resolved callees.
+func callGraphStr(g *callgraph.Graph) []string {
+	var gs []string
+	for f, n := range g.Nodes {
+		c := make(map[string][]string)
+		for _, edge := range n.Out {
+			cs := edge.Site.String()
+			c[cs] = append(c[cs], funcName(edge.Callee.Func))
+		}
+
+		var cs []string
+		for site, fs := range c {
+			sort.Strings(fs)
+			entry := fmt.Sprintf("%v -> %v", site, strings.Join(fs, ", "))
+			cs = append(cs, entry)
+		}
+
+		sort.Strings(cs)
+		entry := fmt.Sprintf("%v: %v", funcName(f), strings.Join(cs, "; "))
+		gs = append(gs, entry)
+	}
+	return gs
+}
diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go
index 6c11801a262..4956f652d22 100644
--- a/go/callgraph/vta/propagation.go
+++ b/go/callgraph/vta/propagation.go
@@ -118,12 +118,6 @@ func (ptm propTypeMap) propTypes(n node) map[propType]bool {
 // reaching the node. `canon` is used for type uniqueness.
 func propagate(graph vtaGraph, canon *typeutil.Map) propTypeMap {
 	nodeToScc, sccID := scc(graph)
-	// Initialize sccToTypes to avoid repeated check
-	// for initialization later.
-	sccToTypes := make(map[int]map[propType]bool, sccID)
-	for i := 0; i <= sccID; i++ {
-		sccToTypes[i] = make(map[propType]bool)
-	}
 
 	// We also need the reverse map, from ids to SCCs.
 	sccs := make(map[int][]node, sccID)
@@ -131,14 +125,18 @@ func propagate(graph vtaGraph, canon *typeutil.Map) propTypeMap {
 		sccs[id] = append(sccs[id], n)
 	}
 
+	// Initialize sccToTypes to avoid repeated check
+	// for initialization later.
+	sccToTypes := make(map[int]map[propType]bool, sccID)
+	for i := 0; i <= sccID; i++ {
+		sccToTypes[i] = nodeTypes(sccs[i], canon)
+	}
+
 	for i := len(sccs) - 1; i >= 0; i-- {
-		nodes := sccs[i]
-		// Save the types induced by the nodes of the SCC.
-		mergeTypes(sccToTypes[i], nodeTypes(nodes, canon))
-		nextSccs := make(map[int]bool)
-		for _, node := range nodes {
+		nextSccs := make(map[int]struct{})
+		for _, node := range sccs[i] {
 			for succ := range graph[node] {
-				nextSccs[nodeToScc[succ]] = true
+				nextSccs[nodeToScc[succ]] = struct{}{}
 			}
 		}
 		// Propagate types to all successor SCCs.
@@ -146,7 +144,6 @@ func propagate(graph vtaGraph, canon *typeutil.Map) propTypeMap {
 			mergeTypes(sccToTypes[nextScc], sccToTypes[i])
 		}
 	}
-
 	return propTypeMap{nodeToScc: nodeToScc, sccToTypes: sccToTypes}
 }
 
diff --git a/go/callgraph/vta/testdata/callgraph_fields.go b/go/callgraph/vta/testdata/callgraph_fields.go
new file mode 100644
index 00000000000..00aa649ceec
--- /dev/null
+++ b/go/callgraph/vta/testdata/callgraph_fields.go
@@ -0,0 +1,91 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build ignore
+
+package testdata
+
+type I interface {
+	Foo()
+}
+
+type A struct {
+	I
+}
+
+func (a *A) Do() {
+	a.Foo()
+}
+
+type B struct{}
+
+func (b B) Foo() {}
+
+func NewA(b B) *A {
+	return &A{I: &b}
+}
+
+func Baz(b B) {
+	a := NewA(b)
+	a.Do()
+}
+
+// Relevant SSA:
+// func Baz(b B):
+//        t0 = local B (b)
+//        *t0 = b
+//        t1 = *t0
+//        t2 = NewA(t1)
+//        t3 = (*A).Do(t2)
+//        return
+//
+// func (a *A) Do():
+//        t0 = &a.I [#0]
+//        t1 = *t0
+//        t2 = invoke t1.Foo()
+//        return
+//
+// Name: (testdata.A).Foo
+// Synthetic: wrapper for func (testdata.I).Foo()
+// Location: testdata/callgraph_fields.go:10:2
+// func (arg0 testdata.A) Foo():
+//	  t0 = local testdata.A ()
+//        *t0 = arg0
+//        t1 = &t0.I [#0]
+//        t2 = *t1
+//        t3 = invoke t2.Foo()
+//        return
+//
+// Name: (*testdata.A).Foo
+// Synthetic: wrapper for func (testdata.I).Foo()
+// Location: testdata/callgraph_fields.go:10:2
+// func (arg0 *testdata.A) Foo():
+//        t0 = &arg0.I [#0]
+//        t1 = *t0
+//        t2 = invoke t1.Foo()
+//        return
+//
+// func (b B) Foo():
+//        t0 = local B (b)
+//        *t0 = b
+//        return
+//
+// func (b *testdata.B) Foo():
+//        t0 = ssa:wrapnilchk(b, "testdata.B":string, "Foo":string)
+//        t1 = *t0
+//        t2 = (testdata.B).Foo(t1)
+//        return
+//
+// func NewA(b B) *A:
+//        t0 = new B (b)
+//        *t0 = b
+//        t1 = new A (complit)
+//        t2 = &t1.I [#0]
+//        t3 = make I <- *B (t0)
+//        *t2 = t3
+//        return t1
+
+// WANT:
+// Baz: (*A).Do(t2) -> A.Do; NewA(t1) -> NewA
+// A.Do: invoke t1.Foo() -> B.Foo
diff --git a/go/callgraph/vta/testdata/dynamic_calls.go b/go/callgraph/vta/testdata/dynamic_calls.go
index fa4270b7255..b8c14b238ec 100644
--- a/go/callgraph/vta/testdata/dynamic_calls.go
+++ b/go/callgraph/vta/testdata/dynamic_calls.go
@@ -37,7 +37,13 @@ func Baz(x B, h func() I, i I) I {
 //   t4 = h()
 //   return t4
 
+// Local(t2) has what appear to be duplicate successors. This
+// happens during stringification of the type propagation graph.
+// Due to CHA, we analyze A.foo and *A.foo as well as B.foo
+// and *B.foo, which have similar bodies and hence similar
+// type flow that gets merged together during stringification.
+
 // WANT:
-// Local(t2) -> Local(ai), Local(bi)
+// Local(t2) -> Local(ai), Local(ai), Local(bi), Local(bi)
 // Constant(testdata.I) -> Local(t4)
 // Local(t1) -> Local(t2)
diff --git a/go/callgraph/vta/testdata/go117.go b/go/callgraph/vta/testdata/go117.go
new file mode 100644
index 00000000000..750152e505e
--- /dev/null
+++ b/go/callgraph/vta/testdata/go117.go
@@ -0,0 +1,40 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build ignore
+
+package testdata
+
+type J interface {
+	Foo()
+	Bar()
+}
+
+type B struct {
+	p int
+}
+
+func (b B) Foo() {}
+func (b B) Bar() {}
+
+func Wobble(b *B, s []J) {
+	x := (*[3]J)(s)
+	x[1] = b
+
+	a := &s[2]
+	(*a).Bar()
+}
+
+// Relevant SSA:
+// func Wobble(b *B, s []J):
+//   t0 = slice to array pointer *[3]J <- []J (s)                      *[3]J
+//   t1 = &t0[1:int]                                                      *J
+//   t2 = make J <- *B (b)                                                 J
+//   *t1 = t2
+//   t3 = &s[2:int]                                                       *J
+//   ...
+
+// WANT:
+// Local(t1) -> Slice([]testdata.J)
+// Slice([]testdata.J) -> Local(t1), Local(t3)
diff --git a/go/callgraph/vta/utils.go b/go/callgraph/vta/utils.go
index 69361abcd6f..cabc93be610 100644
--- a/go/callgraph/vta/utils.go
+++ b/go/callgraph/vta/utils.go
@@ -114,10 +114,8 @@ func siteCallees(c ssa.CallInstruction, callgraph *callgraph.Graph) []*ssa.Funct
 	}
 
 	for _, edge := range node.Out {
-		callee := edge.Callee.Func
-		// Skip synthetic functions wrapped around source functions.
-		if edge.Site == c && callee.Synthetic == "" {
-			matches = append(matches, callee)
+		if edge.Site == c {
+			matches = append(matches, edge.Callee.Func)
 		}
 	}
 	return matches
diff --git a/go/callgraph/vta/vta.go b/go/callgraph/vta/vta.go
index 6a0e55d84d1..a350223e09b 100644
--- a/go/callgraph/vta/vta.go
+++ b/go/callgraph/vta/vta.go
@@ -62,10 +62,13 @@ import (
 )
 
 // CallGraph uses the VTA algorithm to compute call graph for all functions
-// f such that f:true is in `funcs`. VTA refines the results of 'initial'
-// callgraph and uses it to establish interprocedural data flow. VTA is
-// sound if 'initial` is sound modulo reflection and unsage. The resulting
-// callgraph does not have a root node.
+// f such that funcs[f] is true. VTA refines the results of the initial call
+// graph and uses it to establish interprocedural type flow. The resulting
+// graph does not have a root node.
+//
+// CallGraph does not make any assumptions about the initial types that global
+// variables and function/method inputs can have. CallGraph is then sound,
+// modulo use of reflection and unsafe, if the initial call graph is sound.
 func CallGraph(funcs map[*ssa.Function]bool, initial *callgraph.Graph) *callgraph.Graph {
 	vtaG, canon := typePropGraph(funcs, initial)
 	types := propagate(vtaG, canon)
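A short usage sketch (not part of the CL; the package and function names are illustrative) showing how a client might seed CallGraph with a CHA call graph, as the tests in this package do. Building the *ssa.Program is elided.

```go
package vtaexample

import (
	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/callgraph/cha"
	"golang.org/x/tools/go/callgraph/vta"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// buildVTAGraph computes a VTA call graph for prog, seeding VTA with a CHA
// call graph as the initial, sound-but-imprecise approximation.
func buildVTAGraph(prog *ssa.Program) *callgraph.Graph {
	return vta.CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
}
```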
diff --git a/go/callgraph/vta/vta_go117_test.go b/go/callgraph/vta/vta_go117_test.go
new file mode 100644
index 00000000000..fae657c407a
--- /dev/null
+++ b/go/callgraph/vta/vta_go117_test.go
@@ -0,0 +1,31 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.17
+// +build go1.17
+
+package vta
+
+import (
+	"testing"
+
+	"golang.org/x/tools/go/callgraph/cha"
+	"golang.org/x/tools/go/ssa/ssautil"
+)
+
+func TestVTACallGraphGo117(t *testing.T) {
+	file := "testdata/go117.go"
+	prog, want, err := testProg(file)
+	if err != nil {
+		t.Fatalf("couldn't load test file '%s': %s", file, err)
+	}
+	if len(want) == 0 {
+		t.Fatalf("couldn't find want in `%s`", file)
+	}
+
+	g, _ := typePropGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
+	if gs := vtaGraphStr(g); !subGraph(want, gs) {
+		t.Errorf("`%s`: want superset of %v;\n got %v", file, want, gs)
+	}
+}
diff --git a/go/callgraph/vta/vta_test.go b/go/callgraph/vta/vta_test.go
index 79ab31cd72d..b0d2de7836a 100644
--- a/go/callgraph/vta/vta_test.go
+++ b/go/callgraph/vta/vta_test.go
@@ -5,46 +5,13 @@
 package vta
 
 import (
-	"fmt"
-	"sort"
-	"strings"
 	"testing"
 
-	"golang.org/x/tools/go/callgraph"
 	"golang.org/x/tools/go/callgraph/cha"
 	"golang.org/x/tools/go/ssa"
-
 	"golang.org/x/tools/go/ssa/ssautil"
 )
 
-// callGraphStr stringifes `g` into a list of strings where
-// each entry is of the form
-//   f: cs1 -> f1, f2, ...; ...; csw -> fx, fy, ...
-// f is a function, cs1, ..., csw are call sites in f, and
-// f1, f2, ..., fx, fy, ... are the resolved callees.
-func callGraphStr(g *callgraph.Graph) []string {
-	var gs []string
-	for f, n := range g.Nodes {
-		c := make(map[string][]string)
-		for _, edge := range n.Out {
-			cs := edge.Site.String()
-			c[cs] = append(c[cs], funcName(edge.Callee.Func))
-		}
-
-		var cs []string
-		for site, fs := range c {
-			sort.Strings(fs)
-			entry := fmt.Sprintf("%v -> %v", site, strings.Join(fs, ", "))
-			cs = append(cs, entry)
-		}
-
-		sort.Strings(cs)
-		entry := fmt.Sprintf("%v: %v", funcName(f), strings.Join(cs, "; "))
-		gs = append(gs, entry)
-	}
-	return gs
-}
-
 func TestVTACallGraph(t *testing.T) {
 	for _, file := range []string{
 		"testdata/callgraph_static.go",
@@ -52,6 +19,7 @@ func TestVTACallGraph(t *testing.T) {
 		"testdata/callgraph_interfaces.go",
 		"testdata/callgraph_pointers.go",
 		"testdata/callgraph_collections.go",
+		"testdata/callgraph_fields.go",
 	} {
 		t.Run(file, func(t *testing.T) {
 			prog, want, err := testProg(file)
diff --git a/go/internal/gcimporter/bimport.go b/go/internal/gcimporter/bimport.go
index e9f73d14a18..b023120001b 100644
--- a/go/internal/gcimporter/bimport.go
+++ b/go/internal/gcimporter/bimport.go
@@ -1029,6 +1029,7 @@ func predeclared() []types.Type {
 			// used internally by gc; never used by this package or in .a files
 			anyType{},
 		}
+		predecl = append(predecl, additionalPredeclared()...)
 	})
 	return predecl
 }
diff --git a/go/internal/gcimporter/iexport_test.go b/go/internal/gcimporter/iexport_test.go
index 53850111c74..b75d5398634 100644
--- a/go/internal/gcimporter/iexport_test.go
+++ b/go/internal/gcimporter/iexport_test.go
@@ -31,6 +31,7 @@ import (
 	"golang.org/x/tools/go/buildutil"
 	"golang.org/x/tools/go/internal/gcimporter"
 	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/internal/testenv"
 )
 
 func readExportFile(filename string) ([]byte, error) {
@@ -63,6 +64,7 @@ func iexport(fset *token.FileSet, pkg *types.Package) ([]byte, error) {
 }
 
 func TestIExportData_stdlib(t *testing.T) {
+	testenv.SkipAfterGo1Point(t, 17)
 	if runtime.Compiler == "gccgo" {
 		t.Skip("gccgo standard library is inaccessible")
 	}
diff --git a/go/internal/gcimporter/iimport.go b/go/internal/gcimporter/iimport.go
index 8ed8bc62d68..6f166d7f5af 100644
--- a/go/internal/gcimporter/iimport.go
+++ b/go/internal/gcimporter/iimport.go
@@ -18,6 +18,8 @@ import (
 	"go/types"
 	"io"
 	"sort"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 type intReader struct {
@@ -41,6 +43,21 @@ func (r *intReader) uint64() uint64 {
 	return i
 }
 
+// Keep this in sync with constants in iexport.go.
+const (
+	iexportVersionGo1_11 = 0
+	iexportVersionPosCol = 1
+	// TODO: before release, change this back to 2.
+	iexportVersionGenerics = iexportVersionPosCol
+
+	iexportVersionCurrent = iexportVersionGenerics
+)
+
+type ident struct {
+	pkg  string
+	name string
+}
+
 const predeclReserved = 32
 
 type itag uint64
@@ -56,6 +73,9 @@ const (
 	signatureType
 	structType
 	interfaceType
+	typeParamType
+	instanceType
+	unionType
 )
 
 // IImportData imports a package from the serialized package data
@@ -101,9 +121,13 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
 
 	version = int64(r.uint64())
 	switch version {
-	case currentVersion, 0:
+	case /* iexportVersionGenerics, */ iexportVersionPosCol, iexportVersionGo1_11:
 	default:
-		errorf("unknown iexport format version %d", version)
+		if version > iexportVersionGenerics {
+			errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
+		} else {
+			errorf("unknown iexport format version %d", version)
+		}
 	}
 
 	sLen := int64(r.uint64())
@@ -115,8 +139,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
 	r.Seek(sLen+dLen, io.SeekCurrent)
 
 	p := iimporter{
-		ipath:   path,
-		version: int(version),
+		exportVersion: version,
+		ipath:         path,
+		version:       int(version),
 
 		stringData:  stringData,
 		stringCache: make(map[uint64]string),
@@ -125,6 +150,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
 		declData: declData,
 		pkgIndex: make(map[*types.Package]map[string]uint64),
 		typCache: make(map[uint64]types.Type),
+		// Separate map for typeparams, keyed by their package and unique
+		// name (name with subscript).
+		tparamIndex: make(map[ident]types.Type),
 
 		fake: fakeFileSet{
 			fset:  fset,
@@ -216,16 +244,18 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
 }
 
 type iimporter struct {
-	ipath   string
-	version int
+	exportVersion int64
+	ipath         string
+	version       int
 
 	stringData  []byte
 	stringCache map[uint64]string
 	pkgCache    map[uint64]*types.Package
 
-	declData []byte
-	pkgIndex map[*types.Package]map[string]uint64
-	typCache map[uint64]types.Type
+	declData    []byte
+	pkgIndex    map[*types.Package]map[string]uint64
+	typCache    map[uint64]types.Type
+	tparamIndex map[ident]types.Type
 
 	fake          fakeFileSet
 	interfaceList []*types.Interface
@@ -315,17 +345,27 @@ func (r *importReader) obj(name string) {
 
 		r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
 
-	case 'F':
+	case 'F', 'G':
+		var tparams []*typeparams.TypeParam
+		if tag == 'G' {
+			tparams = r.tparamList()
+		}
 		sig := r.signature(nil)
-
+		typeparams.SetForSignature(sig, tparams)
 		r.declare(types.NewFunc(pos, r.currPkg, name, sig))
 
-	case 'T':
+	case 'T', 'U':
 		// Types can be recursive. We need to setup a stub
 		// declaration before recursing.
 		obj := types.NewTypeName(pos, r.currPkg, name, nil)
 		named := types.NewNamed(obj, nil, nil)
+		// Declare obj before calling r.tparamList, so the new type name is recognized
+		// if used in the constraint of one of its own typeparams (see #48280).
 		r.declare(obj)
+		if tag == 'U' {
+			tparams := r.tparamList()
+			typeparams.SetForNamed(named, tparams)
+		}
 
 		underlying := r.p.typAt(r.uint64(), named).Underlying()
 		named.SetUnderlying(underlying)
@@ -337,10 +377,46 @@ func (r *importReader) obj(name string) {
 				recv := r.param()
 				msig := r.signature(recv)
 
+				// If the receiver has any targs, set those as the
+				// rparams of the method (since those are the
+				// typeparams being used in the method sig/body).
+				targs := typeparams.NamedTypeArgs(baseType(msig.Recv().Type()))
+				if len(targs) > 0 {
+					rparams := make([]*typeparams.TypeParam, len(targs))
+					for i := range rparams {
+						rparams[i], _ = targs[i].(*typeparams.TypeParam)
+					}
+					typeparams.SetRecvTypeParams(msig, rparams)
+				}
+
 				named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
 			}
 		}
 
+	case 'P':
+		// We need to "declare" a typeparam in order to have a name that
+		// can be referenced recursively (if needed) in the type param's
+		// bound.
+		if r.p.exportVersion < iexportVersionGenerics {
+			errorf("unexpected type param type")
+		}
+		name0, sub := parseSubscript(name)
+		tn := types.NewTypeName(pos, r.currPkg, name0, nil)
+		t := typeparams.NewTypeParam(tn, nil)
+		if sub == 0 {
+			errorf("missing subscript")
+		}
+
+		// TODO(rfindley): can we use a different, stable ID?
+		// t.SetId(sub)
+
+		// To handle recursive references to the typeparam within its
+		// bound, save the partial type in tparamIndex before reading the bounds.
+		id := ident{r.currPkg.Name(), name}
+		r.p.tparamIndex[id] = t
+
+		typeparams.SetTypeParamConstraint(t, r.typ())
+
 	case 'V':
 		typ := r.typ()
 
@@ -618,6 +694,49 @@ func (r *importReader) doType(base *types.Named) types.Type {
 		typ := newInterface(methods, embeddeds)
 		r.p.interfaceList = append(r.p.interfaceList, typ)
 		return typ
+
+	case typeParamType:
+		if r.p.exportVersion < iexportVersionGenerics {
+			errorf("unexpected type param type")
+		}
+		pkg, name := r.qualifiedIdent()
+		id := ident{pkg.Name(), name}
+		if t, ok := r.p.tparamIndex[id]; ok {
+			// We're already in the process of importing this typeparam.
+			return t
+		}
+		// Otherwise, import the definition of the typeparam now.
+		r.p.doDecl(pkg, name)
+		return r.p.tparamIndex[id]
+
+	case instanceType:
+		if r.p.exportVersion < iexportVersionGenerics {
+			errorf("unexpected instantiation type")
+		}
+		// pos does not matter for instances: they are positioned on the original
+		// type.
+		_ = r.pos()
+		len := r.uint64()
+		targs := make([]types.Type, len)
+		for i := range targs {
+			targs[i] = r.typ()
+		}
+		baseType := r.typ()
+		// The imported instantiated type doesn't include any methods, so
+		// we must always use the methods of the base (orig) type.
+		// TODO provide a non-nil *Environment
+		t, _ := typeparams.Instantiate(nil, baseType, targs, false)
+		return t
+
+	case unionType:
+		if r.p.exportVersion < iexportVersionGenerics {
+			errorf("unexpected instantiation type")
+		}
+		terms := make([]*typeparams.Term, r.uint64())
+		for i := range terms {
+			terms[i] = typeparams.NewTerm(r.bool(), r.typ())
+		}
+		return typeparams.NewUnion(terms)
 	}
 }
 
@@ -632,6 +751,20 @@ func (r *importReader) signature(recv *types.Var) *types.Signature {
 	return types.NewSignature(recv, params, results, variadic)
 }
 
+func (r *importReader) tparamList() []*typeparams.TypeParam {
+	n := r.uint64()
+	if n == 0 {
+		return nil
+	}
+	xs := make([]*typeparams.TypeParam, n)
+	for i := range xs {
+		// Note: the standard library importer is tolerant of nil types here,
+		// though it would panic in SetTypeParams.
+		xs[i] = r.typ().(*typeparams.TypeParam)
+	}
+	return xs
+}
+
 func (r *importReader) paramList() *types.Tuple {
 	xs := make([]*types.Var, r.uint64())
 	for i := range xs {
@@ -674,3 +807,33 @@ func (r *importReader) byte() byte {
 	}
 	return x
 }
+
+func baseType(typ types.Type) *types.Named {
+	// pointer receivers are never types.Named types
+	if p, _ := typ.(*types.Pointer); p != nil {
+		typ = p.Elem()
+	}
+	// receiver base types are always (possibly generic) types.Named types
+	n, _ := typ.(*types.Named)
+	return n
+}
+
+func parseSubscript(name string) (string, uint64) {
+	// Extract the subscript value from the type param name. We export
+	// and import the subscript value, so that all type params have
+	// unique names.
+	sub := uint64(0)
+	startsub := -1
+	for i, r := range name {
+		if '₀' <= r && r < '₀'+10 {
+			if startsub == -1 {
+				startsub = i
+			}
+			sub = sub*10 + uint64(r-'₀')
+		}
+	}
+	if startsub >= 0 {
+		name = name[:startsub]
+	}
+	return name, sub
+}
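To illustrate the decoding, parseSubscript splits a subscripted type parameter name into its base name and numeric suffix. A rough, hypothetical test sketch (the subscript digits are the Unicode characters '₀' through '₉'):

```go
func TestParseSubscript(t *testing.T) {
	// "T₁₂" was exported with subscript 12 to keep type param names unique.
	name, sub := parseSubscript("T₁₂")
	if name != "T" || sub != 12 {
		t.Errorf(`parseSubscript("T₁₂") = %q, %d; want "T", 12`, name, sub)
	}
}
```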
diff --git a/go/internal/gcimporter/support_go117.go b/go/internal/gcimporter/support_go117.go
new file mode 100644
index 00000000000..e6403e18a91
--- /dev/null
+++ b/go/internal/gcimporter/support_go117.go
@@ -0,0 +1,14 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package gcimporter
+
+import "go/types"
+
+func additionalPredeclared() []types.Type {
+	return nil
+}
diff --git a/go/internal/gcimporter/support_go118.go b/go/internal/gcimporter/support_go118.go
new file mode 100644
index 00000000000..a5c74856a72
--- /dev/null
+++ b/go/internal/gcimporter/support_go118.go
@@ -0,0 +1,18 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package gcimporter
+
+import "go/types"
+
+// additionalPredeclared returns additional predeclared types in go1.18.
+func additionalPredeclared() []types.Type {
+	return []types.Type{
+		// comparable
+		types.Universe.Lookup("comparable").Type(),
+	}
+}
diff --git a/go/pointer/gen.go b/go/pointer/gen.go
index 5d2d6210fa7..ef5108a5b56 100644
--- a/go/pointer/gen.go
+++ b/go/pointer/gen.go
@@ -791,7 +791,7 @@ func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
 // Some SSA instructions always have singletons points-to sets:
 // 	Alloc, Function, Global, MakeChan, MakeClosure,  MakeInterface,  MakeMap,  MakeSlice.
 // Others may be singletons depending on their operands:
-// 	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
+// 	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice, SliceToArrayPointer.
 //
 // Idempotent.  Objects are created as needed, possibly via recursion
 // down the SSA value graph, e.g IndexAddr(FieldAddr(Alloc))).
@@ -882,6 +882,11 @@ func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
 		case *ssa.Slice:
 			obj = a.objectNode(cgn, v.X)
 
+		case *ssa.SliceToArrayPointer:
+			// Going from a []T to a *[k]T for some k.
+			// A slice []T is treated as if it were a *T pointer.
+			obj = a.objectNode(cgn, v.X)
+
 		case *ssa.Convert:
 			// TODO(adonovan): opt: handle these cases too:
 			// - unsafe.Pointer->*T conversion acts like Alloc
@@ -1030,6 +1035,12 @@ func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
 	case *ssa.Slice:
 		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
 
+	case *ssa.SliceToArrayPointer:
+		// Going from a []T to a *[k]T (for some k) is a single `dst = src` constraint.
+		// Both []T and *[k]T are modelled as an *IdArrayT where IdArrayT is the identity
+		// node for an array of type T, i.e. `type IdArrayT struct{elem T}`.
+		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
+
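To make the new constraint concrete, the sketch below (hypothetical source, not part of the testdata) shows the kind of statement that reaches this case; both s and p end up modelled as pointers to the same identity array node, so a single copy constraint p = s suffices:

```go
var g int

func slice2arr() *[1]*int {
	s := []*int{&g}
	p := (*[1]*int)(s) // compiles to a single *ssa.SliceToArrayPointer
	return p           // p and s point to the same abstract array object
}
```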
 	case *ssa.If, *ssa.Jump:
 		// no-op.
 
@@ -1055,16 +1066,42 @@ func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
 		// Do nothing.  Next{Iter: *ssa.Range} handles this case.
 
 	case *ssa.Next:
-		if !instr.IsString { // map
-			// Assumes that Next is always directly applied to a Range result.
+		if !instr.IsString {
+			// Assumes that Next is always directly applied to a Range result
+			// for a map.
+
+			// Next results in a destination tuple (ok, dk, dv).
+			// Recall a map is modeled as type *M where M = struct{sk K; sv V}.
+			// Next copies from a src map struct{sk K; sv V} to a dst tuple (ok, dk, dv)
+			//
+			// When the key or value is a blank identifier in a range statement, e.g.
+			//   for _, v := range m { ... }
+			// or
+			//   for _, _ = range m { ... }
+			// we skip copying from sk or sv as there is no use. dk and dv will have
+			// Invalid types if they are blank identifiers. This means that the
+			//   size( (ok, dk, dv) )  may differ from 1 + size(struct{sk K; sv V}).
+			//
+			// We encode Next using one load of size sz from an offset in src osrc to an
+			// offset in dst odst. There are 4 cases to consider:
+			//           odst       | osrc     | sz
+			//   k, v  | 1          | 0        | size(sk) + size(sv)
+			//   k, _  | 1          | 0        | size(sk)
+			//   _, v  | 1+size(dk) | size(sk) | size(sv)
+			//   _, _  | 1+size(dk) | size(sk) | 0
+
+			// get the source key and value size.  Note the source types
+			// may be different from the 3-tuple types, but if this is the
+			// case then the source is assignable to the destination.
 			theMap := instr.Iter.(*ssa.Range).X
 			tMap := theMap.Type().Underlying().(*types.Map)
 
-			ksize := a.sizeof(tMap.Key())
-			vsize := a.sizeof(tMap.Elem())
+			sksize := a.sizeof(tMap.Key())
+			svsize := a.sizeof(tMap.Elem())
 
-			// The k/v components of the Next tuple may each be invalid.
+			// get the key size of the destination tuple.
 			tTuple := instr.Type().(*types.Tuple)
+			dksize := a.sizeof(tTuple.At(1).Type())
 
 			// Load from the map's (k,v) into the tuple's (ok, k, v).
 			osrc := uint32(0) // offset within map object
@@ -1073,15 +1110,15 @@ func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
 
 			// Is key valid?
 			if tTuple.At(1).Type() != tInvalid {
-				sz += ksize
+				sz += sksize
 			} else {
-				odst += ksize
-				osrc += ksize
+				odst += dksize
+				osrc += sksize
 			}
 
 			// Is value valid?
 			if tTuple.At(2).Type() != tInvalid {
-				sz += vsize
+				sz += svsize
 			}
 
 			a.genLoad(cgn, a.valueNode(instr)+nodeid(odst), theMap, osrc, sz)
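As a worked instance of the table above, assume abstract sizes size(sk) = 2 and size(sv) = 1 (e.g. a two-pointer struct key and a pointer value); the first two rows then come out as follows:

```go
type K struct{ a, b *int } // abstract size(sk) = 2

var m map[K]*int // abstract size(sv) = 1

func nextCases() {
	for k, v := range m { // k, v row: odst = 1, osrc = 0, sz = 2 + 1 = 3
		_, _ = k, v
	}
	for k := range m { // k, _ row: odst = 1, osrc = 0, sz = 2 (sv is skipped)
		_ = k
	}
}
```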
diff --git a/go/pointer/pointer_go117_test.go b/go/pointer/pointer_go117_test.go
new file mode 100644
index 00000000000..7546a066047
--- /dev/null
+++ b/go/pointer/pointer_go117_test.go
@@ -0,0 +1,41 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// No testdata on Android.
+
+//go:build !android && go1.17
+// +build !android,go1.17
+
+package pointer_test
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"testing"
+)
+
+func TestSliceToArrayPointer(t *testing.T) {
+	// Based on TestInput. Keep this up to date with that.
+	filename := "testdata/arrays_go117.go"
+
+	if testing.Short() {
+		t.Skip("skipping in short mode; this test requires tons of memory; https://golang.org/issue/14113")
+	}
+
+	wd, err := os.Getwd()
+	if err != nil {
+		t.Fatalf("os.Getwd: %s", err)
+	}
+	fmt.Fprintf(os.Stderr, "Entering directory `%s'\n", wd)
+
+	content, err := ioutil.ReadFile(filename)
+	if err != nil {
+		t.Fatalf("couldn't read file '%s': %s", filename, err)
+	}
+
+	if !doOneInput(string(content), filename) {
+		t.Fail()
+	}
+}
diff --git a/go/pointer/testdata/arrays_go117.go b/go/pointer/testdata/arrays_go117.go
new file mode 100644
index 00000000000..7a66f67280c
--- /dev/null
+++ b/go/pointer/testdata/arrays_go117.go
@@ -0,0 +1,173 @@
+//go:build ignore
+// +build ignore
+
+package main
+
+// Forked from arrays.go. Requires go1.17 to parse slice to array casts.
+// TODO(taking): Merge back into arrays.go once we can assume go1.17.
+
+var unknown bool // defeat dead-code elimination
+
+var a, b int
+
+func array1() {
+	sliceA := make([]*int, 10) // @line a1make
+	sliceA[0] = &a
+
+	var sliceB []*int
+	sliceB = append(sliceB, &b) // @line a1append
+
+	print(sliceA)    // @pointsto makeslice@a1make:16
+	print(sliceA[0]) // @pointsto main.a
+
+	print(sliceB)      // @pointsto append@a1append:17
+	print(sliceB[100]) // @pointsto main.b
+}
+
+func array2() {
+	sliceA := make([]*int, 10) // @line a2make
+	sliceA[0] = &a
+
+	sliceB := sliceA[:]
+
+	print(sliceA)    // @pointsto makeslice@a2make:16
+	print(sliceA[0]) // @pointsto main.a
+
+	print(sliceB)    // @pointsto makeslice@a2make:16
+	print(sliceB[0]) // @pointsto main.a
+}
+
+func array3() {
+	a := []interface{}{"", 1}
+	b := []interface{}{true, func() {}}
+	print(a[0]) // @types string | int
+	print(b[0]) // @types bool | func()
+}
+
+// Test of append, copy, slice.
+func array4() {
+	var s2 struct { // @line a4L0
+		a [3]int
+		b struct{ c, d int }
+	}
+	var sl1 = make([]*int, 10) // @line a4make
+	var someint int            // @line a4L1
+	sl1[1] = &someint
+	sl2 := append(sl1, &s2.a[1]) // @line a4append1
+	print(sl1)                   // @pointsto makeslice@a4make:16
+	print(sl2)                   // @pointsto append@a4append1:15 | makeslice@a4make:16
+	print(sl1[0])                // @pointsto someint@a4L1:6 | s2.a[*]@a4L0:6
+	print(sl2[0])                // @pointsto someint@a4L1:6 | s2.a[*]@a4L0:6
+
+	// In z=append(x,y) we should observe flow from y[*] to x[*].
+	var sl3 = make([]*int, 10) // @line a4L2
+	_ = append(sl3, &s2.a[1])
+	print(sl3)    // @pointsto makeslice@a4L2:16
+	print(sl3[0]) // @pointsto s2.a[*]@a4L0:6
+
+	var sl4 = []*int{&a} // @line a4L3
+	sl4a := append(sl4)  // @line a4L4
+	print(sl4a)          // @pointsto slicelit@a4L3:18 | append@a4L4:16
+	print(&sl4a[0])      // @pointsto slicelit[*]@a4L3:18 | append[*]@a4L4:16
+	print(sl4a[0])       // @pointsto main.a
+
+	var sl5 = []*int{&b} // @line a4L5
+	copy(sl5, sl4)
+	print(sl5)     // @pointsto slicelit@a4L5:18
+	print(&sl5[0]) // @pointsto slicelit[*]@a4L5:18
+	print(sl5[0])  // @pointsto main.b | main.a
+
+	var sl6 = sl5[:0]
+	print(sl6)     // @pointsto slicelit@a4L5:18
+	print(&sl6[0]) // @pointsto slicelit[*]@a4L5:18
+	print(sl6[0])  // @pointsto main.b | main.a
+}
+
+func array5() {
+	var arr [2]*int
+	arr[0] = &a
+	arr[1] = &b
+
+	var n int
+	print(arr[n]) // @pointsto main.a | main.b
+}
+
+func array6() {
+	var n int
+
+	sl0 := []*int{&a}
+	ap0 := (*[1]*int)(sl0)
+	ar0 := *ap0
+
+	print(ap0[n]) // @pointsto main.a
+	print(sl0[n]) // @pointsto main.a
+	print(ar0[n]) // @pointsto main.a
+
+	sl1 := []*int{&a}
+	ap1 := (*[1]*int)(sl1)
+	ar1 := *ap1
+
+	ar1[0] = &b
+	print(ap1[n]) // @pointsto main.a
+	print(sl1[n]) // @pointsto main.a
+	print(ar1[n]) // @pointsto main.a | main.b
+
+	sl2 := []*int{&a}
+	ap2 := (*[1]*int)(sl2)
+	ar2 := *ap2
+
+	ap2[0] = &b
+	print(ap2[n]) // @pointsto main.a | main.b
+	print(sl2[n]) // @pointsto main.a | main.b
+	print(ar2[n]) // @pointsto main.a | main.b
+
+	sl3 := []*int{&b, nil}
+	ap3 := (*[1]*int)(sl3)
+	ar3 := *ap3
+
+	print(sl3[n]) // @pointsto main.b
+	print(ap3[n]) // @pointsto main.b
+	print(ar3[n]) // @pointsto main.b
+}
+
+func array7() {
+	var n int
+
+	sl0 := []*int{nil, nil, nil}
+	ap0 := (*[2]*int)(sl0)
+	ap1 := (*[1]*int)(sl0[2:])
+
+	ap1[0] = &a
+
+	print(sl0[n]) // @pointsto main.a
+	print(ap0[n]) // @pointsto main.a
+	print(ap1[n]) // @pointsto main.a
+}
+
+func array8() {
+	var n int
+
+	sl1 := make([]*int, 1, 1)
+	sl2 := make([]*int, 1, 1)
+	pa1 := (*[1]*int)(sl1)
+	pa2 := (*[1]*int)(sl2)
+	sl1[0] = &a
+	sl2[0] = &b
+	print(pa1[n]) // @pointsto main.a
+	print(pa2[n]) // @pointsto main.b
+
+	pa2 = pa1
+	print(pa1[n]) // @pointsto main.a
+	print(pa2[n]) // @pointsto main.a
+}
+
+func main() {
+	array1()
+	array2()
+	array3()
+	array4()
+	array5()
+	array6()
+	array7()
+	array8()
+}
diff --git a/go/pointer/testdata/maps.go b/go/pointer/testdata/maps.go
index 67293045bc0..f73a6ea1987 100644
--- a/go/pointer/testdata/maps.go
+++ b/go/pointer/testdata/maps.go
@@ -58,8 +58,8 @@ func maps3() {
 	// is ill-typed.
 
 	// sizeof(K) > 1, abstractly
-	type K struct{ a, b *float64 }
-	k := K{new(float64), nil}
+	type K struct{ a, b, c, d *float64 }
+	k := K{new(float64), nil, nil, nil}
 	m := map[K]*int{k: &g}
 
 	for _, v := range m {
@@ -67,8 +67,42 @@ func maps3() {
 	}
 }
 
+var v float64
+
+func maps4() {
+	// Regression test for generating constraints for cases of key and values
+	// being blank identifiers or different types assignable from the
+	// corresponding map types in a range stmt.
+	type K struct{ a *float64 }
+	k := K{&v}
+	m := map[K]*int{k: &g}
+
+	for x, y := range m {
+		print(x.a) // @pointsto main.v
+		print(y)   // @pointsto main.g
+	}
+	var i struct{ a *float64 }
+	for i, _ = range m {
+		print(i.a) // @pointsto main.v
+	}
+	var j interface{}
+	for _, j = range m {
+		// TODO support the statement `print(j.(*int))`
+		print(j) // @pointsto main.g
+	}
+	for _, _ = range m {
+	}
+	// do something after 'for _, _ =' to exercise the
+	// effects of indexing
+	for _, j = range m {
+		// TODO support the statement `print(j.(*int))`
+		print(j) // @pointsto main.g
+	}
+}
+
 func main() {
 	maps1()
 	maps2()
 	maps3()
+	maps4()
 }
diff --git a/go/ssa/builder.go b/go/ssa/builder.go
index 2d0fdaa4e69..e1540dbdc05 100644
--- a/go/ssa/builder.go
+++ b/go/ssa/builder.go
@@ -579,6 +579,8 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
 					y.pos = e.Lparen
 				case *MakeInterface:
 					y.pos = e.Lparen
+				case *SliceToArrayPointer:
+					y.pos = e.Lparen
 				}
 			}
 			return y
diff --git a/go/ssa/builder_go117_test.go b/go/ssa/builder_go117_test.go
index e7ba21422f0..f6545e5e2cf 100644
--- a/go/ssa/builder_go117_test.go
+++ b/go/ssa/builder_go117_test.go
@@ -49,3 +49,34 @@ func TestBuildPackageGo117(t *testing.T) {
 		})
 	}
 }
+
+func TestBuildPackageFailuresGo117(t *testing.T) {
+	tests := []struct {
+		name     string
+		src      string
+		importer types.Importer
+	}{
+		{"slice to array pointer - source is not a slice", "package p; var s [4]byte; var _ = (*[4]byte)(s)", nil},
+		{"slice to array pointer - dest is not a pointer", "package p; var s []byte; var _ = ([4]byte)(s)", nil},
+		{"slice to array pointer - dest pointer elem is not an array", "package p; var s []byte; var _ = (*byte)(s)", nil},
+	}
+
+	for _, tc := range tests {
+		tc := tc
+		t.Run(tc.name, func(t *testing.T) {
+			t.Parallel()
+			fset := token.NewFileSet()
+			f, err := parser.ParseFile(fset, "p.go", tc.src, parser.ParseComments)
+			if err != nil {
+				t.Error(err)
+			}
+			files := []*ast.File{f}
+
+			pkg := types.NewPackage("p", "")
+			conf := &types.Config{Importer: tc.importer}
+			if _, _, err := ssautil.BuildPackage(conf, fset, pkg, files, ssa.SanityCheckFunctions); err == nil {
+				t.Error("want error, but got nil")
+			}
+		})
+	}
+}
diff --git a/go/ssa/doc.go b/go/ssa/doc.go
index 1a13640f9d5..71511bff397 100644
--- a/go/ssa/doc.go
+++ b/go/ssa/doc.go
@@ -50,50 +50,51 @@
 // Instruction interfaces.  The following table shows for each
 // concrete type which of these interfaces it implements.
 //
-//                      Value?          Instruction?    Member?
-//   *Alloc             ✔               ✔
-//   *BinOp             ✔               ✔
-//   *Builtin           ✔
-//   *Call              ✔               ✔
-//   *ChangeInterface   ✔               ✔
-//   *ChangeType        ✔               ✔
-//   *Const             ✔
-//   *Convert           ✔               ✔
-//   *DebugRef                          ✔
-//   *Defer                             ✔
-//   *Extract           ✔               ✔
-//   *Field             ✔               ✔
-//   *FieldAddr         ✔               ✔
-//   *FreeVar           ✔
-//   *Function          ✔                               ✔ (func)
-//   *Global            ✔                               ✔ (var)
-//   *Go                                ✔
-//   *If                                ✔
-//   *Index             ✔               ✔
-//   *IndexAddr         ✔               ✔
-//   *Jump                              ✔
-//   *Lookup            ✔               ✔
-//   *MakeChan          ✔               ✔
-//   *MakeClosure       ✔               ✔
-//   *MakeInterface     ✔               ✔
-//   *MakeMap           ✔               ✔
-//   *MakeSlice         ✔               ✔
-//   *MapUpdate                         ✔
-//   *NamedConst                                        ✔ (const)
-//   *Next              ✔               ✔
-//   *Panic                             ✔
-//   *Parameter         ✔
-//   *Phi               ✔               ✔
-//   *Range             ✔               ✔
-//   *Return                            ✔
-//   *RunDefers                         ✔
-//   *Select            ✔               ✔
-//   *Send                              ✔
-//   *Slice             ✔               ✔
-//   *Store                             ✔
-//   *Type                                              ✔ (type)
-//   *TypeAssert        ✔               ✔
-//   *UnOp              ✔               ✔
+//                      Value?          Instruction?      Member?
+//   *Alloc                ✔               ✔
+//   *BinOp                ✔               ✔
+//   *Builtin              ✔
+//   *Call                 ✔               ✔
+//   *ChangeInterface      ✔               ✔
+//   *ChangeType           ✔               ✔
+//   *Const                ✔
+//   *Convert              ✔               ✔
+//   *DebugRef                             ✔
+//   *Defer                                ✔
+//   *Extract              ✔               ✔
+//   *Field                ✔               ✔
+//   *FieldAddr            ✔               ✔
+//   *FreeVar              ✔
+//   *Function             ✔                               ✔ (func)
+//   *Global               ✔                               ✔ (var)
+//   *Go                                   ✔
+//   *If                                   ✔
+//   *Index                ✔               ✔
+//   *IndexAddr            ✔               ✔
+//   *Jump                                 ✔
+//   *Lookup               ✔               ✔
+//   *MakeChan             ✔               ✔
+//   *MakeClosure          ✔               ✔
+//   *MakeInterface        ✔               ✔
+//   *MakeMap              ✔               ✔
+//   *MakeSlice            ✔               ✔
+//   *MapUpdate                            ✔
+//   *NamedConst                                           ✔ (const)
+//   *Next                 ✔               ✔
+//   *Panic                                ✔
+//   *Parameter            ✔
+//   *Phi                  ✔               ✔
+//   *Range                ✔               ✔
+//   *Return                               ✔
+//   *RunDefers                            ✔
+//   *Select               ✔               ✔
+//   *Send                                 ✔
+//   *Slice                ✔               ✔
+//   *SliceToArrayPointer  ✔               ✔
+//   *Store                                ✔
+//   *Type                                                 ✔ (type)
+//   *TypeAssert           ✔               ✔
+//   *UnOp                 ✔               ✔
 //
 // Other key types in this package include: Program, Package, Function
 // and BasicBlock.
diff --git a/go/ssa/emit.go b/go/ssa/emit.go
index df9ca4ff0f7..7c8cfdc6614 100644
--- a/go/ssa/emit.go
+++ b/go/ssa/emit.go
@@ -231,8 +231,8 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
 	// Conversion from slice to array pointer?
 	if slice, ok := ut_src.(*types.Slice); ok {
 		if ptr, ok := ut_dst.(*types.Pointer); ok {
-			if arr, ok := ptr.Elem().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) {
-				c := &Convert{X: val}
+			if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) {
+				c := &SliceToArrayPointer{X: val}
 				c.setType(ut_dst)
 				return f.emit(c)
 			}
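The added Underlying() call matters when the destination pointer's element is a named array type, which the previous assertion missed. A small example of source that is now recognized (mirroring the `arr` type in the interpreter testdata):

```go
type arr [2]int

func named(s []int) *arr {
	// ptr.Elem() here is the *types.Named "arr"; only its Underlying()
	// is a *types.Array, so the conversion needs the extra call above.
	return (*arr)(s)
}
```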
diff --git a/go/ssa/interp/interp.go b/go/ssa/interp/interp.go
index d776594271f..bf7862289f9 100644
--- a/go/ssa/interp/interp.go
+++ b/go/ssa/interp/interp.go
@@ -210,6 +210,9 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
 	case *ssa.Convert:
 		fr.env[instr] = conv(instr.Type(), instr.X.Type(), fr.get(instr.X))
 
+	case *ssa.SliceToArrayPointer:
+		fr.env[instr] = sliceToArrayPointer(instr.Type(), instr.X.Type(), fr.get(instr.X))
+
 	case *ssa.MakeInterface:
 		fr.env[instr] = iface{t: instr.X.Type(), v: fr.get(instr.X)}
 
diff --git a/go/ssa/interp/interp_go117_test.go b/go/ssa/interp/interp_go117_test.go
new file mode 100644
index 00000000000..58bbaa39c91
--- /dev/null
+++ b/go/ssa/interp/interp_go117_test.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.17
+// +build go1.17
+
+package interp_test
+
+func init() {
+	testdataTests = append(testdataTests, "slice2arrayptr.go")
+}
diff --git a/go/ssa/interp/ops.go b/go/ssa/interp/ops.go
index 90d945291b7..6af7847c039 100644
--- a/go/ssa/interp/ops.go
+++ b/go/ssa/interp/ops.go
@@ -1357,6 +1357,31 @@ func conv(t_dst, t_src types.Type, x value) value {
 	panic(fmt.Sprintf("unsupported conversion: %s  -> %s, dynamic type %T", t_src, t_dst, x))
 }
 
+// sliceToArrayPointer converts the slice value x to the array pointer
+// type t_dst and returns the result.
+func sliceToArrayPointer(t_dst, t_src types.Type, x value) value {
+	utSrc := t_src.Underlying()
+	utDst := t_dst.Underlying()
+
+	if _, ok := utSrc.(*types.Slice); ok {
+		if ptr, ok := utDst.(*types.Pointer); ok {
+			if arr, ok := ptr.Elem().Underlying().(*types.Array); ok {
+				x := x.([]value)
+				if arr.Len() > int64(len(x)) {
+					panic("array length is greater than slice length")
+				}
+				if x == nil {
+					return zero(ptr)
+				}
+				}
+				v := value(array(x[:arr.Len()]))
+				return &v
+			}
+		}
+	}
+
+	panic(fmt.Sprintf("unsupported conversion: %s  -> %s, dynamic type %T", t_src, t_dst, x))
+}
+
 // checkInterface checks that the method set of x implements the
 // interface itype.
 // On success it returns "", on failure, an error message.
diff --git a/go/ssa/interp/testdata/slice2arrayptr.go b/go/ssa/interp/testdata/slice2arrayptr.go
new file mode 100644
index 00000000000..ff2d9b55ccd
--- /dev/null
+++ b/go/ssa/interp/testdata/slice2arrayptr.go
@@ -0,0 +1,55 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test for slice to array pointer conversion introduced in go1.17
+// See: https://tip.golang.org/ref/spec#Conversions_from_slice_to_array_pointer
+
+package main
+
+func main() {
+	s := make([]byte, 2, 4)
+	if s0 := (*[0]byte)(s); s0 == nil {
+		panic("conversion of non-nil slice resulted in nil array pointer")
+	}
+	if s2 := (*[2]byte)(s); &s2[0] != &s[0] {
+		panic("the converted array pointer does not share the slice's underlying array")
+	}
+	wantPanic(
+		func() {
+			_ = (*[4]byte)(s) // panics: len([4]byte) > len(s)
+		},
+		"runtime error: array length is greater than slice length",
+	)
+
+	var t []string
+	if t0 := (*[0]string)(t); t0 != nil {
+		panic("nil slice converted to *[0]string should be nil")
+	}
+	wantPanic(
+		func() {
+			_ = (*[1]string)(t) // panics: len([1]string) > len(t)
+		},
+		"runtime error: array length is greater than slice length",
+	)
+}
+
+type arr [2]int
+
+func f() {
+	s := []int{1, 2, 3, 4}
+	_ = *(*arr)(s)
+}
+
+func wantPanic(fn func(), s string) {
+	defer func() {
+		err := recover()
+		if err == nil {
+			panic("expected panic")
+		}
+		if got := err.(error).Error(); got != s {
+			panic("expected panic " + s + " got " + got)
+		}
+	}()
+	fn()
+}
diff --git a/go/ssa/print.go b/go/ssa/print.go
index 3333ba41a00..c1b6d22b3e3 100644
--- a/go/ssa/print.go
+++ b/go/ssa/print.go
@@ -159,10 +159,11 @@ func printConv(prefix string, v, x Value) string {
 		relName(x, v.(Instruction)))
 }
 
-func (v *ChangeType) String() string      { return printConv("changetype", v, v.X) }
-func (v *Convert) String() string         { return printConv("convert", v, v.X) }
-func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) }
-func (v *MakeInterface) String() string   { return printConv("make", v, v.X) }
+func (v *ChangeType) String() string          { return printConv("changetype", v, v.X) }
+func (v *Convert) String() string             { return printConv("convert", v, v.X) }
+func (v *ChangeInterface) String() string     { return printConv("change interface", v, v.X) }
+func (v *SliceToArrayPointer) String() string { return printConv("slice to array pointer", v, v.X) }
+func (v *MakeInterface) String() string       { return printConv("make", v, v.X) }
 
 func (v *MakeClosure) String() string {
 	var b bytes.Buffer
diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go
index 16df7e4f0c3..1d4e20f6a2d 100644
--- a/go/ssa/sanity.go
+++ b/go/ssa/sanity.go
@@ -132,14 +132,8 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
 	case *Call:
 	case *ChangeInterface:
 	case *ChangeType:
+	case *SliceToArrayPointer:
 	case *Convert:
-		if _, ok := instr.X.Type().Underlying().(*types.Slice); ok {
-			if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok {
-				if _, ok := ptr.Elem().(*types.Array); ok {
-					break
-				}
-			}
-		}
 		if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok {
 			if _, ok := instr.Type().Underlying().(*types.Basic); !ok {
 				s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type())
diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go
index d3faf44388d..8358681c7f2 100644
--- a/go/ssa/ssa.go
+++ b/go/ssa/ssa.go
@@ -615,9 +615,10 @@ type ChangeType struct {
 //    - between pointers and unsafe.Pointer.
 //    - between unsafe.Pointer and uintptr.
 //    - from (Unicode) integer to (UTF-8) string.
-//    - from slice to array pointer.
 // A conversion may imply a type name change also.
 //
+// This operation cannot fail dynamically.
+//
 // Conversions of untyped string/number/bool constants to a specific
 // representation are eliminated during SSA construction.
 //
@@ -649,6 +650,20 @@ type ChangeInterface struct {
 	X Value
 }
 
+// The SliceToArrayPointer instruction yields the conversion of slice X to
+// an array pointer.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+// 	t1 = slice to array pointer *[4]byte <- []byte (t0)
+//
+type SliceToArrayPointer struct {
+	register
+	X Value
+}
+
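For orientation, the instruction corresponds to an explicit Go 1.17 conversion such as the hypothetical function below, which compiles to the printed form shown above:

```go
func headerOf(b []byte) *[4]byte {
	return (*[4]byte)(b) // t1 = slice to array pointer *[4]byte <- []byte (t0)
}
```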
 // MakeInterface constructs an instance of an interface type from a
 // value of a concrete type.
 //
@@ -1566,6 +1581,10 @@ func (v *Convert) Operands(rands []*Value) []*Value {
 	return append(rands, &v.X)
 }
 
+func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value {
+	return append(rands, &v.X)
+}
+
 func (s *DebugRef) Operands(rands []*Value) []*Value {
 	return append(rands, &s.X)
 }
diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go
index cffd7acbee7..81e8fdcf0c1 100644
--- a/go/types/objectpath/objectpath.go
+++ b/go/types/objectpath/objectpath.go
@@ -58,7 +58,7 @@ type Path string
 // - The only OT operator is Object.Type,
 //   which we encode as '.' because dot cannot appear in an identifier.
 // - The TT operators are encoded as [EKPRU].
-// - The OT operators are encoded as [AFMO];
+// - The TO operators are encoded as [AFMO];
 //   three of these (At,Field,Method) require an integer operand,
 //   which is encoded as a string of decimal digits.
 //   These indices are stable across different representations
diff --git a/gopls/README.md b/gopls/README.md
index 85de62a1fe5..df7add08807 100644
--- a/gopls/README.md
+++ b/gopls/README.md
@@ -86,9 +86,11 @@ an older Go version causes irreconcilable CI failures, we may drop support for
 that Go version in CI if it is 3 or 4 Go versions old.
 
 `gopls` currently only supports the `go` command, so if you are using a
-different build system, `gopls` will not work well. Bazel support is currently
-blocked on
-[bazelbuild/rules_go#512](https://github.com/bazelbuild/rules_go/issues/512).
+different build system, `gopls` will not work well. Bazel is not officially
+supported, but Bazel support is in development (see
+[bazelbuild/rules_go#512](https://github.com/bazelbuild/rules_go/issues/512)).
+You can follow [these instructions](https://github.com/bazelbuild/rules_go/wiki/Editor-setup)
+to configure your `gopls` to work with Bazel.
 
 ## Additional information
 
diff --git a/gopls/api-diff/api_diff.go b/gopls/api-diff/api_diff.go
new file mode 100644
index 00000000000..1b98a64476c
--- /dev/null
+++ b/gopls/api-diff/api_diff.go
@@ -0,0 +1,264 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"flag"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"log"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+
+	difflib "golang.org/x/tools/internal/lsp/diff"
+	"golang.org/x/tools/internal/lsp/diff/myers"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+var (
+	previousVersionFlag = flag.String("prev", "", "version to compare against")
+	versionFlag         = flag.String("version", "", "version being tagged, or current version if omitted")
+)
+
+func main() {
+	flag.Parse()
+
+	apiDiff, err := diffAPI(*versionFlag, *previousVersionFlag)
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Printf(`
+%s
+`, apiDiff)
+}
+
+type JSON interface {
+	String() string
+	Write(io.Writer)
+}
+
+func diffAPI(version, prev string) (string, error) {
+	previousApi, err := loadAPI(prev)
+	if err != nil {
+		return "", err
+	}
+	var currentApi *source.APIJSON
+	if version == "" {
+		currentApi = source.GeneratedAPIJSON
+	} else {
+		var err error
+		currentApi, err = loadAPI(version)
+		if err != nil {
+			return "", err
+		}
+	}
+
+	b := &strings.Builder{}
+	if err := diff(b, previousApi.Commands, currentApi.Commands, "command", func(c *source.CommandJSON) string {
+		return c.Command
+	}, diffCommands); err != nil {
+		return "", err
+	}
+	if err := diff(b, previousApi.Analyzers, currentApi.Analyzers, "analyzer", func(a *source.AnalyzerJSON) string {
+		return a.Name
+	}, diffAnalyzers); err != nil {
+		return "", err
+	}
+	if err := diff(b, previousApi.Lenses, currentApi.Lenses, "code lens", func(l *source.LensJSON) string {
+		return l.Lens
+	}, diffLenses); err != nil {
+		return "", err
+	}
+	for key, prev := range previousApi.Options {
+		current, ok := currentApi.Options[key]
+		if !ok {
+			panic(fmt.Sprintf("unexpected option key: %s", key))
+		}
+		if err := diff(b, prev, current, "option", func(o *source.OptionJSON) string {
+			return o.Name
+		}, diffOptions); err != nil {
+			return "", err
+		}
+	}
+
+	return b.String(), nil
+}
+
+func diff[T JSON](b *strings.Builder, previous, new []T, kind string, uniqueKey func(T) string, diffFunc func(*strings.Builder, T, T)) error {
+	prevJSON := collect(previous, uniqueKey)
+	newJSON := collect(new, uniqueKey)
+	for k := range newJSON {
+		delete(prevJSON, k)
+	}
+	for _, deleted := range prevJSON {
+		b.WriteString(fmt.Sprintf("%s %s was deleted.\n", kind, deleted))
+	}
+	for _, prev := range previous {
+		delete(newJSON, uniqueKey(prev))
+	}
+	if len(newJSON) > 0 {
+		b.WriteString("The following commands were added:\n")
+		for _, n := range newJSON {
+			n.Write(b)
+			b.WriteByte('\n')
+		}
+	}
+	previousMap := collect(previous, uniqueKey)
+	for _, current := range new {
+		prev, ok := previousMap[uniqueKey(current)]
+		if !ok {
+			continue
+		}
+		c, p := bytes.NewBuffer(nil), bytes.NewBuffer(nil)
+		prev.Write(p)
+		current.Write(c)
+		if diff, err := diffStr(p.String(), c.String()); err == nil && diff != "" {
+			diffFunc(b, prev, current)
+			b.WriteString("\n--\n")
+		}
+	}
+	return nil
+}
+
+func collect[T JSON](args []T, uniqueKey func(T) string) map[string]T {
+	m := map[string]T{}
+	for _, arg := range args {
+		m[uniqueKey(arg)] = arg
+	}
+	return m
+}
+
+func loadAPI(version string) (*source.APIJSON, error) {
+	dir, err := ioutil.TempDir("", "gopath*")
+	if err != nil {
+		return nil, err
+	}
+	defer os.RemoveAll(dir)
+
+	if err := os.Mkdir(fmt.Sprintf("%s/src", dir), 0776); err != nil {
+		return nil, err
+	}
+	goCmd, err := exec.LookPath("go")
+	if err != nil {
+		return nil, err
+	}
+	cmd := exec.Cmd{
+		Path: goCmd,
+		Args: []string{"go", "get", fmt.Sprintf("golang.org/x/tools/gopls@%s", version)},
+		Dir:  dir,
+		Env:  append(os.Environ(), fmt.Sprintf("GOPATH=%s", dir)),
+	}
+	if err := cmd.Run(); err != nil {
+		return nil, err
+	}
+	cmd = exec.Cmd{
+		Path: filepath.Join(dir, "bin", "gopls"),
+		Args: []string{"gopls", "api-json"},
+		Dir:  dir,
+	}
+	out, err := cmd.Output()
+	if err != nil {
+		return nil, err
+	}
+	apiJson := &source.APIJSON{}
+	if err := json.Unmarshal(out, apiJson); err != nil {
+		return nil, err
+	}
+	return apiJson, nil
+}
+
+func diffCommands(b *strings.Builder, prev, current *source.CommandJSON) {
+	if prev.Title != current.Title {
+		b.WriteString(fmt.Sprintf("Title changed from %q to %q\n", prev.Title, current.Title))
+	}
+	if prev.Doc != current.Doc {
+		b.WriteString(fmt.Sprintf("Documentation changed from %q to %q\n", prev.Doc, current.Doc))
+	}
+	if prev.ArgDoc != current.ArgDoc {
+		b.WriteString("Arguments changed from " + formatBlock(prev.ArgDoc) + " to " + formatBlock(current.ArgDoc))
+	}
+	if prev.ResultDoc != current.ResultDoc {
+		b.WriteString("Results changed from " + formatBlock(prev.ResultDoc) + " to " + formatBlock(current.ResultDoc))
+	}
+}
+
+func diffAnalyzers(b *strings.Builder, previous, current *source.AnalyzerJSON) {
+	b.WriteString(fmt.Sprintf("Changes to analyzer %s:\n\n", current.Name))
+	if previous.Doc != current.Doc {
+		b.WriteString(fmt.Sprintf("Documentation changed from %q to %q\n", previous.Doc, current.Doc))
+	}
+	if previous.Default != current.Default {
+		b.WriteString(fmt.Sprintf("Default changed from %v to %v\n", previous.Default, current.Default))
+	}
+}
+
+func diffLenses(b *strings.Builder, previous, current *source.LensJSON) {
+	b.WriteString(fmt.Sprintf("Changes to code lens %s:\n\n", current.Title))
+	if previous.Title != current.Title {
+		b.WriteString(fmt.Sprintf("Title changed from %q to %q\n", previous.Title, current.Title))
+	}
+	if previous.Doc != current.Doc {
+		b.WriteString(fmt.Sprintf("Documentation changed from %q to %q\n", previous.Doc, current.Doc))
+	}
+}
+
+func diffOptions(b *strings.Builder, previous, current *source.OptionJSON) {
+	b.WriteString(fmt.Sprintf("Changes to option %s:\n\n", current.Name))
+	if previous.Doc != current.Doc {
+		diff, err := diffStr(previous.Doc, current.Doc)
+		if err != nil {
+			panic(err)
+		}
+		b.WriteString(fmt.Sprintf("Documentation changed:\n%s\n", diff))
+	}
+	if previous.Default != current.Default {
+		b.WriteString(fmt.Sprintf("Default changed from %q to %q\n", previous.Default, current.Default))
+	}
+	if previous.Hierarchy != current.Hierarchy {
+		b.WriteString(fmt.Sprintf("Categorization changed from %q to %q\n", previous.Hierarchy, current.Hierarchy))
+	}
+	if previous.Status != current.Status {
+		b.WriteString(fmt.Sprintf("Status changed from %q to %q\n", previous.Status, current.Status))
+	}
+	if previous.Type != current.Type {
+		b.WriteString(fmt.Sprintf("Type changed from %q to %q\n", previous.Type, current.Type))
+	}
+	// TODO(rstambler): Handle possibility of same number but different keys/values.
+	if len(previous.EnumKeys.Keys) != len(current.EnumKeys.Keys) {
+		b.WriteString(fmt.Sprintf("Enum keys changed from\n%s\n to \n%s\n", previous.EnumKeys, current.EnumKeys))
+	}
+	if len(previous.EnumValues) != len(current.EnumValues) {
+		b.WriteString(fmt.Sprintf("Enum values changed from\n%s\n to \n%s\n", previous.EnumValues, current.EnumValues))
+	}
+}
+
+func formatBlock(str string) string {
+	if str == "" {
+		return `""`
+	}
+	return "\n```\n" + str + "\n```\n"
+}
+
+func diffStr(before, after string) (string, error) {
+	// Add newlines to avoid newline messages in diff.
+	if before == after {
+		return "", nil
+	}
+	before += "\n"
+	after += "\n"
+	d, err := myers.ComputeEdits("", before, after)
+	if err != nil {
+		return "", err
+	}
+	return fmt.Sprintf("%q", difflib.ToUnified("previous", "current", before, d)), err
+}
diff --git a/gopls/doc/advanced.md b/gopls/doc/advanced.md
index 0fa11392478..04f1840b12f 100644
--- a/gopls/doc/advanced.md
+++ b/gopls/doc/advanced.md
@@ -41,33 +41,30 @@ parameters proposal ([golang/go#43651](https://golang.org/issues/43651)) and
 type set addendum ([golang/go#45346](https://golang.org/issues/45346)).
 
 To enable this support, you need to build gopls with a version of Go that
-supports type parameters: the
-[dev.typeparams branch](https://github.com/golang/go/tree/dev.typeparams). This
-can be done by checking out this branch in the Go repository, or by using
+supports type parameters, currently just tip. This can be done by checking
+out the `master` branch in the Go repository, or by using
 `golang.org/dl/gotip`:
 
 ```
 $ go get golang.org/dl/gotip
-$ gotip download dev.typeparams
+$ gotip download
 ```
 
 For building gopls with type parameter support, it is recommended that you
 build gopls at tip. External APIs are under active development on the
-`dev.typeparams` branch, so building gopls at tip minimizes the chances of
-a build failure (though it is still possible). To get enhanced gopls features
-for generic code, build gopls with the `typeparams` build constraint (though
-this increases your chances of a build failure).
+Go `master` branch, so building gopls at tip minimizes the chances of
+a build failure.
 
 ```
-$ GO111MODULE=on gotip get -tags=typeparams golang.org/x/tools/gopls@master golang.org/x/tools@master
+$ GO111MODULE=on gotip get golang.org/x/tools/gopls@master golang.org/x/tools@master
 ```
 
 This will build a version of gopls that understands generic code. To actually
-run the generic code you develop, you must also tell the compiler to speak
-generics using the `-G=3` compiler flag. For example
+run the generic code you develop, you must also use the tip version of the Go
+compiler. For example:
 
 ```
-$ gotip run -gcflags=-G=3 .
+$ gotip run .
 ```
 
 [Go project]: https://go.googlesource.com/go
diff --git a/gopls/doc/generate.go b/gopls/doc/generate.go
index 91d45baed6e..b6153e1271a 100644
--- a/gopls/doc/generate.go
+++ b/gopls/doc/generate.go
@@ -370,7 +370,6 @@ func valueDoc(name, value, doc string) string {
 }
 
 func loadCommands(pkg *packages.Package) ([]*source.CommandJSON, error) {
-
 	var commands []*source.CommandJSON
 
 	_, cmds, err := commandmeta.Load()
@@ -553,8 +552,6 @@ func rewriteAPI(_ []byte, api *source.APIJSON) ([]byte, error) {
 	return buf.Bytes(), nil
 }
 
-var parBreakRE = regexp.MustCompile("\n{2,}")
-
 type optionsGroup struct {
 	title   string
 	final   string
@@ -583,10 +580,8 @@ func rewriteSettings(doc []byte, api *source.APIJSON) ([]byte, error) {
 			writeTitle(section, h.final, level)
 			for _, opt := range h.options {
 				header := strMultiply("#", level+1)
-				fmt.Fprintf(section, "%s **%v** *%v*\n\n", header, opt.Name, opt.Type)
-				writeStatus(section, opt.Status)
-				enumValues := collectEnums(opt)
-				fmt.Fprintf(section, "%v%v\nDefault: `%v`.\n\n", opt.Doc, enumValues, opt.Default)
+				section.Write([]byte(fmt.Sprintf("%s ", header)))
+				opt.Write(section)
 			}
 		}
 		var err error
@@ -657,38 +652,6 @@ func collectGroups(opts []*source.OptionJSON) []optionsGroup {
 	return groups
 }
 
-func collectEnums(opt *source.OptionJSON) string {
-	var b strings.Builder
-	write := func(name, doc string, index, len int) {
-		if doc != "" {
-			unbroken := parBreakRE.ReplaceAllString(doc, "\\\n")
-			fmt.Fprintf(&b, "* %s", unbroken)
-		} else {
-			fmt.Fprintf(&b, "* `%s`", name)
-		}
-		if index < len-1 {
-			fmt.Fprint(&b, "\n")
-		}
-	}
-	if len(opt.EnumValues) > 0 && opt.Type == "enum" {
-		b.WriteString("\nMust be one of:\n\n")
-		for i, val := range opt.EnumValues {
-			write(val.Value, val.Doc, i, len(opt.EnumValues))
-		}
-	} else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) {
-		b.WriteString("\nCan contain any of:\n\n")
-		for i, val := range opt.EnumKeys.Keys {
-			write(val.Name, val.Doc, i, len(opt.EnumKeys.Keys))
-		}
-	}
-	return b.String()
-}
-
-func shouldShowEnumKeysInSettings(name string) bool {
-	// Both of these fields have too many possible options to print.
-	return !hardcodedEnumKeys(name)
-}
-
 func hardcodedEnumKeys(name string) bool {
 	return name == "analyses" || name == "codelenses"
 }
@@ -710,20 +673,6 @@ func writeTitle(w io.Writer, title string, level int) {
 	fmt.Fprintf(w, "%s %s\n\n", strMultiply("#", level), capitalize(title))
 }
 
-func writeStatus(section io.Writer, status string) {
-	switch status {
-	case "":
-	case "advanced":
-		fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n")
-	case "debug":
-		fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n")
-	case "experimental":
-		fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n")
-	default:
-		fmt.Fprintf(section, "**Status: %s.**\n\n", status)
-	}
-}
-
 func capitalize(s string) string {
 	return string(unicode.ToUpper(rune(s[0]))) + s[1:]
 }
@@ -739,13 +688,7 @@ func strMultiply(str string, count int) string {
 func rewriteCommands(doc []byte, api *source.APIJSON) ([]byte, error) {
 	section := bytes.NewBuffer(nil)
 	for _, command := range api.Commands {
-		fmt.Fprintf(section, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", command.Title, command.Command, command.Doc)
-		if command.ArgDoc != "" {
-			fmt.Fprintf(section, "Args:\n\n```\n%s\n```\n\n", command.ArgDoc)
-		}
-		if command.ResultDoc != "" {
-			fmt.Fprintf(section, "Result:\n\n```\n%s\n```\n\n", command.ResultDoc)
-		}
+		command.Write(section)
 	}
 	return replaceSection(doc, "Commands", section.Bytes())
 }
diff --git a/gopls/doc/semantictokens.md b/gopls/doc/semantictokens.md
index fc541fbff12..c9124b796e0 100644
--- a/gopls/doc/semantictokens.md
+++ b/gopls/doc/semantictokens.md
@@ -16,7 +16,7 @@ don't make intuitive sense (although `async documentation` has a certain appeal)
 
 The 22 semantic tokens are `namespace`, `type`, `class`, `enum`, `interface`,
 		`struct`, `typeParameter`, `parameter`, `variable`, `property`, `enumMember`,
-		`event`, `function`, `member`, `macro`, `keyword`, `modifier`, `comment`,
+		`event`, `function`, `method`, `macro`, `keyword`, `modifier`, `comment`,
 		`string`, `number`, `regexp`, `operator`.
 
 The 10 modifiers are `declaration`, `definition`, `readonly`, `static`,
@@ -72,7 +72,7 @@ alias, it would be marked. Otherwise the last component of the import path is ma
 1. __`type`__ Objects of type ```types.TypeName``` are marked `type`.
 If they are also ```types.Basic```
 the modifier is `defaultLibrary`. (And in ```type B struct{C}```, ```B``` has modifier `definition`.)
-1. __`parameter`__ The formal arguments in ```ast.FuncDecl``` nodes are marked `parameter`.
+1. __`parameter`__ The formal arguments in ```ast.FuncDecl``` and ```ast.FuncType``` nodes are marked `parameter`.
 1. __`variable`__  Identifiers in the
 scope of ```const``` are modified with `readonly`. ```nil``` is usually a `variable` modified with both
 `readonly` and `defaultLibrary`. (```nil``` is a predefined identifier; the user can redefine it,
@@ -80,8 +80,8 @@ in which case it would just be a variable, or whatever.) Identifiers of type ```
 not surprisingly, marked `variable`. Identifiers being defined (node ```ast.GenDecl```) are modified
 by `definition` and, if appropriate, `readonly`. Receivers (in method declarations) are
 `variable`.
-1. __`member`__ Members are marked at their definition (```func (x foo) bar() {}```) or declaration
-in an ```interface```. Members are not marked where they are used.
+1. __`method`__ Methods are marked at their definition (```func (x foo) bar() {}```) or declaration
+in an ```interface```. Methods are not marked where they are used.
 In ```x.bar()```, ```x``` will be marked
 either as a `namespace` if it is a package name, or as a `variable` if it is an interface value,
 so distinguishing ```bar``` seemed superfluous.
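A small, illustrative snippet of where the renamed tokens land, assuming the scheme described above:

```go
type foo struct{}

// bar is marked `method` at its definition; s is marked `parameter`
// (the receiver x is a `variable`).
func (x foo) bar(s string) {}

// Formal arguments inside a function type (an ast.FuncType) are now
// marked `parameter` as well.
type handler func(req string)
```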
diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md
index 5be569f8621..ad7cf1295f2 100644
--- a/gopls/doc/settings.md
+++ b/gopls/doc/settings.md
@@ -70,7 +70,7 @@ Include only project_a: `-` (exclude everything), `+project_a`
 
 Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`
 
-Default: `[]`.
+Default: `["-node_modules"]`.
 
 #### **memoryMode** *enum*
 
@@ -196,7 +196,7 @@ Example Usage:
 ```json5
 "gopls": {
 ...
-  "codelens": {
+  "codelenses": {
     "generate": false,  // Don't show the `go generate` lens.
     "gc_details": true  // Show a code lens toggling the display of gc's choices.
   }
@@ -397,6 +397,7 @@ Must be one of:
 
 * `"CaseInsensitive"`
 * `"CaseSensitive"`
+* `"FastFuzzy"`
 * `"Fuzzy"`
 Default: `"Fuzzy"`.
 
@@ -411,7 +412,7 @@ Example Usage:
 ```json5
 "gopls": {
 ...
-  "symbolStyle": "dynamic",
+  "symbolStyle": "Dynamic",
 ...
 }
 ```
diff --git a/gopls/doc/vim.md b/gopls/doc/vim.md
index 48c9b03d591..a6b40a46b3f 100644
--- a/gopls/doc/vim.md
+++ b/gopls/doc/vim.md
@@ -165,7 +165,7 @@ lua <<EOF
   -- …
 
   function goimports(timeout_ms)
-    local context = { source = { organizeImports = true } }
+    local context = { only = { "source.organizeImports" } }
     vim.validate { context = { context, "t", true } }
 
     local params = vim.lsp.util.make_range_params()
diff --git a/gopls/go.mod b/gopls/go.mod
index a993cce149d..127b7226ff2 100644
--- a/gopls/go.mod
+++ b/gopls/go.mod
@@ -1,23 +1,23 @@
 module golang.org/x/tools/gopls
 
-go 1.17
+go 1.18
 
 require (
-	github.com/BurntSushi/toml v0.3.1 // indirect
-	github.com/google/go-cmp v0.5.5
+	github.com/BurntSushi/toml v0.4.1 // indirect
+	github.com/google/go-cmp v0.5.6
 	github.com/google/safehtml v0.0.2 // indirect
 	github.com/jba/templatecheck v0.6.0
-	github.com/sanity-io/litter v1.5.0
+	github.com/sanity-io/litter v1.5.1
 	github.com/sergi/go-diff v1.1.0
 	golang.org/x/mod v0.4.2
 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
-	golang.org/x/sys v0.0.0-20210510120138-977fb7262007
-	golang.org/x/text v0.3.6 // indirect
-	golang.org/x/tools v0.1.0
+	golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e
+	golang.org/x/text v0.3.7 // indirect
+	golang.org/x/tools v0.1.5
 	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
 	honnef.co/go/tools v0.2.0
 	mvdan.cc/gofumpt v0.1.1
-	mvdan.cc/xurls/v2 v2.2.0
+	mvdan.cc/xurls/v2 v2.3.0
 )
 
 replace golang.org/x/tools => ../
diff --git a/gopls/go.sum b/gopls/go.sum
index b889ac4e7bb..f026fde81e0 100644
--- a/gopls/go.sum
+++ b/gopls/go.sum
@@ -1,54 +1,56 @@
-github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw=
+github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
 github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
-github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/safehtml v0.0.2 h1:ZOt2VXg4x24bW0m2jtzAOkhoXV0iM8vNKc0paByCZqM=
 github.com/google/safehtml v0.0.2/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU=
 github.com/jba/templatecheck v0.6.0 h1:SwM8C4hlK/YNLsdcXStfnHWE2HKkuTVwy5FKQHt5ro8=
 github.com/jba/templatecheck v0.6.0/go.mod h1:/1k7EajoSErFI9GLHAsiIJEaNLt3ALKNw2TV7z2SYv4=
-github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
 github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
-github.com/sanity-io/litter v1.5.0 h1:cHM1wTJiOETY9LKRPd3tqUHGquaBaTteD1tZFesEoi8=
-github.com/sanity-io/litter v1.5.0/go.mod h1:5Z71SvaYy5kcGtyglXOC9rrUi3c1E8CamFWjQsazTh0=
+github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/sanity-io/litter v1.5.1 h1:dwnrSypP6q56o3lFxTU+t2fwQ9A+U5qrXVO4Qg9KwVU=
+github.com/sanity-io/litter v1.5.1/go.mod h1:5Z71SvaYy5kcGtyglXOC9rrUi3c1E8CamFWjQsazTh0=
 github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
 github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE=
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
@@ -60,11 +62,9 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-honnef.co/go/tools v0.1.4 h1:SadWOkti5uVN1FAMgxn165+Mw00fuQKyk4Gyn/inxNQ=
-honnef.co/go/tools v0.1.4/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las=
 honnef.co/go/tools v0.2.0 h1:ws8AfbgTX3oIczLPNPCu5166oBg9ST2vNs0rcht+mDE=
 honnef.co/go/tools v0.2.0/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY=
 mvdan.cc/gofumpt v0.1.1 h1:bi/1aS/5W00E2ny5q65w9SnKpWEF/UIOqDYBILpo9rA=
 mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48=
-mvdan.cc/xurls/v2 v2.2.0 h1:NSZPykBXJFCetGZykLAxaL6SIpvbVy/UFEniIfHAa8A=
-mvdan.cc/xurls/v2 v2.2.0/go.mod h1:EV1RMtya9D6G5DMYPGD8zTQzaHet6Jh8gFlRgGRJeO8=
+mvdan.cc/xurls/v2 v2.3.0 h1:59Olnbt67UKpxF1EwVBopJvkSUBmgtb468E4GVWIZ1I=
+mvdan.cc/xurls/v2 v2.3.0/go.mod h1:AjuTy7gEiUArFMjgBBDU4SMxlfUYsRokpJQgNWOt3e4=
diff --git a/gopls/internal/hooks/diff.go b/gopls/internal/hooks/diff.go
index 46d7dd74bda..a307ba77fd6 100644
--- a/gopls/internal/hooks/diff.go
+++ b/gopls/internal/hooks/diff.go
@@ -14,7 +14,7 @@ import (
 
 func ComputeEdits(uri span.URI, before, after string) (edits []diff.TextEdit, err error) {
 	// The go-diff library has an unresolved panic (see golang/go#278774).
-	// TOOD(rstambler): Remove the recover once the issue has been fixed
+	// TODO(rstambler): Remove the recover once the issue has been fixed
 	// upstream.
 	defer func() {
 		if r := recover(); r != nil {
diff --git a/gopls/internal/regtest/bench/bench_test.go b/gopls/internal/regtest/bench/bench_test.go
index 360e9563c9d..9cbf2f4d92c 100644
--- a/gopls/internal/regtest/bench/bench_test.go
+++ b/gopls/internal/regtest/bench/bench_test.go
@@ -42,9 +42,7 @@ func benchmarkOptions(dir string) []RunOption {
 }
 
 func printBenchmarkResults(result testing.BenchmarkResult) {
-	fmt.Println("Benchmark Statistics:")
-	fmt.Println(result.String())
-	fmt.Println(result.MemString())
+	fmt.Printf("BenchmarkStatistics\t%s\t%s\n", result.String(), result.MemString())
 }
 
 var iwlOptions struct {
@@ -91,7 +89,7 @@ func TestBenchmarkSymbols(t *testing.T) {
 		t.Skip("-symbol_workdir not configured")
 	}
 
-	opts := stressTestOptions(symbolOptions.workdir)
+	opts := benchmarkOptions(symbolOptions.workdir)
 	conf := EditorConfig{}
 	if symbolOptions.matcher != "" {
 		conf.SymbolMatcher = &symbolOptions.matcher
diff --git a/gopls/internal/regtest/bench/completion_bench_test.go b/gopls/internal/regtest/bench/completion_bench_test.go
index c6773393163..a8ef47c207e 100644
--- a/gopls/internal/regtest/bench/completion_bench_test.go
+++ b/gopls/internal/regtest/bench/completion_bench_test.go
@@ -7,7 +7,6 @@ package bench
 import (
 	"flag"
 	"fmt"
-	"runtime"
 	"strings"
 	"testing"
 
@@ -55,11 +54,6 @@ func benchmarkCompletion(options completionBenchOptions, t *testing.T) {
 			options.preCompletionEdits(env)
 		}
 
-		// Add a comment as a marker at the start of the file, we'll replace
-		// this in every iteration to trigger type checking and hence emulate
-		// a more real world scenario.
-		env.EditBuffer(options.file, fake.Edit{Text: "// 0\n"})
-
 		// Run a completion to make sure the system is warm.
 		pos := env.RegexpSearch(options.file, options.locationRegexp)
 		completions := env.Completion(options.file, pos)
@@ -73,16 +67,6 @@ func benchmarkCompletion(options completionBenchOptions, t *testing.T) {
 
 		results := testing.Benchmark(func(b *testing.B) {
 			for i := 0; i < b.N; i++ {
-				b.StopTimer()
-				env.RegexpReplace(options.file, `\/\/ \d*`, fmt.Sprintf("// %d", i))
-
-				// explicitly garbage collect since we don't want to count this
-				// time in completion benchmarks.
-				if i%10 == 0 {
-					runtime.GC()
-				}
-				b.StartTimer()
-
 				env.Completion(options.file, pos)
 			}
 		})
@@ -107,7 +91,7 @@ func endPosInBuffer(env *Env, name string) fake.Pos {
 // Benchmark completion at a specified file and location. When no CLI options
 // are specified, this test is skipped.
 // To run (from x/tools/gopls) against the dummy function above:
-// 	go test -v ./internal/regtest -run=TestBenchmarkConfiguredCompletion
+// 	go test -v ./internal/regtest/bench -run=TestBenchmarkConfiguredCompletion
 // 	-completion_workdir="$HOME/Developer/tools"
 // 	-completion_file="gopls/internal/regtest/completion_bench_test.go"
 // 	-completion_regexp="dummyCompletionFunction.*fmt\.Printf\(\"%s\", s(\))"
@@ -116,7 +100,7 @@ func TestBenchmarkConfiguredCompletion(t *testing.T) {
 }
 
 // To run (from x/tools/gopls):
-// 	go test -v ./internal/regtest -run TestBenchmark<>Completion
+// 	go test -v ./internal/regtest/bench -run TestBenchmark<>Completion
 //	-completion_workdir="$HOME/Developer/tools"
 // where <> is one of the tests below. completion_workdir should be the path to
 // x/tools on your system.
diff --git a/gopls/internal/regtest/codelens/codelens_test.go b/gopls/internal/regtest/codelens/codelens_test.go
index d89b8e0bd86..ad35a299114 100644
--- a/gopls/internal/regtest/codelens/codelens_test.go
+++ b/gopls/internal/regtest/codelens/codelens_test.go
@@ -303,8 +303,7 @@ package main
 import "fmt"
 
 func main() {
-	var x string
-	fmt.Println(x)
+	fmt.Println(42)
 }
 `
 	WithOptions(
@@ -320,7 +319,7 @@ func main() {
 		d := &protocol.PublishDiagnosticsParams{}
 		env.Await(
 			OnceMet(
-				DiagnosticAt("main.go", 6, 12),
+				DiagnosticAt("main.go", 5, 13),
 				ReadDiagnostics("main.go", d),
 			),
 		)
@@ -330,12 +329,12 @@ func main() {
 			if d.Severity != protocol.SeverityInformation {
 				t.Fatalf("unexpected diagnostic severity %v, wanted Information", d.Severity)
 			}
-			if strings.Contains(d.Message, "x escapes") {
+			if strings.Contains(d.Message, "42 escapes") {
 				found = true
 			}
 		}
 		if !found {
-			t.Fatalf(`expected to find diagnostic with message "escape(x escapes to heap)", found none`)
+			t.Fatalf(`expected to find diagnostic with message "escape(42 escapes to heap)", found none`)
 		}
 
 		// Editing a buffer should cause gc_details diagnostics to disappear, since
@@ -346,7 +345,7 @@ func main() {
 		// Saving a buffer should re-format back to the original state, and
 		// re-enable the gc_details diagnostics.
 		env.SaveBuffer("main.go")
-		env.Await(DiagnosticAt("main.go", 6, 12))
+		env.Await(DiagnosticAt("main.go", 5, 13))
 
 		// Toggle the GC details code lens again so now it should be off.
 		env.ExecuteCodeLensCommand("main.go", command.GCDetails)
diff --git a/gopls/internal/regtest/completion/completion_test.go b/gopls/internal/regtest/completion/completion_test.go
index cd70ccabb9e..795f7ae1fb3 100644
--- a/gopls/internal/regtest/completion/completion_test.go
+++ b/gopls/internal/regtest/completion/completion_test.go
@@ -504,7 +504,6 @@ func doit() {
 }
 
 func TestUnimportedCompletion_VSCodeIssue1489(t *testing.T) {
-	t.Skip("broken due to golang/vscode-go#1489")
 	testenv.NeedsGo1Point(t, 14)
 
 	const src = `
@@ -524,8 +523,7 @@ func main() {
 }
 `
 	WithOptions(
-		WindowsLineEndings,
-		ProxyFiles(proxy),
+		EditorConfig{WindowsLineEndings: true},
 	).Run(t, src, func(t *testing.T, env *Env) {
 		// Trigger unimported completions for the example.com/blah package.
 		env.OpenFile("main.go")
@@ -537,6 +535,10 @@ func main() {
 		}
 		env.AcceptCompletion("main.go", pos, completions.Items[0])
 		env.Await(env.DoneWithChange())
-		t.Log(env.Editor.BufferText("main.go"))
+		got := env.Editor.BufferText("main.go")
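+		// The editor is configured with WindowsLineEndings above, so the
+		// expected buffer text uses CRLF line endings.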
+		want := "package main\r\n\r\nimport (\r\n\t\"fmt\"\r\n\t\"math\"\r\n)\r\n\r\nfunc main() {\r\n\tfmt.Println(\"a\")\r\n\tmath.Sqrt(${1:})\r\n}\r\n"
+		if got != want {
+			t.Errorf("unimported completion: got %q, want %q", got, want)
+		}
 	})
 }
diff --git a/gopls/internal/regtest/misc/definition_test.go b/gopls/internal/regtest/misc/definition_test.go
index e6181c70224..2b7d1a47d29 100644
--- a/gopls/internal/regtest/misc/definition_test.go
+++ b/gopls/internal/regtest/misc/definition_test.go
@@ -10,6 +10,7 @@ import (
 	"testing"
 
 	. "golang.org/x/tools/internal/lsp/regtest"
+	"golang.org/x/tools/internal/testenv"
 
 	"golang.org/x/tools/internal/lsp/fake"
 	"golang.org/x/tools/internal/lsp/tests"
@@ -234,3 +235,45 @@ func main() {}
 		})
 	}
 }
+
+// Test for golang/go#47825.
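+// The jump to the definition of RoleSetup below is expected to succeed; the
+// test fails if the request returns an error.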
+func TestImportTestVariant(t *testing.T) {
+	testenv.NeedsGo1Point(t, 13)
+
+	const mod = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- client/test/role.go --
+package test
+
+import _ "mod.com/client"
+
+type RoleSetup struct{}
+-- client/client_role_test.go --
+package client_test
+
+import (
+	"testing"
+	_ "mod.com/client"
+	ctest "mod.com/client/test"
+)
+
+func TestClient(t *testing.T) {
+	_ = ctest.RoleSetup{}
+}
+-- client/client_test.go --
+package client
+
+import "testing"
+
+func TestClient(t *testing.T) {}
+-- client.go --
+package client
+`
+	Run(t, mod, func(t *testing.T, env *Env) {
+		env.OpenFile("client/client_role_test.go")
+		env.GoToDefinition("client/client_role_test.go", env.RegexpSearch("client/client_role_test.go", "RoleSetup"))
+	})
+}
diff --git a/gopls/internal/regtest/misc/embed_test.go b/gopls/internal/regtest/misc/embed_test.go
index 8fb654ba180..2e66d7866ca 100644
--- a/gopls/internal/regtest/misc/embed_test.go
+++ b/gopls/internal/regtest/misc/embed_test.go
@@ -22,6 +22,9 @@ import (
 	_ "embed"
 )
 
+// Issue 47436
+func F() {}
+
 //go:embed NONEXISTENT
 var foo string
 `
diff --git a/gopls/internal/regtest/misc/formatting_test.go b/gopls/internal/regtest/misc/formatting_test.go
index 52d89e4ba2b..1e14237afcc 100644
--- a/gopls/internal/regtest/misc/formatting_test.go
+++ b/gopls/internal/regtest/misc/formatting_test.go
@@ -171,7 +171,7 @@ func TestFormattingOnSave(t *testing.T) {
 // Import organization in these files has historically been a source of bugs.
 func TestCRLFLineEndings(t *testing.T) {
 	for _, tt := range []struct {
-		issue, want string
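+		// input, when non-empty, is the buffer content before organizing
+		// imports; otherwise want is used as both input and expected output.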
+		issue, input, want string
 	}{
 		{
 			issue: "41057",
@@ -222,12 +222,40 @@ func main() {
 type Tree struct {
 	arr []string
 }
+`,
+		},
+		{
+			issue: "47200",
+			input: `package main
+
+import "fmt"
+
+func main() {
+	math.Sqrt(9)
+	fmt.Println("hello")
+}
+`,
+			want: `package main
+
+import (
+	"fmt"
+	"math"
+)
+
+func main() {
+	math.Sqrt(9)
+	fmt.Println("hello")
+}
 `,
 		},
 	} {
 		t.Run(tt.issue, func(t *testing.T) {
 			Run(t, "-- main.go --", func(t *testing.T, env *Env) {
-				crlf := strings.ReplaceAll(tt.want, "\n", "\r\n")
+				input := tt.input
+				if input == "" {
+					input = tt.want
+				}
+				crlf := strings.ReplaceAll(input, "\n", "\r\n")
 				env.CreateBuffer("main.go", crlf)
 				env.Await(env.DoneWithOpen())
 				env.OrganizeImports("main.go")
diff --git a/gopls/internal/regtest/misc/hover_test.go b/gopls/internal/regtest/misc/hover_test.go
index 7a361f9150f..fbd0ac5c9e5 100644
--- a/gopls/internal/regtest/misc/hover_test.go
+++ b/gopls/internal/regtest/misc/hover_test.go
@@ -9,6 +9,7 @@ import (
 	"testing"
 
 	. "golang.org/x/tools/internal/lsp/regtest"
+	"golang.org/x/tools/internal/testenv"
 )
 
 func TestHoverUnexported(t *testing.T) {
@@ -25,6 +26,10 @@ type Mixed struct {
 	Exported   int
 	unexported string
 }
+
+func printMixed(m Mixed) {
+	println(m)
+}
 `
 	const mod = `
 -- go.mod --
@@ -34,7 +39,7 @@ go 1.12
 
 require golang.org/x/structs v1.0.0
 -- go.sum --
-golang.org/x/structs v1.0.0 h1:oxD5q25qV458xBbXf5+QX+Johgg71KFtwuJzt145c9A=
+golang.org/x/structs v1.0.0 h1:3DlrFfd3OsEen7FnCHfqtnJvjBZ8ZFKmrD/+HjpdJj0=
 golang.org/x/structs v1.0.0/go.mod h1:47gkSIdo5AaQaWJS0upVORsxfEr1LL1MWv9dmYF3iq4=
 -- main.go --
 package main
@@ -50,9 +55,61 @@ func main() {
 		ProxyFiles(proxy),
 	).Run(t, mod, func(t *testing.T, env *Env) {
 		env.OpenFile("main.go")
-		got, _ := env.Hover("main.go", env.RegexpSearch("main.go", "Mixed"))
+		mixedPos := env.RegexpSearch("main.go", "Mixed")
+		got, _ := env.Hover("main.go", mixedPos)
+		if !strings.Contains(got.Value, "unexported") {
+			t.Errorf("Workspace hover: missing expected field 'unexported'. Got:\n%q", got.Value)
+		}
+		cacheFile, _ := env.GoToDefinition("main.go", mixedPos)
+		argPos := env.RegexpSearch(cacheFile, "printMixed.*(Mixed)")
+		got, _ = env.Hover(cacheFile, argPos)
 		if !strings.Contains(got.Value, "unexported") {
-			t.Errorf("Hover: missing expected field 'unexported'. Got:\n%q", got.Value)
+			t.Errorf("Non-workspace hover: missing expected field 'unexported'. Got:\n%q", got.Value)
 		}
 	})
 }
+
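+// TestHoverIntLiteral checks that hover output for variables initialized with
+// hex and binary literals includes the decimal value (0xe34e is 58190,
+// 0b1001001 is 73).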
+func TestHoverIntLiteral(t *testing.T) {
+	testenv.NeedsGo1Point(t, 13)
+	const source = `
+-- main.go --
+package main
+
+var (
+	bigBin = 0b1001001
+)
+
+var hex = 0xe34e
+
+func main() {
+}
+`
+	Run(t, source, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		hexExpected := "58190"
+		got, _ := env.Hover("main.go", env.RegexpSearch("main.go", "hex"))
+		if got != nil && !strings.Contains(got.Value, hexExpected) {
+			t.Errorf("Hover: missing expected field '%s'. Got:\n%q", hexExpected, got.Value)
+		}
+
+		binExpected := "73"
+		got, _ = env.Hover("main.go", env.RegexpSearch("main.go", "bigBin"))
+		if got != nil && !strings.Contains(got.Value, binExpected) {
+			t.Errorf("Hover: missing expected field '%s'. Got:\n%q", binExpected, got.Value)
+		}
+	})
+}
+
+// Tests that hovering does not trigger the panic in golang/go#48249.
+func TestPanicInHoverBrokenCode(t *testing.T) {
+	testenv.NeedsGo1Point(t, 13)
+	const source = `
+-- main.go --
+package main
+
+type Example struct`
+	Run(t, source, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
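+		// Call the editor's Hover directly so that an error response is
+		// tolerated; the test only guards against a panic.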
+		env.Editor.Hover(env.Ctx, "main.go", env.RegexpSearch("main.go", "Example"))
+	})
+}
diff --git a/gopls/internal/regtest/misc/multiple_adhoc_test.go b/gopls/internal/regtest/misc/multiple_adhoc_test.go
new file mode 100644
index 00000000000..5f803e4e385
--- /dev/null
+++ b/gopls/internal/regtest/misc/multiple_adhoc_test.go
@@ -0,0 +1,44 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package misc
+
+import (
+	"testing"
+
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
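+// TestMultipleAdHocPackages opens two ad-hoc main files (no go.mod) in the
+// same directory and verifies that completion works in both, and in the first
+// file again after the second has been opened.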
+func TestMultipleAdHocPackages(t *testing.T) {
+	Run(t, `
+-- a/a.go --
+package main
+
+import "fmt"
+
+func main() {
+	fmt.Println("")
+}
+-- a/b.go --
+package main
+
+import "fmt"
+
+func main() () {
+	fmt.Println("")
+}
+`, func(t *testing.T, env *Env) {
+		env.OpenFile("a/a.go")
+		if list := env.Completion("a/a.go", env.RegexpSearch("a/a.go", "Println")); list == nil || len(list.Items) == 0 {
+			t.Fatal("expected completions, got none")
+		}
+		env.OpenFile("a/b.go")
+		if list := env.Completion("a/b.go", env.RegexpSearch("a/b.go", "Println")); list == nil || len(list.Items) == 0 {
+			t.Fatal("expected completions, got none")
+		}
+		if list := env.Completion("a/a.go", env.RegexpSearch("a/a.go", "Println")); list == nil || len(list.Items) == 0 {
+			t.Fatal("expected completions, got none")
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/references_test.go b/gopls/internal/regtest/misc/references_test.go
index f3a23e436f5..768251680f9 100644
--- a/gopls/internal/regtest/misc/references_test.go
+++ b/gopls/internal/regtest/misc/references_test.go
@@ -42,3 +42,42 @@ func main() {
 		}
 	})
 }
+
+// This reproduces and tests golang/go#48400.
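+// The references request below is expected to fail with an error rather than
+// panic.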
+func TestReferencesPanicOnError(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- main.go --
+package main
+
+type t interface {
+	error
+}
+
+type s struct{}
+
+func (*s) Error() string {
+	return ""
+}
+
+func _() {
+	var s s
+	_ = s.Error()
+}
+`
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		file, pos := env.GoToDefinition("main.go", env.RegexpSearch("main.go", `Error`))
+		refs, err := env.Editor.References(env.Ctx, file, pos)
+		if err == nil {
+			t.Fatalf("expected error for references, instead got %v", refs)
+		}
+		wantErr := "no position for func (error).Error() string"
+		if err.Error() != wantErr {
+			t.Fatalf("expected error with message %s, instead got %s", wantErr, err.Error())
+		}
+	})
+}
diff --git a/gopls/internal/regtest/misc/rename_test.go b/gopls/internal/regtest/misc/rename_test.go
new file mode 100644
index 00000000000..121b70725b4
--- /dev/null
+++ b/gopls/internal/regtest/misc/rename_test.go
@@ -0,0 +1,58 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package misc
+
+import (
+	"strings"
+	"testing"
+
+	. "golang.org/x/tools/internal/lsp/regtest"
+)
+
+// Test for golang/go#47564.
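+// Renaming a symbol from its use in main.go should also update the in-package
+// test file stringutil/stringutil_test.go.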
+func TestRenameInTestVariant(t *testing.T) {
+	const files = `
+-- go.mod --
+module mod.com
+
+go 1.12
+-- stringutil/stringutil.go --
+package stringutil
+
+func Identity(s string) string {
+	return s
+}
+-- stringutil/stringutil_test.go --
+package stringutil
+
+func TestIdentity(t *testing.T) {
+	if got := Identity("foo"); got != "foo" {
+		t.Errorf("bad")
+	}
+}
+-- main.go --
+package main
+
+import (
+	"fmt"
+
+	"mod.com/stringutil"
+)
+
+func main() {
+	fmt.Println(stringutil.Identity("hello world"))
+}
+`
+
+	Run(t, files, func(t *testing.T, env *Env) {
+		env.OpenFile("main.go")
+		pos := env.RegexpSearch("main.go", `stringutil\.(Identity)`)
+		env.Rename("main.go", pos, "Identityx")
+		text := env.Editor.BufferText("stringutil/stringutil_test.go")
+		if !strings.Contains(text, "Identityx") {
+			t.Errorf("stringutil/stringutil_test.go: missing expected token `Identityx` after rename:\n%s", text)
+		}
+	})
+}
diff --git a/internal/imports/mkstdlib.go b/internal/imports/mkstdlib.go
index f5ea292f9f9..8eb4e124796 100644
--- a/internal/imports/mkstdlib.go
+++ b/internal/imports/mkstdlib.go
@@ -69,6 +69,7 @@ func main() {
 		mustOpen(api("go1.14.txt")),
 		mustOpen(api("go1.15.txt")),
 		mustOpen(api("go1.16.txt")),
+		mustOpen(api("go1.17.txt")),
 
 		// The API of the syscall/js package needs to be computed explicitly,
 		// because it's not included in the GOROOT/api/go1.*.txt files at this time.
diff --git a/internal/imports/zstdlib.go b/internal/imports/zstdlib.go
index ccdd4e0ffcf..7de2be9b4b7 100644
--- a/internal/imports/zstdlib.go
+++ b/internal/imports/zstdlib.go
@@ -180,6 +180,8 @@ var stdlib = map[string][]string{
 		"NewReader",
 		"NewWriter",
 		"Order",
+		"Reader",
+		"Writer",
 	},
 	"compress/zlib": []string{
 		"BestCompression",
@@ -641,7 +643,9 @@ var stdlib = map[string][]string{
 		"Named",
 		"NamedArg",
 		"NullBool",
+		"NullByte",
 		"NullFloat64",
+		"NullInt16",
 		"NullInt32",
 		"NullInt64",
 		"NullString",
@@ -2248,6 +2252,7 @@ var stdlib = map[string][]string{
 		"SHT_LOOS",
 		"SHT_LOPROC",
 		"SHT_LOUSER",
+		"SHT_MIPS_ABIFLAGS",
 		"SHT_NOBITS",
 		"SHT_NOTE",
 		"SHT_NULL",
@@ -3061,6 +3066,7 @@ var stdlib = map[string][]string{
 		"ParseExpr",
 		"ParseExprFrom",
 		"ParseFile",
+		"SkipObjectResolution",
 		"SpuriousErrors",
 		"Trace",
 	},
@@ -3441,6 +3447,7 @@ var stdlib = map[string][]string{
 		"Pt",
 		"RGBA",
 		"RGBA64",
+		"RGBA64Image",
 		"Rect",
 		"Rectangle",
 		"RegisterFormat",
@@ -3507,6 +3514,7 @@ var stdlib = map[string][]string{
 		"Op",
 		"Over",
 		"Quantizer",
+		"RGBA64Image",
 		"Src",
 	},
 	"image/gif": []string{
@@ -3612,6 +3620,7 @@ var stdlib = map[string][]string{
 		"FS",
 		"File",
 		"FileInfo",
+		"FileInfoToDirEntry",
 		"FileMode",
 		"Glob",
 		"GlobFS",
@@ -3772,15 +3781,18 @@ var stdlib = map[string][]string{
 		"Max",
 		"MaxFloat32",
 		"MaxFloat64",
+		"MaxInt",
 		"MaxInt16",
 		"MaxInt32",
 		"MaxInt64",
 		"MaxInt8",
+		"MaxUint",
 		"MaxUint16",
 		"MaxUint32",
 		"MaxUint64",
 		"MaxUint8",
 		"Min",
+		"MinInt",
 		"MinInt16",
 		"MinInt32",
 		"MinInt64",
@@ -4078,6 +4090,7 @@ var stdlib = map[string][]string{
 		"UnknownNetworkError",
 	},
 	"net/http": []string{
+		"AllowQuerySemicolons",
 		"CanonicalHeaderKey",
 		"Client",
 		"CloseNotifier",
@@ -4660,6 +4673,7 @@ var stdlib = map[string][]string{
 		"Value",
 		"ValueError",
 		"ValueOf",
+		"VisibleFields",
 		"Zero",
 	},
 	"regexp": []string{
@@ -4799,6 +4813,10 @@ var stdlib = map[string][]string{
 		"UnlockOSThread",
 		"Version",
 	},
+	"runtime/cgo": []string{
+		"Handle",
+		"NewHandle",
+	},
 	"runtime/debug": []string{
 		"BuildInfo",
 		"FreeOSMemory",
@@ -4915,6 +4933,7 @@ var stdlib = map[string][]string{
 		"QuoteRuneToGraphic",
 		"QuoteToASCII",
 		"QuoteToGraphic",
+		"QuotedPrefix",
 		"Unquote",
 		"UnquoteChar",
 	},
@@ -10334,6 +10353,7 @@ var stdlib = map[string][]string{
 		"PipeNode",
 		"Pos",
 		"RangeNode",
+		"SkipFuncCheck",
 		"StringNode",
 		"TemplateNode",
 		"TextNode",
@@ -10358,6 +10378,7 @@ var stdlib = map[string][]string{
 		"July",
 		"June",
 		"Kitchen",
+		"Layout",
 		"LoadLocation",
 		"LoadLocationFromTZData",
 		"Local",
@@ -10406,6 +10427,8 @@ var stdlib = map[string][]string{
 		"UTC",
 		"Unix",
 		"UnixDate",
+		"UnixMicro",
+		"UnixMilli",
 		"Until",
 		"Wednesday",
 		"Weekday",
diff --git a/internal/lsp/cache/analysis.go b/internal/lsp/cache/analysis.go
index faf030655cd..baaad5af67a 100644
--- a/internal/lsp/cache/analysis.go
+++ b/internal/lsp/cache/analysis.go
@@ -30,7 +30,7 @@ func (s *snapshot) Analyze(ctx context.Context, id string, analyzers []*source.A
 		if !a.IsEnabled(s.view) {
 			continue
 		}
-		ah, err := s.actionHandle(ctx, packageID(id), a.Analyzer)
+		ah, err := s.actionHandle(ctx, PackageID(id), a.Analyzer)
 		if err != nil {
 			return nil, err
 		}
@@ -84,7 +84,7 @@ type packageFactKey struct {
 	typ reflect.Type
 }
 
-func (s *snapshot) actionHandle(ctx context.Context, id packageID, a *analysis.Analyzer) (*actionHandle, error) {
+func (s *snapshot) actionHandle(ctx context.Context, id PackageID, a *analysis.Analyzer) (*actionHandle, error) {
 	ph, err := s.buildPackageHandle(ctx, id, source.ParseFull)
 	if err != nil {
 		return nil, err
@@ -121,13 +121,13 @@ func (s *snapshot) actionHandle(ctx context.Context, id packageID, a *analysis.A
 		// An analysis that consumes/produces facts
 		// must run on the package's dependencies too.
 		if len(a.FactTypes) > 0 {
-			importIDs := make([]string, 0, len(ph.m.deps))
-			for _, importID := range ph.m.deps {
+			importIDs := make([]string, 0, len(ph.m.Deps))
+			for _, importID := range ph.m.Deps {
 				importIDs = append(importIDs, string(importID))
 			}
 			sort.Strings(importIDs) // for determinism
 			for _, importID := range importIDs {
-				depActionHandle, err := s.actionHandle(ctx, packageID(importID), a)
+				depActionHandle, err := s.actionHandle(ctx, PackageID(importID), a)
 				if err != nil {
 					return nil, err
 				}
diff --git a/internal/lsp/cache/cache.go b/internal/lsp/cache/cache.go
index 7221874a507..be03e638e07 100644
--- a/internal/lsp/cache/cache.go
+++ b/internal/lsp/cache/cache.go
@@ -198,7 +198,7 @@ func (c *Cache) ID() string                     { return c.id }
 func (c *Cache) MemStats() map[reflect.Type]int { return c.store.Stats() }
 
 type packageStat struct {
-	id        packageID
+	id        PackageID
 	mode      source.ParseMode
 	file      int64
 	ast       int64
@@ -224,7 +224,7 @@ func (c *Cache) PackageStats(withNames bool) template.HTML {
 				typInfoCost = typesInfoCost(v.pkg.typesInfo)
 			}
 			stat := packageStat{
-				id:        v.pkg.m.id,
+				id:        v.pkg.m.ID,
 				mode:      v.pkg.mode,
 				types:     typsCost,
 				typesInfo: typInfoCost,
diff --git a/internal/lsp/cache/check.go b/internal/lsp/cache/check.go
index 89094b0e3e9..2eb2d1e6111 100644
--- a/internal/lsp/cache/check.go
+++ b/internal/lsp/cache/check.go
@@ -42,7 +42,7 @@ type packageHandle struct {
 	mode source.ParseMode
 
 	// m is the metadata associated with the package.
-	m *knownMetadata
+	m *KnownMetadata
 
 	// key is the hashed key for the package.
 	key packageHandleKey
@@ -50,7 +50,7 @@ type packageHandle struct {
 
 func (ph *packageHandle) packageKey() packageKey {
 	return packageKey{
-		id:   ph.m.id,
+		id:   ph.m.ID,
 		mode: ph.mode,
 	}
 }
@@ -85,7 +85,7 @@ type packageData struct {
 // It assumes that the given ID already has metadata available, so it does not
 // attempt to reload missing or invalid metadata. The caller must reload
 // metadata if needed.
-func (s *snapshot) buildPackageHandle(ctx context.Context, id packageID, mode source.ParseMode) (*packageHandle, error) {
+func (s *snapshot) buildPackageHandle(ctx context.Context, id PackageID, mode source.ParseMode) (*packageHandle, error) {
 	if ph := s.getPackage(id, mode); ph != nil {
 		return ph, nil
 	}
@@ -121,7 +121,7 @@ func (s *snapshot) buildPackageHandle(ctx context.Context, id packageID, mode so
 		}
 
 		data := &packageData{}
-		data.pkg, data.err = typeCheck(ctx, snapshot, m.metadata, mode, deps)
+		data.pkg, data.err = typeCheck(ctx, snapshot, m.Metadata, mode, deps)
 		// Make sure that the workers above have finished before we return,
 		// especially in case of cancellation.
 		wg.Wait()
@@ -140,16 +140,16 @@ func (s *snapshot) buildPackageHandle(ctx context.Context, id packageID, mode so
 }
 
 // buildKey computes the key for a given packageHandle.
-func (s *snapshot) buildKey(ctx context.Context, id packageID, mode source.ParseMode) (*packageHandle, map[packagePath]*packageHandle, error) {
+func (s *snapshot) buildKey(ctx context.Context, id PackageID, mode source.ParseMode) (*packageHandle, map[PackagePath]*packageHandle, error) {
 	m := s.getMetadata(id)
 	if m == nil {
 		return nil, nil, errors.Errorf("no metadata for %s", id)
 	}
-	goFiles, err := s.parseGoHandles(ctx, m.goFiles, mode)
+	goFiles, err := s.parseGoHandles(ctx, m.GoFiles, mode)
 	if err != nil {
 		return nil, nil, err
 	}
-	compiledGoFiles, err := s.parseGoHandles(ctx, m.compiledGoFiles, mode)
+	compiledGoFiles, err := s.parseGoHandles(ctx, m.CompiledGoFiles, mode)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -160,12 +160,12 @@ func (s *snapshot) buildKey(ctx context.Context, id packageID, mode source.Parse
 		mode:            mode,
 	}
 	// Make sure all of the depList are sorted.
-	depList := append([]packageID{}, m.deps...)
+	depList := append([]PackageID{}, m.Deps...)
 	sort.Slice(depList, func(i, j int) bool {
 		return depList[i] < depList[j]
 	})
 
-	deps := make(map[packagePath]*packageHandle)
+	deps := make(map[PackagePath]*packageHandle)
 
 	// Begin computing the key by getting the depKeys for all dependencies.
 	var depKeys []packageHandleKey
@@ -174,7 +174,7 @@ func (s *snapshot) buildKey(ctx context.Context, id packageID, mode source.Parse
 		// Don't use invalid metadata for dependencies if the top-level
 		// metadata is valid. We only load top-level packages, so if the
 		// top-level is valid, all of its dependencies should be as well.
-		if err != nil || m.valid && !depHandle.m.valid {
+		if err != nil || m.Valid && !depHandle.m.Valid {
 			if err != nil {
 				event.Error(ctx, fmt.Sprintf("%s: no dep handle for %s", id, depID), err, tag.Snapshot.Of(s.id))
 			} else {
@@ -189,15 +189,15 @@ func (s *snapshot) buildKey(ctx context.Context, id packageID, mode source.Parse
 			depKeys = append(depKeys, packageHandleKey(fmt.Sprintf("%s import not found", depID)))
 			continue
 		}
-		deps[depHandle.m.pkgPath] = depHandle
+		deps[depHandle.m.PkgPath] = depHandle
 		depKeys = append(depKeys, depHandle.key)
 	}
 	experimentalKey := s.View().Options().ExperimentalPackageCacheKey
-	ph.key = checkPackageKey(ph.m.id, compiledGoFiles, m.config, depKeys, mode, experimentalKey)
+	ph.key = checkPackageKey(ph.m.ID, compiledGoFiles, m.Config, depKeys, mode, experimentalKey)
 	return ph, deps, nil
 }
 
-func (s *snapshot) workspaceParseMode(id packageID) source.ParseMode {
+func (s *snapshot) workspaceParseMode(id PackageID) source.ParseMode {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 	_, ws := s.workspacePackages[id]
@@ -207,21 +207,13 @@ func (s *snapshot) workspaceParseMode(id packageID) source.ParseMode {
 	if s.view.Options().MemoryMode == source.ModeNormal {
 		return source.ParseFull
 	}
-
-	// Degraded mode. Check for open files.
-	m, ok := s.metadata[id]
-	if !ok {
-		return source.ParseExported
-	}
-	for _, cgf := range m.compiledGoFiles {
-		if s.isOpenLocked(cgf) {
-			return source.ParseFull
-		}
+	if s.isActiveLocked(id, nil) {
+		return source.ParseFull
 	}
 	return source.ParseExported
 }
 
-func checkPackageKey(id packageID, pghs []*parseGoHandle, cfg *packages.Config, deps []packageHandleKey, mode source.ParseMode, experimentalKey bool) packageHandleKey {
+func checkPackageKey(id PackageID, pghs []*parseGoHandle, cfg *packages.Config, deps []packageHandleKey, mode source.ParseMode, experimentalKey bool) packageHandleKey {
 	b := bytes.NewBuffer(nil)
 	b.WriteString(string(id))
 	if !experimentalKey {
@@ -285,17 +277,17 @@ func (ph *packageHandle) check(ctx context.Context, s *snapshot) (*pkg, error) {
 }
 
 func (ph *packageHandle) CompiledGoFiles() []span.URI {
-	return ph.m.compiledGoFiles
+	return ph.m.CompiledGoFiles
 }
 
 func (ph *packageHandle) ID() string {
-	return string(ph.m.id)
+	return string(ph.m.ID)
 }
 
 func (ph *packageHandle) cached(g *memoize.Generation) (*pkg, error) {
 	v := ph.handle.Cached(g)
 	if v == nil {
-		return nil, errors.Errorf("no cached type information for %s", ph.m.pkgPath)
+		return nil, errors.Errorf("no cached type information for %s", ph.m.PkgPath)
 	}
 	data := v.(*packageData)
 	return data.pkg, data.err
@@ -313,7 +305,7 @@ func (s *snapshot) parseGoHandles(ctx context.Context, files []span.URI, mode so
 	return pghs, nil
 }
 
-func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source.ParseMode, deps map[packagePath]*packageHandle) (*pkg, error) {
+func typeCheck(ctx context.Context, snapshot *snapshot, m *Metadata, mode source.ParseMode, deps map[PackagePath]*packageHandle) (*pkg, error) {
 	var filter *unexportedFilter
 	if mode == source.ParseExported {
 		filter = &unexportedFilter{uses: map[string]bool{}}
@@ -329,7 +321,7 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
 		// time keeping those names.
 		missing, unexpected := filter.ProcessErrors(pkg.typeErrors)
 		if len(unexpected) == 0 && len(missing) != 0 {
-			event.Log(ctx, fmt.Sprintf("discovered missing identifiers: %v", missing), tag.Package.Of(string(m.id)))
+			event.Log(ctx, fmt.Sprintf("discovered missing identifiers: %v", missing), tag.Package.Of(string(m.ID)))
 			pkg, err = doTypeCheck(ctx, snapshot, m, mode, deps, filter)
 			if err != nil {
 				return nil, err
@@ -337,7 +329,7 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
 			missing, unexpected = filter.ProcessErrors(pkg.typeErrors)
 		}
 		if len(unexpected) != 0 || len(missing) != 0 {
-			event.Log(ctx, fmt.Sprintf("falling back to safe trimming due to type errors: %v or still-missing identifiers: %v", unexpected, missing), tag.Package.Of(string(m.id)))
+			event.Log(ctx, fmt.Sprintf("falling back to safe trimming due to type errors: %v or still-missing identifiers: %v", unexpected, missing), tag.Package.Of(string(m.ID)))
 			pkg, err = doTypeCheck(ctx, snapshot, m, mode, deps, nil)
 			if err != nil {
 				return nil, err
@@ -346,13 +338,13 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
 	}
 	// If this is a replaced module in the workspace, the version is
 	// meaningless, and we don't want clients to access it.
-	if m.module != nil {
-		version := m.module.Version
+	if m.Module != nil {
+		version := m.Module.Version
 		if source.IsWorkspaceModuleVersion(version) {
 			version = ""
 		}
 		pkg.version = &module.Version{
-			Path:    m.module.Path,
+			Path:    m.Module.Path,
 			Version: version,
 		}
 	}
@@ -362,7 +354,7 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
 		return pkg, nil
 	}
 
-	for _, e := range m.errors {
+	for _, e := range m.Errors {
 		diags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
 		if err != nil {
 			event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(pkg.ID()))
@@ -428,15 +420,15 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
 	return pkg, nil
 }
 
-func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source.ParseMode, deps map[packagePath]*packageHandle, astFilter *unexportedFilter) (*pkg, error) {
-	ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(m.id)))
+func doTypeCheck(ctx context.Context, snapshot *snapshot, m *Metadata, mode source.ParseMode, deps map[PackagePath]*packageHandle, astFilter *unexportedFilter) (*pkg, error) {
+	ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(m.ID)))
 	defer done()
 
 	pkg := &pkg{
 		m:       m,
 		mode:    mode,
-		imports: make(map[packagePath]*pkg),
-		types:   types.NewPackage(string(m.pkgPath), string(m.name)),
+		imports: make(map[PackagePath]*pkg),
+		types:   types.NewPackage(string(m.PkgPath), string(m.Name)),
 		typesInfo: &types.Info{
 			Types:      make(map[ast.Expr]types.TypeAndValue),
 			Defs:       make(map[*ast.Ident]types.Object),
@@ -445,11 +437,11 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode sour
 			Selections: make(map[*ast.SelectorExpr]*types.Selection),
 			Scopes:     make(map[ast.Node]*types.Scope),
 		},
-		typesSizes: m.typesSizes,
+		typesSizes: m.TypesSizes,
 	}
 	typeparams.InitInferred(pkg.typesInfo)
 
-	for _, gf := range pkg.m.goFiles {
+	for _, gf := range pkg.m.GoFiles {
 		// In the presence of line directives, we may need to report errors in
 		// non-compiled Go files, so we need to register them on the package.
 		// However, we only need to really parse them in ParseFull mode, when
@@ -474,18 +466,18 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode sour
 	}
 
 	// Use the default type information for the unsafe package.
-	if m.pkgPath == "unsafe" {
+	if m.PkgPath == "unsafe" {
 		// Don't type check Unsafe: it's unnecessary, and doing so exposes a data
 		// race to Unsafe.completed.
 		pkg.types = types.Unsafe
 		return pkg, nil
 	}
 
-	if len(m.compiledGoFiles) == 0 {
+	if len(m.CompiledGoFiles) == 0 {
 		// No files most likely means go/packages failed. Try to attach error
 		// messages to the file as much as possible.
 		var found bool
-		for _, e := range m.errors {
+		for _, e := range m.Errors {
 			srcDiags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
 			if err != nil {
 				continue
@@ -496,7 +488,7 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode sour
 		if found {
 			return pkg, nil
 		}
-		return nil, errors.Errorf("no parsed files for package %s, expected: %v, errors: %v", pkg.m.pkgPath, pkg.compiledGoFiles, m.errors)
+		return nil, errors.Errorf("no parsed files for package %s, expected: %v, errors: %v", pkg.m.PkgPath, pkg.compiledGoFiles, m.Errors)
 	}
 
 	cfg := &types.Config{
@@ -512,14 +504,14 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode sour
 			if dep == nil {
 				return nil, snapshot.missingPkgError(ctx, pkgPath)
 			}
-			if !source.IsValidImport(string(m.pkgPath), string(dep.m.pkgPath)) {
+			if !source.IsValidImport(string(m.PkgPath), string(dep.m.PkgPath)) {
 				return nil, errors.Errorf("invalid use of internal package %s", pkgPath)
 			}
 			depPkg, err := dep.check(ctx, snapshot)
 			if err != nil {
 				return nil, err
 			}
-			pkg.imports[depPkg.m.pkgPath] = depPkg
+			pkg.imports[depPkg.m.PkgPath] = depPkg
 			return depPkg.types, nil
 		}),
 	}
@@ -552,7 +544,7 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode sour
 }
 
 func parseCompiledGoFiles(ctx context.Context, snapshot *snapshot, mode source.ParseMode, pkg *pkg, astFilter *unexportedFilter) error {
-	for _, cgf := range pkg.m.compiledGoFiles {
+	for _, cgf := range pkg.m.CompiledGoFiles {
 		fh, err := snapshot.GetFile(ctx, cgf)
 		if err != nil {
 			return err
@@ -613,7 +605,7 @@ func (s *snapshot) depsErrors(ctx context.Context, pkg *pkg) ([]*source.Diagnost
 		}
 
 		directImporter := depsError.ImportStack[directImporterIdx]
-		if s.isWorkspacePackage(packageID(directImporter)) {
+		if s.isWorkspacePackage(PackageID(directImporter)) {
 			continue
 		}
 		relevantErrors = append(relevantErrors, depsError)
@@ -648,7 +640,7 @@ func (s *snapshot) depsErrors(ctx context.Context, pkg *pkg) ([]*source.Diagnost
 	for _, depErr := range relevantErrors {
 		for i := len(depErr.ImportStack) - 1; i >= 0; i-- {
 			item := depErr.ImportStack[i]
-			if s.isWorkspacePackage(packageID(item)) {
+			if s.isWorkspacePackage(PackageID(item)) {
 				break
 			}
 
@@ -694,11 +686,11 @@ func (s *snapshot) depsErrors(ctx context.Context, pkg *pkg) ([]*source.Diagnost
 	for _, depErr := range relevantErrors {
 		for i := len(depErr.ImportStack) - 1; i >= 0; i-- {
 			item := depErr.ImportStack[i]
-			m := s.getMetadata(packageID(item))
-			if m == nil || m.module == nil {
+			m := s.getMetadata(PackageID(item))
+			if m == nil || m.Module == nil {
 				continue
 			}
-			modVer := module.Version{Path: m.module.Path, Version: m.module.Version}
+			modVer := module.Version{Path: m.Module.Path, Version: m.Module.Version}
 			reference := findModuleReference(pm.File, modVer)
 			if reference == nil {
 				continue
@@ -817,14 +809,14 @@ func expandErrors(errs []types.Error, supportsRelatedInformation bool) []extende
 // resolveImportPath resolves an import path in pkg to a package from deps.
 // It should produce the same results as resolveImportPath:
 // https://cs.opensource.google/go/go/+/master:src/cmd/go/internal/load/pkg.go;drc=641918ee09cb44d282a30ee8b66f99a0b63eaef9;l=990.
-func resolveImportPath(importPath string, pkg *pkg, deps map[packagePath]*packageHandle) *packageHandle {
-	if dep := deps[packagePath(importPath)]; dep != nil {
+func resolveImportPath(importPath string, pkg *pkg, deps map[PackagePath]*packageHandle) *packageHandle {
+	if dep := deps[PackagePath(importPath)]; dep != nil {
 		return dep
 	}
 	// We may be in GOPATH mode, in which case we need to check vendor dirs.
 	searchDir := path.Dir(pkg.PkgPath())
 	for {
-		vdir := packagePath(path.Join(searchDir, "vendor", importPath))
+		vdir := PackagePath(path.Join(searchDir, "vendor", importPath))
 		if vdep := deps[vdir]; vdep != nil {
 			return vdep
 		}
diff --git a/internal/lsp/cache/errors.go b/internal/lsp/cache/errors.go
index b866646b9e3..3f58d67c0fc 100644
--- a/internal/lsp/cache/errors.go
+++ b/internal/lsp/cache/errors.go
@@ -41,7 +41,7 @@ func goPackagesErrorDiagnostics(snapshot *snapshot, pkg *pkg, e packages.Error)
 
 	var spn span.Span
 	if e.Pos == "" {
-		spn = parseGoListError(e.Msg, pkg.m.config.Dir)
+		spn = parseGoListError(e.Msg, pkg.m.Config.Dir)
 		// We may not have been able to parse a valid span. Apply the errors to all files.
 		if _, err := spanToRange(pkg, spn); err != nil {
 			var diags []*source.Diagnostic
@@ -56,7 +56,7 @@ func goPackagesErrorDiagnostics(snapshot *snapshot, pkg *pkg, e packages.Error)
 			return diags, nil
 		}
 	} else {
-		spn = span.ParseInDir(e.Pos, pkg.m.config.Dir)
+		spn = span.ParseInDir(e.Pos, pkg.m.Config.Dir)
 	}
 
 	rng, err := spanToRange(pkg, spn)
diff --git a/internal/lsp/cache/load.go b/internal/lsp/cache/load.go
index 1baa3e5ee22..c5b5a3da465 100644
--- a/internal/lsp/cache/load.go
+++ b/internal/lsp/cache/load.go
@@ -8,7 +8,6 @@ import (
 	"context"
 	"crypto/sha256"
 	"fmt"
-	"go/types"
 	"io/ioutil"
 	"os"
 	"path/filepath"
@@ -28,30 +27,6 @@ import (
 	errors "golang.org/x/xerrors"
 )
 
-// metadata holds package metadata extracted from a call to packages.Load.
-type metadata struct {
-	id              packageID
-	pkgPath         packagePath
-	name            packageName
-	goFiles         []span.URI
-	compiledGoFiles []span.URI
-	forTest         packagePath
-	typesSizes      types.Sizes
-	errors          []packages.Error
-	deps            []packageID
-	missingDeps     map[packagePath]struct{}
-	module          *packages.Module
-	depsErrors      []*packagesinternal.PackageError
-
-	// config is the *packages.Config associated with the loaded package.
-	config *packages.Config
-
-	// isIntermediateTestVariant reports whether the given package is an
-	// intermediate test variant, e.g.
-	// "golang.org/x/tools/internal/lsp/cache [golang.org/x/tools/internal/lsp/source.test]".
-	isIntermediateTestVariant bool
-}
-
 // load calls packages.Load for the given scopes, updating package metadata,
 // import graph, and mapped files with the result.
 func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...interface{}) (err error) {
@@ -70,7 +45,7 @@ func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...interf
 			s.clearShouldLoad(scope)
 		}()
 		switch scope := scope.(type) {
-		case packagePath:
+		case PackagePath:
 			if source.IsCommandLineArguments(string(scope)) {
 				panic("attempted to load command-line-arguments")
 			}
@@ -117,9 +92,7 @@ func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...interf
 	if s.view.Options().VerboseWorkDoneProgress {
 		work := s.view.session.progress.Start(ctx, "Load", fmt.Sprintf("Loading query=%s", query), nil, nil)
 		defer func() {
-			go func() {
-				work.End("Done.")
-			}()
+			work.End("Done.")
 		}()
 	}
 
@@ -195,12 +168,12 @@ func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...interf
 		}
 		// Set the metadata for this package.
 		s.mu.Lock()
-		m, err := s.setMetadataLocked(ctx, packagePath(pkg.PkgPath), pkg, cfg, map[packageID]struct{}{})
+		m, err := s.setMetadataLocked(ctx, PackagePath(pkg.PkgPath), pkg, cfg, query, map[PackageID]struct{}{})
 		s.mu.Unlock()
 		if err != nil {
 			return err
 		}
-		if _, err := s.buildPackageHandle(ctx, m.id, s.workspaceParseMode(m.id)); err != nil {
+		if _, err := s.buildPackageHandle(ctx, m.ID, s.workspaceParseMode(m.ID)); err != nil {
 			return err
 		}
 	}
@@ -393,20 +366,25 @@ func getWorkspaceDir(ctx context.Context, h *memoize.Handle, g *memoize.Generati
 // setMetadataLocked extracts metadata from pkg and records it in s. It
 // recurses through pkg.Imports to ensure that metadata exists for all
 // dependencies.
-func (s *snapshot) setMetadataLocked(ctx context.Context, pkgPath packagePath, pkg *packages.Package, cfg *packages.Config, seen map[packageID]struct{}) (*metadata, error) {
-	id := packageID(pkg.ID)
+func (s *snapshot) setMetadataLocked(ctx context.Context, pkgPath PackagePath, pkg *packages.Package, cfg *packages.Config, query []string, seen map[PackageID]struct{}) (*Metadata, error) {
+	id := PackageID(pkg.ID)
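+	// Ad-hoc packages are loaded with the non-unique ID "command-line-arguments";
+	// append the load query so that distinct ad-hoc packages get distinct IDs
+	// and package paths in the metadata maps.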
+	if source.IsCommandLineArguments(pkg.ID) {
+		suffix := ":" + strings.Join(query, ",")
+		id = PackageID(string(id) + suffix)
+		pkgPath = PackagePath(string(pkgPath) + suffix)
+	}
 	if _, ok := seen[id]; ok {
 		return nil, errors.Errorf("import cycle detected: %q", id)
 	}
 	// Recreate the metadata rather than reusing it to avoid locking.
-	m := &metadata{
-		id:         id,
-		pkgPath:    pkgPath,
-		name:       packageName(pkg.Name),
-		forTest:    packagePath(packagesinternal.GetForTest(pkg)),
-		typesSizes: pkg.TypesSizes,
-		config:     cfg,
-		module:     pkg.Module,
+	m := &Metadata{
+		ID:         id,
+		PkgPath:    pkgPath,
+		Name:       PackageName(pkg.Name),
+		ForTest:    PackagePath(packagesinternal.GetForTest(pkg)),
+		TypesSizes: pkg.TypesSizes,
+		Config:     cfg,
+		Module:     pkg.Module,
 		depsErrors: packagesinternal.GetDepsErrors(pkg),
 	}
 
@@ -417,45 +395,45 @@ func (s *snapshot) setMetadataLocked(ctx context.Context, pkgPath packagePath, p
 		if strings.Contains(err.Msg, "expected '") {
 			continue
 		}
-		m.errors = append(m.errors, err)
+		m.Errors = append(m.Errors, err)
 	}
 
 	uris := map[span.URI]struct{}{}
 	for _, filename := range pkg.CompiledGoFiles {
 		uri := span.URIFromPath(filename)
-		m.compiledGoFiles = append(m.compiledGoFiles, uri)
+		m.CompiledGoFiles = append(m.CompiledGoFiles, uri)
 		uris[uri] = struct{}{}
 	}
 	for _, filename := range pkg.GoFiles {
 		uri := span.URIFromPath(filename)
-		m.goFiles = append(m.goFiles, uri)
+		m.GoFiles = append(m.GoFiles, uri)
 		uris[uri] = struct{}{}
 	}
 	s.updateIDForURIsLocked(id, uris)
 
 	// TODO(rstambler): is this still necessary?
-	copied := map[packageID]struct{}{
+	copied := map[PackageID]struct{}{
 		id: {},
 	}
 	for k, v := range seen {
 		copied[k] = v
 	}
 	for importPath, importPkg := range pkg.Imports {
-		importPkgPath := packagePath(importPath)
-		importID := packageID(importPkg.ID)
+		importPkgPath := PackagePath(importPath)
+		importID := PackageID(importPkg.ID)
 
-		m.deps = append(m.deps, importID)
+		m.Deps = append(m.Deps, importID)
 
 		// Don't remember any imports with significant errors.
 		if importPkgPath != "unsafe" && len(importPkg.CompiledGoFiles) == 0 {
-			if m.missingDeps == nil {
-				m.missingDeps = make(map[packagePath]struct{})
+			if m.MissingDeps == nil {
+				m.MissingDeps = make(map[PackagePath]struct{})
 			}
-			m.missingDeps[importPkgPath] = struct{}{}
+			m.MissingDeps[importPkgPath] = struct{}{}
 			continue
 		}
 		if s.noValidMetadataForIDLocked(importID) {
-			if _, err := s.setMetadataLocked(ctx, importPkgPath, importPkg, cfg, copied); err != nil {
+			if _, err := s.setMetadataLocked(ctx, importPkgPath, importPkg, cfg, query, copied); err != nil {
 				event.Error(ctx, "error in dependency", err)
 			}
 		}
@@ -464,25 +442,25 @@ func (s *snapshot) setMetadataLocked(ctx context.Context, pkgPath packagePath, p
 	// Add the metadata to the cache.
 
 	// If we've already set the metadata for this snapshot, reuse it.
-	if original, ok := s.metadata[m.id]; ok && original.valid {
+	if original, ok := s.metadata[m.ID]; ok && original.Valid {
 		// Since we've just reloaded, clear out shouldLoad.
-		original.shouldLoad = false
-		m = original.metadata
+		original.ShouldLoad = false
+		m = original.Metadata
 	} else {
-		s.metadata[m.id] = &knownMetadata{
-			metadata: m,
-			valid:    true,
+		s.metadata[m.ID] = &KnownMetadata{
+			Metadata: m,
+			Valid:    true,
 		}
 		// Invalidate any packages we may have associated with this metadata.
 		for _, mode := range []source.ParseMode{source.ParseHeader, source.ParseExported, source.ParseFull} {
-			key := packageKey{mode, m.id}
+			key := packageKey{mode, m.ID}
 			delete(s.packages, key)
 		}
 	}
 
 	// Set the workspace packages. If any of the package's files belong to the
 	// view, then the package may be a workspace package.
-	for _, uri := range append(m.compiledGoFiles, m.goFiles...) {
+	for _, uri := range append(m.CompiledGoFiles, m.GoFiles...) {
 		if !s.view.contains(uri) {
 			continue
 		}
@@ -494,16 +472,16 @@ func (s *snapshot) setMetadataLocked(ctx context.Context, pkgPath packagePath, p
 		}
 
 		switch {
-		case m.forTest == "":
+		case m.ForTest == "":
 			// A normal package.
-			s.workspacePackages[m.id] = pkgPath
-		case m.forTest == m.pkgPath, m.forTest+"_test" == m.pkgPath:
+			s.workspacePackages[m.ID] = pkgPath
+		case m.ForTest == m.PkgPath, m.ForTest+"_test" == m.PkgPath:
 			// The test variant of some workspace package or its x_test.
 			// To load it, we need to load the non-test variant with -test.
-			s.workspacePackages[m.id] = m.forTest
+			s.workspacePackages[m.ID] = m.ForTest
 		default:
 			// A test variant of some intermediate package. We don't care about it.
-			m.isIntermediateTestVariant = true
+			m.IsIntermediateTestVariant = true
 		}
 	}
 	return m, nil
diff --git a/internal/lsp/cache/metadata.go b/internal/lsp/cache/metadata.go
new file mode 100644
index 00000000000..bef7bf8e708
--- /dev/null
+++ b/internal/lsp/cache/metadata.go
@@ -0,0 +1,69 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cache
+
+import (
+	"go/types"
+
+	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/packagesinternal"
+	"golang.org/x/tools/internal/span"
+)
+
+// Declare explicit types for package paths, names, and IDs to ensure that we
+// never use an ID where a path belongs, and vice versa. If we confused these,
+// it would result in confusing errors because package IDs often look like
+// package paths.
+type (
+	PackageID   string
+	PackagePath string
+	PackageName string
+)
+
+// Metadata holds package metadata extracted from a call to packages.Load.
+type Metadata struct {
+	ID              PackageID
+	PkgPath         PackagePath
+	Name            PackageName
+	GoFiles         []span.URI
+	CompiledGoFiles []span.URI
+	ForTest         PackagePath
+	TypesSizes      types.Sizes
+	Errors          []packages.Error
+	Deps            []PackageID
+	MissingDeps     map[PackagePath]struct{}
+	Module          *packages.Module
+	depsErrors      []*packagesinternal.PackageError
+
+	// Config is the *packages.Config associated with the loaded package.
+	Config *packages.Config
+
+	// IsIntermediateTestVariant reports whether the given package is an
+	// intermediate test variant, e.g.
+	// "golang.org/x/tools/internal/lsp/cache [golang.org/x/tools/internal/lsp/source.test]".
+	IsIntermediateTestVariant bool
+}
+
+// PackageName implements the source.Metadata interface.
+func (m *Metadata) PackageName() string {
+	return string(m.Name)
+}
+
+// PackagePath implements the source.Metadata interface.
+func (m *Metadata) PackagePath() string {
+	return string(m.PkgPath)
+}
+
+// KnownMetadata is a wrapper around metadata that tracks its validity.
+type KnownMetadata struct {
+	*Metadata
+
+	// Valid is true if the given metadata is valid.
+	// Invalid metadata can still be used if a metadata reload fails.
+	Valid bool
+
+	// ShouldLoad is true if the given metadata should be reloaded.
+	ShouldLoad bool
+}
diff --git a/internal/lsp/cache/os_darwin.go b/internal/lsp/cache/os_darwin.go
index 73c26fd4294..2c88be1fcbe 100644
--- a/internal/lsp/cache/os_darwin.go
+++ b/internal/lsp/cache/os_darwin.go
@@ -52,7 +52,7 @@ func darwinCheckPathCase(path string) error {
 			break
 		}
 		if g != w {
-			return fmt.Errorf("case mismatch in path %q: component %q should be %q", path, g, w)
+			return fmt.Errorf("case mismatch in path %q: component %q is listed by macOS as %q", path, g, w)
 		}
 	}
 	return nil
diff --git a/internal/lsp/cache/os_windows.go b/internal/lsp/cache/os_windows.go
index 4bf51702f48..7ff1cce7469 100644
--- a/internal/lsp/cache/os_windows.go
+++ b/internal/lsp/cache/os_windows.go
@@ -48,7 +48,7 @@ func windowsCheckPathCase(path string) error {
 	}
 	for got, want := path, longstr; !isRoot(got) && !isRoot(want); got, want = filepath.Dir(got), filepath.Dir(want) {
 		if g, w := filepath.Base(got), filepath.Base(want); g != w {
-			return fmt.Errorf("case mismatch in path %q: component %q should be %q", path, g, w)
+			return fmt.Errorf("case mismatch in path %q: component %q is listed by Windows as %q", path, g, w)
 		}
 	}
 	return nil
diff --git a/internal/lsp/cache/parse.go b/internal/lsp/cache/parse.go
index f7cf1af0bd7..742f48f6a1d 100644
--- a/internal/lsp/cache/parse.go
+++ b/internal/lsp/cache/parse.go
@@ -107,9 +107,9 @@ type astCacheKey struct {
 
 func (s *snapshot) astCacheData(ctx context.Context, spkg source.Package, pos token.Pos) (*astCacheData, error) {
 	pkg := spkg.(*pkg)
-	pkgHandle := s.getPackage(pkg.m.id, pkg.mode)
+	pkgHandle := s.getPackage(pkg.m.ID, pkg.mode)
 	if pkgHandle == nil {
-		return nil, fmt.Errorf("could not reconstruct package handle for %v", pkg.m.id)
+		return nil, fmt.Errorf("could not reconstruct package handle for %v", pkg.m.ID)
 	}
 	tok := s.FileSet().File(pos)
 	if tok == nil {
@@ -1070,7 +1070,17 @@ func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte
 
 	exprBytes := make([]byte, 0, int(to-from)+3)
 	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
-	exprBytes = append(exprBytes, src[tok.Offset(from):tok.Offset(to-1)+1]...)
+	// It also panics if the position is not in the range of the file, and
+	// badExprs may not necessarily have good positions, so check first.
+	if !source.InRange(tok, from) {
+		return false
+	}
+	if !source.InRange(tok, to-1) {
+		return false
+	}
+	fromOffset := tok.Offset(from)
+	toOffset := tok.Offset(to-1) + 1
+	exprBytes = append(exprBytes, src[fromOffset:toOffset]...)
 	exprBytes = bytes.TrimSpace(exprBytes)
 
 	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
diff --git a/internal/lsp/cache/pkg.go b/internal/lsp/cache/pkg.go
index 5a87a149bee..88ea88665d1 100644
--- a/internal/lsp/cache/pkg.go
+++ b/internal/lsp/cache/pkg.go
@@ -17,12 +17,12 @@ import (
 
 // pkg contains the type information needed by the source package.
 type pkg struct {
-	m               *metadata
+	m               *Metadata
 	mode            source.ParseMode
 	goFiles         []*source.ParsedGoFile
 	compiledGoFiles []*source.ParsedGoFile
 	diagnostics     []*source.Diagnostic
-	imports         map[packagePath]*pkg
+	imports         map[PackagePath]*pkg
 	version         *module.Version
 	parseErrors     []scanner.ErrorList
 	typeErrors      []types.Error
@@ -32,16 +32,6 @@ type pkg struct {
 	hasFixedFiles   bool
 }
 
-// Declare explicit types for package paths, names, and IDs to ensure that we
-// never use an ID where a path belongs, and vice versa. If we confused these,
-// it would result in confusing errors because package IDs often look like
-// package paths.
-type (
-	packageID   string
-	packagePath string
-	packageName string
-)
-
 // Declare explicit types for files and directories to distinguish between the two.
 type (
 	fileURI         span.URI
@@ -50,15 +40,15 @@ type (
 )
 
 func (p *pkg) ID() string {
-	return string(p.m.id)
+	return string(p.m.ID)
 }
 
 func (p *pkg) Name() string {
-	return string(p.m.name)
+	return string(p.m.Name)
 }
 
 func (p *pkg) PkgPath() string {
-	return string(p.m.pkgPath)
+	return string(p.m.PkgPath)
 }
 
 func (p *pkg) ParseMode() source.ParseMode {
@@ -80,7 +70,7 @@ func (p *pkg) File(uri span.URI) (*source.ParsedGoFile, error) {
 			return gf, nil
 		}
 	}
-	return nil, errors.Errorf("no parsed file for %s in %v", uri, p.m.id)
+	return nil, errors.Errorf("no parsed file for %s in %v", uri, p.m.ID)
 }
 
 func (p *pkg) GetSyntax() []*ast.File {
@@ -108,11 +98,11 @@ func (p *pkg) IsIllTyped() bool {
 }
 
 func (p *pkg) ForTest() string {
-	return string(p.m.forTest)
+	return string(p.m.ForTest)
 }
 
 func (p *pkg) GetImport(pkgPath string) (source.Package, error) {
-	if imp := p.imports[packagePath(pkgPath)]; imp != nil {
+	if imp := p.imports[PackagePath(pkgPath)]; imp != nil {
 		return imp, nil
 	}
 	// Don't return a nil pointer because that still satisfies the interface.
@@ -124,14 +114,14 @@ func (p *pkg) MissingDependencies() []string {
 	// imports via the *types.Package. Only use metadata if p.types is nil.
 	if p.types == nil {
 		var md []string
-		for i := range p.m.missingDeps {
+		for i := range p.m.MissingDeps {
 			md = append(md, string(i))
 		}
 		return md
 	}
 	var md []string
 	for _, pkg := range p.types.Imports() {
-		if _, ok := p.m.missingDeps[packagePath(pkg.Path())]; ok {
+		if _, ok := p.m.MissingDeps[PackagePath(pkg.Path())]; ok {
 			md = append(md, pkg.Path())
 		}
 	}
@@ -151,7 +141,7 @@ func (p *pkg) Version() *module.Version {
 }
 
 func (p *pkg) HasListOrParseErrors() bool {
-	return len(p.m.errors) != 0 || len(p.parseErrors) != 0
+	return len(p.m.Errors) != 0 || len(p.parseErrors) != 0
 }
 
 func (p *pkg) HasTypeErrors() bool {
diff --git a/internal/lsp/cache/session.go b/internal/lsp/cache/session.go
index 2cd85b93400..bcb799a6686 100644
--- a/internal/lsp/cache/session.go
+++ b/internal/lsp/cache/session.go
@@ -140,13 +140,15 @@ func (s *Session) SetProgressTracker(tracker *progress.Tracker) {
 }
 
 func (s *Session) Shutdown(ctx context.Context) {
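+	// Collect the views under viewMu, but release the lock before shutting
+	// them down so that viewMu is not held during view shutdown.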
+	var views []*View
 	s.viewMu.Lock()
-	defer s.viewMu.Unlock()
-	for _, view := range s.views {
-		view.shutdown(ctx)
-	}
+	views = append(views, s.views...)
 	s.views = nil
 	s.viewMap = nil
+	s.viewMu.Unlock()
+	for _, view := range views {
+		view.shutdown(ctx)
+	}
 	event.Log(ctx, "Shutdown session", KeyShutdownSession.Of(s))
 }
 
@@ -228,13 +230,14 @@ func (s *Session) createView(ctx context.Context, name string, folder, tempWorks
 		initializeOnce:    &sync.Once{},
 		generation:        s.cache.store.Generation(generationName(v, 0)),
 		packages:          make(map[packageKey]*packageHandle),
-		ids:               make(map[span.URI][]packageID),
-		metadata:          make(map[packageID]*knownMetadata),
+		ids:               make(map[span.URI][]PackageID),
+		metadata:          make(map[PackageID]*KnownMetadata),
 		files:             make(map[span.URI]source.VersionedFileHandle),
 		goFiles:           make(map[parseKey]*parseGoHandle),
-		importedBy:        make(map[packageID][]packageID),
+		symbols:           make(map[span.URI]*symbolHandle),
+		importedBy:        make(map[PackageID][]PackageID),
 		actions:           make(map[actionKey]*actionHandle),
-		workspacePackages: make(map[packageID]packagePath),
+		workspacePackages: make(map[PackageID]PackagePath),
 		unloadableFiles:   make(map[span.URI]struct{}),
 		parseModHandles:   make(map[span.URI]*parseModHandle),
 		modTidyHandles:    make(map[span.URI]*modTidyHandle),
diff --git a/internal/lsp/cache/snapshot.go b/internal/lsp/cache/snapshot.go
index c741885d1f8..7744f9ebc8f 100644
--- a/internal/lsp/cache/snapshot.go
+++ b/internal/lsp/cache/snapshot.go
@@ -69,14 +69,14 @@ type snapshot struct {
 
 	// ids maps file URIs to package IDs.
 	// It may be invalidated on calls to go/packages.
-	ids map[span.URI][]packageID
+	ids map[span.URI][]PackageID
 
 	// metadata maps package IDs to their associated metadata.
 	// It may be invalidated on calls to go/packages.
-	metadata map[packageID]*knownMetadata
+	metadata map[PackageID]*KnownMetadata
 
 	// importedBy maps package IDs to the list of packages that import them.
-	importedBy map[packageID][]packageID
+	importedBy map[PackageID][]PackageID
 
 	// files maps file URIs to their corresponding FileHandles.
 	// It may be invalidated when a file's content changes.
@@ -85,6 +85,9 @@ type snapshot struct {
 	// goFiles maps a parseKey to its parseGoHandle.
 	goFiles map[parseKey]*parseGoHandle
 
+	// TODO(rfindley): consider merging this with files to reduce burden on clone.
+	symbols map[span.URI]*symbolHandle
+
 	// packages maps a packageKey to a set of packageHandles to which that file belongs.
 	// It may be invalidated when a file's content changes.
 	packages map[packageKey]*packageHandle
@@ -94,7 +97,7 @@ type snapshot struct {
 
 	// workspacePackages contains the workspace's packages, which are loaded
 	// when the view is created.
-	workspacePackages map[packageID]packagePath
+	workspacePackages map[PackageID]PackagePath
 
 	// unloadableFiles keeps track of files that we've failed to load.
 	unloadableFiles map[span.URI]struct{}
@@ -123,7 +126,7 @@ type snapshot struct {
 
 type packageKey struct {
 	mode source.ParseMode
-	id   packageID
+	id   PackageID
 }
 
 type actionKey struct {
@@ -131,18 +134,6 @@ type actionKey struct {
 	analyzer *analysis.Analyzer
 }
 
-// knownMetadata is a wrapper around metadata that tracks its validity.
-type knownMetadata struct {
-	*metadata
-
-	// valid is true if the given metadata is valid.
-	// Invalid metadata can still be used if a metadata reload fails.
-	valid bool
-
-	// shouldLoad is true if the given metadata should be reloaded.
-	shouldLoad bool
-}
-
 func (s *snapshot) ID() uint64 {
 	return s.id
 }
@@ -462,10 +453,10 @@ func hashUnsavedOverlays(files map[span.URI]source.VersionedFileHandle) string {
 	return hashContents([]byte(strings.Join(unsaved, "")))
 }
 
-func (s *snapshot) PackagesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode) ([]source.Package, error) {
+func (s *snapshot) PackagesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode, includeTestVariants bool) ([]source.Package, error) {
 	ctx = event.Label(ctx, tag.URI.Of(uri))
 
-	phs, err := s.packageHandlesForFile(ctx, uri, mode)
+	phs, err := s.packageHandlesForFile(ctx, uri, mode, includeTestVariants)
 	if err != nil {
 		return nil, err
 	}
@@ -483,7 +474,7 @@ func (s *snapshot) PackagesForFile(ctx context.Context, uri span.URI, mode sourc
 func (s *snapshot) PackageForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode, pkgPolicy source.PackageFilter) (source.Package, error) {
 	ctx = event.Label(ctx, tag.URI.Of(uri))
 
-	phs, err := s.packageHandlesForFile(ctx, uri, mode)
+	phs, err := s.packageHandlesForFile(ctx, uri, mode, false)
 	if err != nil {
 		return nil, err
 	}
@@ -512,7 +503,7 @@ func (s *snapshot) PackageForFile(ctx context.Context, uri span.URI, mode source
 	return ph.check(ctx, s)
 }
 
-func (s *snapshot) packageHandlesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode) ([]*packageHandle, error) {
+func (s *snapshot) packageHandlesForFile(ctx context.Context, uri span.URI, mode source.TypecheckMode, includeTestVariants bool) ([]*packageHandle, error) {
 	// Check if we should reload metadata for the file. We don't invalidate IDs
 	// (though we should), so the IDs will be a better source of truth than the
 	// metadata. If there are no IDs for the file, then we should also reload.
@@ -523,39 +514,16 @@ func (s *snapshot) packageHandlesForFile(ctx context.Context, uri span.URI, mode
 	if fh.Kind() != source.Go {
 		return nil, fmt.Errorf("no packages for non-Go file %s", uri)
 	}
-	knownIDs := s.getIDsForURI(uri)
-	reload := len(knownIDs) == 0
-	for _, id := range knownIDs {
-		// Reload package metadata if any of the metadata has missing
-		// dependencies, in case something has changed since the last time we
-		// reloaded it.
-		if s.noValidMetadataForID(id) {
-			reload = true
-			break
-		}
-		// TODO(golang/go#36918): Previously, we would reload any package with
-		// missing dependencies. This is expensive and results in too many
-		// calls to packages.Load. Determine what we should do instead.
-	}
-	if reload {
-		err = s.load(ctx, false, fileURI(uri))
-
-		if !s.useInvalidMetadata() && err != nil {
-			return nil, err
-		}
-		// We've tried to reload and there are still no known IDs for the URI.
-		// Return the load error, if there was one.
-		knownIDs = s.getIDsForURI(uri)
-		if len(knownIDs) == 0 {
-			return nil, err
-		}
+	knownIDs, err := s.getOrLoadIDsForURI(ctx, uri)
+	if err != nil {
+		return nil, err
 	}
 
 	var phs []*packageHandle
 	for _, id := range knownIDs {
 		// Filter out any intermediate test variants. We typically aren't
 		// interested in these packages for file= style queries.
-		if m := s.getMetadata(id); m != nil && m.isIntermediateTestVariant {
+		if m := s.getMetadata(id); m != nil && m.IsIntermediateTestVariant && !includeTestVariants {
 			continue
 		}
 		var parseModes []source.ParseMode
@@ -583,6 +551,37 @@ func (s *snapshot) packageHandlesForFile(ctx context.Context, uri span.URI, mode
 	return phs, nil
 }
 
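+// getOrLoadIDsForURI returns the package IDs associated with uri. If there
+// are no known IDs, or if any of the known metadata is invalid, it first
+// reloads package metadata for the file.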
+func (s *snapshot) getOrLoadIDsForURI(ctx context.Context, uri span.URI) ([]PackageID, error) {
+	knownIDs := s.getIDsForURI(uri)
+	reload := len(knownIDs) == 0
+	for _, id := range knownIDs {
+		// Reload package metadata if any of the metadata has missing
+		// dependencies, in case something has changed since the last time we
+		// reloaded it.
+		if s.noValidMetadataForID(id) {
+			reload = true
+			break
+		}
+		// TODO(golang/go#36918): Previously, we would reload any package with
+		// missing dependencies. This is expensive and results in too many
+		// calls to packages.Load. Determine what we should do instead.
+	}
+	if reload {
+		err := s.load(ctx, false, fileURI(uri))
+
+		if !s.useInvalidMetadata() && err != nil {
+			return nil, err
+		}
+		// We've tried to reload and there are still no known IDs for the URI.
+		// Return the load error, if there was one.
+		knownIDs = s.getIDsForURI(uri)
+		if len(knownIDs) == 0 {
+			return nil, err
+		}
+	}
+	return knownIDs, nil
+}
+
 // Only use invalid metadata for Go versions >= 1.13. Go 1.12 and below has
 // issues with overlays that will cause confusing error messages if we reuse
 // old metadata.
@@ -594,11 +593,11 @@ func (s *snapshot) GetReverseDependencies(ctx context.Context, id string) ([]sou
 	if err := s.awaitLoaded(ctx); err != nil {
 		return nil, err
 	}
-	ids := make(map[packageID]struct{})
-	s.transitiveReverseDependencies(packageID(id), ids)
+	ids := make(map[PackageID]struct{})
+	s.transitiveReverseDependencies(PackageID(id), ids)
 
 	// Make sure to delete the original package ID from the map.
-	delete(ids, packageID(id))
+	delete(ids, PackageID(id))
 
 	var pkgs []source.Package
 	for id := range ids {
@@ -611,7 +610,7 @@ func (s *snapshot) GetReverseDependencies(ctx context.Context, id string) ([]sou
 	return pkgs, nil
 }
 
-func (s *snapshot) checkedPackage(ctx context.Context, id packageID, mode source.ParseMode) (*pkg, error) {
+func (s *snapshot) checkedPackage(ctx context.Context, id PackageID, mode source.ParseMode) (*pkg, error) {
 	ph, err := s.buildPackageHandle(ctx, id, mode)
 	if err != nil {
 		return nil, err
@@ -621,13 +620,13 @@ func (s *snapshot) checkedPackage(ctx context.Context, id packageID, mode source
 
 // transitiveReverseDependencies populates the ids map with package IDs
 // belonging to the provided package and its transitive reverse dependencies.
-func (s *snapshot) transitiveReverseDependencies(id packageID, ids map[packageID]struct{}) {
+func (s *snapshot) transitiveReverseDependencies(id PackageID, ids map[PackageID]struct{}) {
 	if _, ok := ids[id]; ok {
 		return
 	}
 	m := s.getMetadata(id)
 	// Only use invalid metadata if we support it.
-	if m == nil || !(m.valid || s.useInvalidMetadata()) {
+	if m == nil || !(m.Valid || s.useInvalidMetadata()) {
 		return
 	}
 	ids[id] = struct{}{}
@@ -671,13 +670,13 @@ func (s *snapshot) getModTidyHandle(uri span.URI) *modTidyHandle {
 	return s.modTidyHandles[uri]
 }
 
-func (s *snapshot) getImportedBy(id packageID) []packageID {
+func (s *snapshot) getImportedBy(id PackageID) []PackageID {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 	return s.getImportedByLocked(id)
 }
 
-func (s *snapshot) getImportedByLocked(id packageID) []packageID {
+func (s *snapshot) getImportedByLocked(id PackageID) []PackageID {
 	// If we haven't rebuilt the import graph since creating the snapshot.
 	if len(s.importedBy) == 0 {
 		s.rebuildImportGraph()
@@ -690,13 +689,13 @@ func (s *snapshot) clearAndRebuildImportGraph() {
 	defer s.mu.Unlock()
 
 	// Completely invalidate the original map.
-	s.importedBy = make(map[packageID][]packageID)
+	s.importedBy = make(map[PackageID][]PackageID)
 	s.rebuildImportGraph()
 }
 
 func (s *snapshot) rebuildImportGraph() {
 	for id, m := range s.metadata {
-		for _, importID := range m.deps {
+		for _, importID := range m.Deps {
 			s.importedBy[importID] = append(s.importedBy[importID], id)
 		}
 	}
@@ -715,7 +714,7 @@ func (s *snapshot) addPackageHandle(ph *packageHandle) *packageHandle {
 	return ph
 }
 
-func (s *snapshot) workspacePackageIDs() (ids []packageID) {
+func (s *snapshot) workspacePackageIDs() (ids []PackageID) {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
@@ -725,20 +724,66 @@ func (s *snapshot) workspacePackageIDs() (ids []packageID) {
 	return ids
 }
 
-func (s *snapshot) getWorkspacePkgPath(id packageID) packagePath {
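+// activePackageIDs returns the IDs of "active" workspace packages. In
+// memory-conserving modes, only packages with an open compiled Go file
+// (directly or via a transitive dependency) are active; in ModeNormal all
+// workspace packages are active.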
+func (s *snapshot) activePackageIDs() (ids []PackageID) {
+	if s.view.Options().MemoryMode == source.ModeNormal {
+		return s.workspacePackageIDs()
+	}
+
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	seen := make(map[PackageID]bool)
+	for id := range s.workspacePackages {
+		if s.isActiveLocked(id, seen) {
+			ids = append(ids, id)
+		}
+	}
+	return ids
+}
+
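+// isActiveLocked reports whether the package with the given ID has an open
+// compiled Go file, either directly or via one of its transitive
+// dependencies. seen memoizes results across recursive calls and may be nil.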
+func (s *snapshot) isActiveLocked(id PackageID, seen map[PackageID]bool) (active bool) {
+	if seen == nil {
+		seen = make(map[PackageID]bool)
+	}
+	if seen, ok := seen[id]; ok {
+		return seen
+	}
+	defer func() {
+		seen[id] = active
+	}()
+	m, ok := s.metadata[id]
+	if !ok {
+		return false
+	}
+	for _, cgf := range m.CompiledGoFiles {
+		if s.isOpenLocked(cgf) {
+			return true
+		}
+	}
+	for _, dep := range m.Deps {
+		if s.isActiveLocked(dep, seen) {
+			return true
+		}
+	}
+	return false
+}
+
+func (s *snapshot) getWorkspacePkgPath(id PackageID) PackagePath {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
 	return s.workspacePackages[id]
 }
 
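+// fileExtensions lists the file suffixes that gopls asks clients to watch;
+// they are expanded into glob patterns such as "**/*.{go,mod,sum,work,tmpl}"
+// below.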
+const fileExtensions = "go,mod,sum,work,tmpl"
+
 func (s *snapshot) fileWatchingGlobPatterns(ctx context.Context) map[string]struct{} {
 	// Work-around microsoft/vscode#100870 by making sure that we are,
 	// at least, watching the user's entire workspace. This will still be
 	// applied to every folder in the workspace.
 	patterns := map[string]struct{}{
-		"**/*.{go,mod,sum}": {},
-		"**/*.*tmpl":        {},
+		fmt.Sprintf("**/*.{%s}", fileExtensions): {},
+		"**/*.*tmpl":                             {},
 	}
 	dirs := s.workspace.dirs(ctx, s)
 	for _, dir := range dirs {
@@ -752,7 +797,7 @@ func (s *snapshot) fileWatchingGlobPatterns(ctx context.Context) map[string]stru
 		// TODO(rstambler): If microsoft/vscode#3025 is resolved before
 		// microsoft/vscode#101042, we will need a work-around for Windows
 		// drive letter casing.
-		patterns[fmt.Sprintf("%s/**/*.{go,mod,sum,tmpl}", dirName)] = struct{}{}
+		patterns[fmt.Sprintf("%s/**/*.{%s}", dirName, fileExtensions)] = struct{}{}
 	}
 
 	// Some clients do not send notifications for changes to directories that
@@ -870,8 +915,23 @@ func (s *snapshot) knownFilesInDir(ctx context.Context, dir span.URI) []span.URI
 	return files
 }
 
-func (s *snapshot) WorkspacePackages(ctx context.Context) ([]source.Package, error) {
-	phs, err := s.workspacePackageHandles(ctx)
+func (s *snapshot) workspacePackageHandles(ctx context.Context) ([]*packageHandle, error) {
+	if err := s.awaitLoaded(ctx); err != nil {
+		return nil, err
+	}
+	var phs []*packageHandle
+	for _, pkgID := range s.workspacePackageIDs() {
+		ph, err := s.buildPackageHandle(ctx, pkgID, s.workspaceParseMode(pkgID))
+		if err != nil {
+			return nil, err
+		}
+		phs = append(phs, ph)
+	}
+	return phs, nil
+}
+
+func (s *snapshot) ActivePackages(ctx context.Context) ([]source.Package, error) {
+	phs, err := s.activePackageHandles(ctx)
 	if err != nil {
 		return nil, err
 	}
@@ -886,12 +946,12 @@ func (s *snapshot) WorkspacePackages(ctx context.Context) ([]source.Package, err
 	return pkgs, nil
 }
 
-func (s *snapshot) workspacePackageHandles(ctx context.Context) ([]*packageHandle, error) {
+func (s *snapshot) activePackageHandles(ctx context.Context) ([]*packageHandle, error) {
 	if err := s.awaitLoaded(ctx); err != nil {
 		return nil, err
 	}
 	var phs []*packageHandle
-	for _, pkgID := range s.workspacePackageIDs() {
+	for _, pkgID := range s.activePackageIDs() {
 		ph, err := s.buildPackageHandle(ctx, pkgID, s.workspaceParseMode(pkgID))
 		if err != nil {
 			return nil, err
@@ -901,6 +961,33 @@ func (s *snapshot) workspacePackageHandles(ctx context.Context) ([]*packageHandl
 	return phs, nil
 }
 
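+// Symbols extracts and returns symbol information for every file tracked by
+// the snapshot, keyed by URI. Results are memoized via symbol handles, which
+// are inherited by subsequent snapshots for unchanged files.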
+func (s *snapshot) Symbols(ctx context.Context) (map[span.URI][]source.Symbol, error) {
+	result := make(map[span.URI][]source.Symbol)
+	for uri, f := range s.files {
+		sh := s.buildSymbolHandle(ctx, f)
+		v, err := sh.handle.Get(ctx, s.generation, s)
+		if err != nil {
+			return nil, err
+		}
+		data := v.(*symbolData)
+		result[uri] = data.symbols
+	}
+	return result, nil
+}
+
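+// MetadataForFile returns metadata for each of the packages containing uri,
+// reloading package metadata if it is missing or invalid.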
+func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]source.Metadata, error) {
+	knownIDs, err := s.getOrLoadIDsForURI(ctx, uri)
+	if err != nil {
+		return nil, err
+	}
+	var mds []source.Metadata
+	for _, id := range knownIDs {
+		md := s.getMetadata(id)
+		mds = append(mds, md)
+	}
+	return mds, nil
+}
+
 func (s *snapshot) KnownPackages(ctx context.Context) ([]source.Package, error) {
 	if err := s.awaitLoaded(ctx); err != nil {
 		return nil, err
@@ -974,7 +1061,7 @@ func moduleForURI(modFiles map[span.URI]struct{}, uri span.URI) span.URI {
 	return match
 }
 
-func (s *snapshot) getPackage(id packageID, mode source.ParseMode) *packageHandle {
+func (s *snapshot) getPackage(id PackageID, mode source.ParseMode) *packageHandle {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
@@ -985,7 +1072,28 @@ func (s *snapshot) getPackage(id packageID, mode source.ParseMode) *packageHandl
 	return s.packages[key]
 }
 
-func (s *snapshot) getActionHandle(id packageID, m source.ParseMode, a *analysis.Analyzer) *actionHandle {
+func (s *snapshot) getSymbolHandle(uri span.URI) *symbolHandle {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	return s.symbols[uri]
+}
+
+func (s *snapshot) addSymbolHandle(sh *symbolHandle) *symbolHandle {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	uri := sh.fh.URI()
+	// If the package handle has already been cached,
+	// return the cached handle instead of overriding it.
+	if sh, ok := s.symbols[uri]; ok {
+		return sh
+	}
+	s.symbols[uri] = sh
+	return sh
+}
+
+func (s *snapshot) getActionHandle(id PackageID, m source.ParseMode, a *analysis.Analyzer) *actionHandle {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
@@ -1006,7 +1114,7 @@ func (s *snapshot) addActionHandle(ah *actionHandle) *actionHandle {
 	key := actionKey{
 		analyzer: ah.analyzer,
 		pkg: packageKey{
-			id:   ah.pkg.m.id,
+			id:   ah.pkg.m.ID,
 			mode: ah.pkg.mode,
 		},
 	}
@@ -1017,14 +1125,14 @@ func (s *snapshot) addActionHandle(ah *actionHandle) *actionHandle {
 	return ah
 }
 
-func (s *snapshot) getIDsForURI(uri span.URI) []packageID {
+func (s *snapshot) getIDsForURI(uri span.URI) []PackageID {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
 	return s.ids[uri]
 }
 
-func (s *snapshot) getMetadata(id packageID) *knownMetadata {
+func (s *snapshot) getMetadata(id PackageID) *KnownMetadata {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
@@ -1036,15 +1144,15 @@ func (s *snapshot) shouldLoad(scope interface{}) bool {
 	defer s.mu.Unlock()
 
 	switch scope := scope.(type) {
-	case packagePath:
-		var meta *knownMetadata
+	case PackagePath:
+		var meta *KnownMetadata
 		for _, m := range s.metadata {
-			if m.pkgPath != scope {
+			if m.PkgPath != scope {
 				continue
 			}
 			meta = m
 		}
-		if meta == nil || meta.shouldLoad {
+		if meta == nil || meta.ShouldLoad {
 			return true
 		}
 		return false
@@ -1056,7 +1164,7 @@ func (s *snapshot) shouldLoad(scope interface{}) bool {
 		}
 		for _, id := range ids {
 			m, ok := s.metadata[id]
-			if !ok || m.shouldLoad {
+			if !ok || m.ShouldLoad {
 				return true
 			}
 		}
@@ -1071,17 +1179,17 @@ func (s *snapshot) clearShouldLoad(scope interface{}) {
 	defer s.mu.Unlock()
 
 	switch scope := scope.(type) {
-	case packagePath:
-		var meta *knownMetadata
+	case PackagePath:
+		var meta *KnownMetadata
 		for _, m := range s.metadata {
-			if m.pkgPath == scope {
+			if m.PkgPath == scope {
 				meta = m
 			}
 		}
 		if meta == nil {
 			return
 		}
-		meta.shouldLoad = false
+		meta.ShouldLoad = false
 	case fileURI:
 		uri := span.URI(scope)
 		ids := s.ids[uri]
@@ -1090,7 +1198,7 @@ func (s *snapshot) clearShouldLoad(scope interface{}) {
 		}
 		for _, id := range ids {
 			if m, ok := s.metadata[id]; ok {
-				m.shouldLoad = false
+				m.ShouldLoad = false
 			}
 		}
 	}
@@ -1104,7 +1212,7 @@ func (s *snapshot) noValidMetadataForURILocked(uri span.URI) bool {
 		return true
 	}
 	for _, id := range ids {
-		if m, ok := s.metadata[id]; ok && m.valid {
+		if m, ok := s.metadata[id]; ok && m.Valid {
 			return false
 		}
 	}
@@ -1113,15 +1221,15 @@ func (s *snapshot) noValidMetadataForURILocked(uri span.URI) bool {
 
 // noValidMetadataForID reports whether there is no valid metadata for the
 // given ID.
-func (s *snapshot) noValidMetadataForID(id packageID) bool {
+func (s *snapshot) noValidMetadataForID(id PackageID) bool {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 	return s.noValidMetadataForIDLocked(id)
 }
 
-func (s *snapshot) noValidMetadataForIDLocked(id packageID) bool {
+func (s *snapshot) noValidMetadataForIDLocked(id PackageID) bool {
 	m := s.metadata[id]
-	return m == nil || !m.valid
+	return m == nil || !m.Valid
 }
 
 // updateIDForURIsLocked adds the given ID to the set of known IDs for the given URI.
@@ -1129,10 +1237,10 @@ func (s *snapshot) noValidMetadataForIDLocked(id packageID) bool {
 // not "command-line-arguments" are preferred, so if a new ID comes in for a
 // URI that previously only had "command-line-arguments", the new ID will
 // replace the "command-line-arguments" ID.
-func (s *snapshot) updateIDForURIsLocked(id packageID, uris map[span.URI]struct{}) {
+func (s *snapshot) updateIDForURIsLocked(id PackageID, uris map[span.URI]struct{}) {
 	for uri := range uris {
 		// Collect the new set of IDs, preserving any valid existing IDs.
-		newIDs := []packageID{id}
+		newIDs := []PackageID{id}
 		for _, existingID := range s.ids[uri] {
 			// Don't set duplicates of the same ID.
 			if existingID == id {
@@ -1146,7 +1254,7 @@ func (s *snapshot) updateIDForURIsLocked(id packageID, uris map[span.URI]struct{
 			}
 			// If the metadata for an existing ID is invalid, and we are
 			// setting metadata for a new, valid ID--don't preserve the old ID.
-			if m, ok := s.metadata[existingID]; !ok || !m.valid {
+			if m, ok := s.metadata[existingID]; !ok || !m.Valid {
 				continue
 			}
 			newIDs = append(newIDs, existingID)
@@ -1158,7 +1266,7 @@ func (s *snapshot) updateIDForURIsLocked(id packageID, uris map[span.URI]struct{
 	}
 }
 
-func (s *snapshot) isWorkspacePackage(id packageID) bool {
+func (s *snapshot) isWorkspacePackage(id PackageID) bool {
 	s.mu.Lock()
 	defer s.mu.Unlock()
 
@@ -1244,7 +1352,7 @@ func (s *snapshot) awaitLoaded(ctx context.Context) error {
 		return nil
 	}
 	for _, m := range s.metadata {
-		if m.valid {
+		if m.Valid {
 			return nil
 		}
 	}
@@ -1263,7 +1371,7 @@ func (s *snapshot) GetCriticalError(ctx context.Context) *source.CriticalError {
 	// Even if packages didn't fail to load, we still may want to show
 	// additional warnings.
 	if loadErr == nil {
-		wsPkgs, _ := s.WorkspacePackages(ctx)
+		wsPkgs, _ := s.ActivePackages(ctx)
 		if msg := shouldShowAdHocPackagesWarning(s, wsPkgs); msg != "" {
 			return &source.CriticalError{
 				MainError: errors.New(msg),
@@ -1368,9 +1476,9 @@ func (s *snapshot) reloadWorkspace(ctx context.Context) error {
 	// See which of the workspace packages are missing metadata.
 	s.mu.Lock()
 	missingMetadata := len(s.workspacePackages) == 0 || len(s.metadata) == 0
-	pkgPathSet := map[packagePath]struct{}{}
+	pkgPathSet := map[PackagePath]struct{}{}
 	for id, pkgPath := range s.workspacePackages {
-		if m, ok := s.metadata[id]; ok && m.valid {
+		if m, ok := s.metadata[id]; ok && m.Valid {
 			continue
 		}
 		missingMetadata = true
@@ -1515,7 +1623,7 @@ func checkSnapshotLocked(ctx context.Context, s *snapshot) {
 	// belonging to that workspace package.
 	for wsID := range s.workspacePackages {
 		if m, ok := s.metadata[wsID]; ok {
-			for _, uri := range m.goFiles {
+			for _, uri := range m.GoFiles {
 				found := false
 				for _, id := range s.ids[uri] {
 					if id == wsID {
@@ -1567,14 +1675,15 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 		builtin:           s.builtin,
 		initializeOnce:    s.initializeOnce,
 		initializedErr:    s.initializedErr,
-		ids:               make(map[span.URI][]packageID, len(s.ids)),
-		importedBy:        make(map[packageID][]packageID, len(s.importedBy)),
-		metadata:          make(map[packageID]*knownMetadata, len(s.metadata)),
+		ids:               make(map[span.URI][]PackageID, len(s.ids)),
+		importedBy:        make(map[PackageID][]PackageID, len(s.importedBy)),
+		metadata:          make(map[PackageID]*KnownMetadata, len(s.metadata)),
 		packages:          make(map[packageKey]*packageHandle, len(s.packages)),
 		actions:           make(map[actionKey]*actionHandle, len(s.actions)),
 		files:             make(map[span.URI]source.VersionedFileHandle, len(s.files)),
 		goFiles:           make(map[parseKey]*parseGoHandle, len(s.goFiles)),
-		workspacePackages: make(map[packageID]packagePath, len(s.workspacePackages)),
+		symbols:           make(map[span.URI]*symbolHandle, len(s.symbols)),
+		workspacePackages: make(map[PackageID]PackagePath, len(s.workspacePackages)),
 		unloadableFiles:   make(map[span.URI]struct{}, len(s.unloadableFiles)),
 		parseModHandles:   make(map[span.URI]*parseModHandle, len(s.parseModHandles)),
 		modTidyHandles:    make(map[span.URI]*modTidyHandle, len(s.modTidyHandles)),
@@ -1592,6 +1701,16 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 	for k, v := range s.files {
 		result.files[k] = v
 	}
+	for k, v := range s.symbols {
+		if change, ok := changes[k]; ok {
+			if change.exists {
+				result.symbols[k] = result.buildSymbolHandle(ctx, change.fileHandle)
+			}
+			continue
+		}
+		newGen.Inherit(v.handle)
+		result.symbols[k] = v
+	}
 
 	// Copy the set of unloadable files.
 	for k, v := range s.unloadableFiles {
@@ -1637,7 +1756,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 
 	// directIDs keeps track of package IDs that have directly changed.
 	// It maps id->invalidateMetadata.
-	directIDs := map[packageID]bool{}
+	directIDs := map[PackageID]bool{}
 
 	// Invalidate all package metadata if the workspace module has changed.
 	if workspaceReload {
@@ -1646,7 +1765,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 		}
 	}
 
-	changedPkgNames := map[packageID]struct{}{}
+	changedPkgNames := map[PackageID]struct{}{}
 	anyImportDeleted := false
 	for uri, change := range changes {
 		// Maybe reinitialize the view if we see a change in the vendor
@@ -1720,7 +1839,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 	// starting point to compare with.
 	if anyImportDeleted {
 		for id, metadata := range s.metadata {
-			if len(metadata.errors) > 0 {
+			if len(metadata.Errors) > 0 {
 				directIDs[id] = true
 			}
 		}
@@ -1731,9 +1850,9 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 	// idsToInvalidate keeps track of transitive reverse dependencies.
 	// If an ID is present in the map, invalidate its types.
 	// If an ID's value is true, invalidate its metadata too.
-	idsToInvalidate := map[packageID]bool{}
-	var addRevDeps func(packageID, bool)
-	addRevDeps = func(id packageID, invalidateMetadata bool) {
+	idsToInvalidate := map[PackageID]bool{}
+	var addRevDeps func(PackageID, bool)
+	addRevDeps = func(id PackageID, invalidateMetadata bool) {
 		current, seen := idsToInvalidate[id]
 		newInvalidateMetadata := current || invalidateMetadata
 
@@ -1773,7 +1892,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 	// If a file has been deleted, we must delete metadata for all packages
 	// containing that file.
 	workspaceModeChanged := s.workspaceMode() != result.workspaceMode()
-	skipID := map[packageID]bool{}
+	skipID := map[PackageID]bool{}
 	for _, c := range changes {
 		if c.exists {
 			continue
@@ -1788,9 +1907,9 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 
 	// Collect all of the IDs that are reachable from the workspace packages.
 	// Any unreachable IDs will have their metadata deleted outright.
-	reachableID := map[packageID]bool{}
-	var addForwardDeps func(packageID)
-	addForwardDeps = func(id packageID) {
+	reachableID := map[PackageID]bool{}
+	var addForwardDeps func(PackageID)
+	addForwardDeps = func(id PackageID) {
 		if reachableID[id] {
 			return
 		}
@@ -1799,7 +1918,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 		if !ok {
 			return
 		}
-		for _, depID := range m.deps {
+		for _, depID := range m.Deps {
 			addForwardDeps(depID)
 		}
 	}
@@ -1810,7 +1929,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 	// Copy the URI to package ID mappings, skipping only those URIs whose
 	// metadata will be reloaded in future calls to load.
 	deleteInvalidMetadata := forceReloadMetadata || workspaceModeChanged
-	idsInSnapshot := map[packageID]bool{} // track all known IDs
+	idsInSnapshot := map[PackageID]bool{} // track all known IDs
 	for uri, ids := range s.ids {
 		for _, id := range ids {
 			invalidateMetadata := idsToInvalidate[id]
@@ -1837,24 +1956,10 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 		}
 		invalidateMetadata := idsToInvalidate[k]
 		// Mark invalidated metadata rather than deleting it outright.
-		result.metadata[k] = &knownMetadata{
-			metadata:   v.metadata,
-			valid:      v.valid && !invalidateMetadata,
-			shouldLoad: v.shouldLoad || invalidateMetadata,
-		}
-	}
-	// Copy the URI to package ID mappings, skipping only those URIs whose
-	// metadata will be reloaded in future calls to load.
-	for k, ids := range s.ids {
-		var newIDs []packageID
-		for _, id := range ids {
-			if invalidateMetadata, ok := idsToInvalidate[id]; invalidateMetadata && ok {
-				continue
-			}
-			newIDs = append(newIDs, id)
-		}
-		if len(newIDs) != 0 {
-			result.ids[k] = newIDs
+		result.metadata[k] = &KnownMetadata{
+			Metadata:   v.Metadata,
+			Valid:      v.Valid && !invalidateMetadata,
+			ShouldLoad: v.ShouldLoad || invalidateMetadata,
 		}
 	}
 
@@ -1874,11 +1979,11 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 		// the package is gone and we should no longer try to load it.
 		if m := s.metadata[id]; m != nil {
 			hasFiles := false
-			for _, uri := range s.metadata[id].goFiles {
+			for _, uri := range s.metadata[id].GoFiles {
 				// For internal tests, we need _test files, not just the normal
 				// ones. External tests only have _test files, but we can check
 				// them anyway.
-				if m.forTest != "" && !strings.HasSuffix(string(uri), "_test.go") {
+				if m.ForTest != "" && !strings.HasSuffix(string(uri), "_test.go") {
 					continue
 				}
 				if _, ok := result.files[uri]; ok {
@@ -1918,7 +2023,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 	// If the snapshot's workspace mode has changed, the packages loaded using
 	// the previous mode are no longer relevant, so clear them out.
 	if workspaceModeChanged {
-		result.workspacePackages = map[packageID]packagePath{}
+		result.workspacePackages = map[PackageID]PackagePath{}
 	}
 
 	// The snapshot may need to be reinitialized.
@@ -1934,7 +2039,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
 // seen this URI before, we guess based on files in the same directory. This
 // is of course incorrect in build systems where packages are not organized by
 // directory.
-func guessPackageIDsForURI(uri span.URI, known map[span.URI][]packageID) []packageID {
+func guessPackageIDsForURI(uri span.URI, known map[span.URI][]PackageID) []PackageID {
 	packages := known[uri]
 	if len(packages) > 0 {
 		// We've seen this file before.
@@ -1967,7 +2072,7 @@ func guessPackageIDsForURI(uri span.URI, known map[span.URI][]packageID) []packa
 	}
 
 	// Aggregate all possibly relevant package IDs.
-	var found []packageID
+	var found []PackageID
 	for knownURI, ids := range known {
 		knownDir := filepath.Dir(knownURI.Filename())
 		knownFI, err := getInfo(knownDir)
@@ -1997,8 +2102,8 @@ func fileWasSaved(originalFH, currentFH source.FileHandle) bool {
 	return !o.saved && c.saved
 }
 
-// shouldInvalidateMetadata reparses a file's package and import declarations to
-// determine if the file requires a metadata reload.
+// shouldInvalidateMetadata reparses the full file's AST to determine
+// if the file requires a metadata reload.
 func (s *snapshot) shouldInvalidateMetadata(ctx context.Context, newSnapshot *snapshot, originalFH, currentFH source.FileHandle) (invalidate, pkgNameChanged, importDeleted bool) {
 	if originalFH == nil {
 		return true, false, false
@@ -2010,8 +2115,8 @@ func (s *snapshot) shouldInvalidateMetadata(ctx context.Context, newSnapshot *sn
 	// Get the original and current parsed files in order to check package name
 	// and imports. Use the new snapshot to parse to avoid modifying the
 	// current snapshot.
-	original, originalErr := newSnapshot.ParseGo(ctx, originalFH, source.ParseHeader)
-	current, currentErr := newSnapshot.ParseGo(ctx, currentFH, source.ParseHeader)
+	original, originalErr := newSnapshot.ParseGo(ctx, originalFH, source.ParseFull)
+	current, currentErr := newSnapshot.ParseGo(ctx, currentFH, source.ParseFull)
 	if originalErr != nil || currentErr != nil {
 		return (originalErr == nil) != (currentErr == nil), false, (currentErr != nil) // we don't know if an import was deleted
 	}
diff --git a/internal/lsp/cache/symbols.go b/internal/lsp/cache/symbols.go
new file mode 100644
index 00000000000..d1ecf2a121b
--- /dev/null
+++ b/internal/lsp/cache/symbols.go
@@ -0,0 +1,211 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cache
+
+import (
+	"context"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/memoize"
+	"golang.org/x/tools/internal/span"
+)
+
+type symbolHandle struct {
+	handle *memoize.Handle
+
+	fh source.FileHandle
+
+	// key is the hashed key for the file.
+	key symbolHandleKey
+}
+
+// symbolData contains the data produced by extracting symbols from a file.
+type symbolData struct {
+	symbols []source.Symbol
+	err     error
+}
+
+type symbolHandleKey string
+
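+// buildSymbolHandle returns a memoized handle for extracting symbols from fh,
+// creating and caching a new handle if one does not already exist for the
+// file's URI.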
+func (s *snapshot) buildSymbolHandle(ctx context.Context, fh source.FileHandle) *symbolHandle {
+	if h := s.getSymbolHandle(fh.URI()); h != nil {
+		return h
+	}
+	key := symbolHandleKey(fh.FileIdentity().Hash)
+	h := s.generation.Bind(key, func(ctx context.Context, arg memoize.Arg) interface{} {
+		snapshot := arg.(*snapshot)
+		data := &symbolData{}
+		data.symbols, data.err = symbolize(ctx, snapshot, fh)
+		return data
+	}, nil)
+
+	sh := &symbolHandle{
+		handle: h,
+		fh:     fh,
+		key:    key,
+	}
+	return s.addSymbolHandle(sh)
+}
+
+// symbolize extracts symbols from a file. It does not parse the file through the cache.
+func symbolize(ctx context.Context, snapshot *snapshot, fh source.FileHandle) ([]source.Symbol, error) {
+	var w symbolWalker
+	fset := token.NewFileSet() // don't use snapshot.FileSet, as that would needlessly leak memory.
+	data := parseGo(ctx, fset, fh, source.ParseFull)
+	if data.parsed != nil && data.parsed.File != nil {
+		w.curFile = data.parsed
+		w.curURI = protocol.URIFromSpanURI(data.parsed.URI)
+		w.fileDecls(data.parsed.File.Decls)
+	}
+	return w.symbols, w.firstError
+}
+
+type symbolWalker struct {
+	curFile    *source.ParsedGoFile
+	pkgName    string
+	curURI     protocol.DocumentURI
+	symbols    []source.Symbol
+	firstError error
+}
+
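+// atNode records a symbol for node with the given name and kind, qualified by
+// the identifiers in path (for example, a method's receiver or a field's
+// enclosing type).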
+func (w *symbolWalker) atNode(node ast.Node, name string, kind protocol.SymbolKind, path ...*ast.Ident) {
+	var b strings.Builder
+	for _, ident := range path {
+		if ident != nil {
+			b.WriteString(ident.Name)
+			b.WriteString(".")
+		}
+	}
+	b.WriteString(name)
+
+	rng, err := fileRange(w.curFile, node.Pos(), node.End())
+	if err != nil {
+		w.error(err)
+		return
+	}
+	sym := source.Symbol{
+		Name:  b.String(),
+		Kind:  kind,
+		Range: rng,
+	}
+	w.symbols = append(w.symbols, sym)
+}
+
+func (w *symbolWalker) error(err error) {
+	if err != nil && w.firstError == nil {
+		w.firstError = err
+	}
+}
+
+func fileRange(pgf *source.ParsedGoFile, start, end token.Pos) (protocol.Range, error) {
+	s, err := span.FileSpan(pgf.Tok, pgf.Mapper.Converter, start, end)
+	if err != nil {
+		return protocol.Range{}, err
+	}
+	return pgf.Mapper.Range(s)
+}
+
+func (w *symbolWalker) fileDecls(decls []ast.Decl) {
+	for _, decl := range decls {
+		switch decl := decl.(type) {
+		case *ast.FuncDecl:
+			kind := protocol.Function
+			var recv *ast.Ident
+			if decl.Recv.NumFields() > 0 {
+				kind = protocol.Method
+				recv = unpackRecv(decl.Recv.List[0].Type)
+			}
+			w.atNode(decl.Name, decl.Name.Name, kind, recv)
+		case *ast.GenDecl:
+			for _, spec := range decl.Specs {
+				switch spec := spec.(type) {
+				case *ast.TypeSpec:
+					kind := guessKind(spec)
+					w.atNode(spec.Name, spec.Name.Name, kind)
+					w.walkType(spec.Type, spec.Name)
+				case *ast.ValueSpec:
+					for _, name := range spec.Names {
+						kind := protocol.Variable
+						if decl.Tok == token.CONST {
+							kind = protocol.Constant
+						}
+						w.atNode(name, name.Name, kind)
+					}
+				}
+			}
+		}
+	}
+}
+
+func guessKind(spec *ast.TypeSpec) protocol.SymbolKind {
+	switch spec.Type.(type) {
+	case *ast.InterfaceType:
+		return protocol.Interface
+	case *ast.StructType:
+		return protocol.Struct
+	case *ast.FuncType:
+		return protocol.Function
+	}
+	return protocol.Class
+}
+
+func unpackRecv(rtyp ast.Expr) *ast.Ident {
+	// Extract the receiver identifier. Lifted from go/types/resolver.go
+L:
+	for {
+		switch t := rtyp.(type) {
+		case *ast.ParenExpr:
+			rtyp = t.X
+		case *ast.StarExpr:
+			rtyp = t.X
+		default:
+			break L
+		}
+	}
+	if name, _ := rtyp.(*ast.Ident); name != nil {
+		return name
+	}
+	return nil
+}
+
+// walkType processes symbols related to a type expression. path is the path
+// of nested type identifiers to the type expression.
+func (w *symbolWalker) walkType(typ ast.Expr, path ...*ast.Ident) {
+	switch st := typ.(type) {
+	case *ast.StructType:
+		for _, field := range st.Fields.List {
+			w.walkField(field, protocol.Field, protocol.Field, path...)
+		}
+	case *ast.InterfaceType:
+		for _, field := range st.Methods.List {
+			w.walkField(field, protocol.Interface, protocol.Method, path...)
+		}
+	}
+}
+
+// walkField processes symbols related to the struct field or interface method.
+//
+// unnamedKind and namedKind are the symbol kinds if the field is resp. unnamed
+// or named. path is the path of nested identifiers containing the field.
+func (w *symbolWalker) walkField(field *ast.Field, unnamedKind, namedKind protocol.SymbolKind, path ...*ast.Ident) {
+	if len(field.Names) == 0 {
+		switch typ := field.Type.(type) {
+		case *ast.SelectorExpr:
+			// embedded qualified type
+			w.atNode(field, typ.Sel.Name, unnamedKind, path...)
+		default:
+			w.atNode(field, types.ExprString(field.Type), unnamedKind, path...)
+		}
+	}
+	for _, name := range field.Names {
+		w.atNode(name, name.Name, namedKind, path...)
+		w.walkType(field.Type, append(path, name)...)
+	}
+}
diff --git a/internal/lsp/cache/view.go b/internal/lsp/cache/view.go
index 3f398824693..b54210ef67b 100644
--- a/internal/lsp/cache/view.go
+++ b/internal/lsp/cache/view.go
@@ -611,7 +611,7 @@ func (s *snapshot) loadWorkspace(ctx context.Context, firstAttempt bool) {
 	// If we're loading anything, ensure we also load builtin.
 	// TODO(rstambler): explain the rationale for this.
 	if len(scopes) > 0 {
-		scopes = append(scopes, packagePath("builtin"))
+		scopes = append(scopes, PackagePath("builtin"))
 	}
 	err := s.load(ctx, firstAttempt, scopes...)
 
@@ -727,7 +727,7 @@ func (v *View) updateWorkspaceLocked(ctx context.Context) error {
 
 func (s *Session) getWorkspaceInformation(ctx context.Context, folder span.URI, options *source.Options) (*workspaceInformation, error) {
 	if err := checkPathCase(folder.Filename()); err != nil {
-		return nil, errors.Errorf("invalid workspace configuration: %w", err)
+		return nil, errors.Errorf("invalid workspace folder path: %w; check that the casing of the configured workspace folder path agrees with the casing reported by the operating system", err)
 	}
 	var err error
 	inv := gocommand.Invocation{
diff --git a/internal/lsp/cmd/test/cmdtest.go b/internal/lsp/cmd/test/cmdtest.go
index b63a92aece7..832d794c7d1 100644
--- a/internal/lsp/cmd/test/cmdtest.go
+++ b/internal/lsp/cmd/test/cmdtest.go
@@ -100,10 +100,18 @@ func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span
 	//TODO: function extraction not supported on command line
 }
 
+func (r *runner) MethodExtraction(t *testing.T, start span.Span, end span.Span) {
+	//TODO: method extraction not supported on command line
+}
+
 func (r *runner) AddImport(t *testing.T, uri span.URI, expectedImport string) {
 	//TODO: import addition not supported on command line
 }
 
+func (r *runner) Hover(t *testing.T, spn span.Span, info string) {
+	//TODO: hovering not supported on command line
+}
+
 func (r *runner) runGoplsCmd(t testing.TB, args ...string) (string, string) {
 	rStdout, wStdout, err := os.Pipe()
 	if err != nil {
diff --git a/internal/lsp/code_action.go b/internal/lsp/code_action.go
index 1c5ad4d636a..b58e9540308 100644
--- a/internal/lsp/code_action.go
+++ b/internal/lsp/code_action.go
@@ -289,8 +289,8 @@ func extractionFixes(ctx context.Context, snapshot source.Snapshot, pkg source.P
 	}
 	puri := protocol.URIFromSpanURI(uri)
 	var commands []protocol.Command
-	if _, ok, _ := source.CanExtractFunction(snapshot.FileSet(), srng, pgf.Src, pgf.File); ok {
-		cmd, err := command.NewApplyFixCommand("Extract to function", command.ApplyFixArgs{
+	if _, ok, methodOk, _ := source.CanExtractFunction(snapshot.FileSet(), srng, pgf.Src, pgf.File); ok {
+		cmd, err := command.NewApplyFixCommand("Extract function", command.ApplyFixArgs{
 			URI:   puri,
 			Fix:   source.ExtractFunction,
 			Range: rng,
@@ -299,6 +299,17 @@ func extractionFixes(ctx context.Context, snapshot source.Snapshot, pkg source.P
 			return nil, err
 		}
 		commands = append(commands, cmd)
+		if methodOk {
+			cmd, err := command.NewApplyFixCommand("Extract method", command.ApplyFixArgs{
+				URI:   puri,
+				Fix:   source.ExtractMethod,
+				Range: rng,
+			})
+			if err != nil {
+				return nil, err
+			}
+			commands = append(commands, cmd)
+		}
 	}
 	if _, _, ok, _ := source.CanExtractVariable(srng, pgf.File); ok {
 		cmd, err := command.NewApplyFixCommand("Extract variable", command.ApplyFixArgs{
diff --git a/internal/lsp/command.go b/internal/lsp/command.go
index d810735b7d1..36c319f1196 100644
--- a/internal/lsp/command.go
+++ b/internal/lsp/command.go
@@ -73,11 +73,15 @@ type commandFunc func(context.Context, commandDeps) error
 
 func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run commandFunc) (err error) {
 	if cfg.requireSave {
+		var unsaved []string
 		for _, overlay := range c.s.session.Overlays() {
 			if !overlay.Saved() {
-				return errors.New("All files must be saved first")
+				unsaved = append(unsaved, overlay.URI().Filename())
 			}
 		}
+		if len(unsaved) > 0 {
+			return errors.Errorf("All files must be saved first (unsaved: %v).", unsaved)
+		}
 	}
 	var deps commandDeps
 	if cfg.forURI != "" {
@@ -354,7 +358,7 @@ func (c *commandHandler) RunTests(ctx context.Context, args command.RunTestsArgs
 
 func (c *commandHandler) runTests(ctx context.Context, snapshot source.Snapshot, work *progress.WorkDone, uri protocol.DocumentURI, tests, benchmarks []string) error {
 	// TODO: fix the error reporting when this runs async.
-	pkgs, err := snapshot.PackagesForFile(ctx, uri.SpanURI(), source.TypecheckWorkspace)
+	pkgs, err := snapshot.PackagesForFile(ctx, uri.SpanURI(), source.TypecheckWorkspace, false)
 	if err != nil {
 		return err
 	}
diff --git a/internal/lsp/completion.go b/internal/lsp/completion.go
index 4bec6cda990..4523d34e2a8 100644
--- a/internal/lsp/completion.go
+++ b/internal/lsp/completion.go
@@ -33,7 +33,12 @@ func (s *Server) completion(ctx context.Context, params *protocol.CompletionPara
 	case source.Mod:
 		candidates, surrounding = nil, nil
 	case source.Tmpl:
-		candidates, surrounding, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context)
+		var cl *protocol.CompletionList
+		cl, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context)
+		if err != nil {
+			break // use common error handling, candidates==nil
+		}
+		return cl, nil
 	}
 	if err != nil {
 		event.Error(ctx, "no completions found", err, tag.Position.Of(params.Position))
diff --git a/internal/lsp/diagnostics.go b/internal/lsp/diagnostics.go
index ef233729323..acf38202848 100644
--- a/internal/lsp/diagnostics.go
+++ b/internal/lsp/diagnostics.go
@@ -153,7 +153,7 @@ func (s *Server) diagnoseChangedFiles(ctx context.Context, snapshot source.Snaps
 		if snapshot.IsBuiltin(ctx, uri) {
 			continue
 		}
-		pkgs, err := snapshot.PackagesForFile(ctx, uri, source.TypecheckFull)
+		pkgs, err := snapshot.PackagesForFile(ctx, uri, source.TypecheckFull, false)
 		if err != nil {
 			// TODO (findleyr): we should probably do something with the error here,
 			// but as of now this can fail repeatedly if load fails, so can be too
@@ -211,7 +211,7 @@ func (s *Server) diagnose(ctx context.Context, snapshot source.Snapshot, forceAn
 	}
 
 	// Diagnose all of the packages in the workspace.
-	wsPkgs, err := snapshot.WorkspacePackages(ctx)
+	wsPkgs, err := snapshot.ActivePackages(ctx)
 	if s.shouldIgnoreError(ctx, snapshot, err) {
 		return
 	}
@@ -423,7 +423,7 @@ func (s *Server) checkForOrphanedFile(ctx context.Context, snapshot source.Snaps
 	if snapshot.IsBuiltin(ctx, fh.URI()) {
 		return nil
 	}
-	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), source.TypecheckWorkspace)
+	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), source.TypecheckWorkspace, false)
 	if len(pkgs) > 0 || err == nil {
 		return nil
 	}
diff --git a/internal/lsp/fake/client.go b/internal/lsp/fake/client.go
index a105110a5bc..331b9bd3436 100644
--- a/internal/lsp/fake/client.go
+++ b/internal/lsp/fake/client.go
@@ -7,7 +7,6 @@ package fake
 import (
 	"context"
 	"fmt"
-	"os"
 
 	"golang.org/x/tools/internal/lsp/protocol"
 )
@@ -121,19 +120,7 @@ func (c *Client) ApplyEdit(ctx context.Context, params *protocol.ApplyWorkspaceE
 		return &protocol.ApplyWorkspaceEditResponse{FailureReason: "Edit.Changes is unsupported"}, nil
 	}
 	for _, change := range params.Edit.DocumentChanges {
-		path := c.editor.sandbox.Workdir.URIToPath(change.TextDocument.URI)
-		edits := convertEdits(change.Edits)
-		if !c.editor.HasBuffer(path) {
-			err := c.editor.OpenFile(ctx, path)
-			if os.IsNotExist(err) {
-				c.editor.CreateBuffer(ctx, path, "")
-				err = nil
-			}
-			if err != nil {
-				return nil, err
-			}
-		}
-		if err := c.editor.EditBuffer(ctx, path, edits); err != nil {
+		if err := c.editor.applyProtocolEdit(ctx, change); err != nil {
 			return nil, err
 		}
 	}
diff --git a/internal/lsp/fake/editor.go b/internal/lsp/fake/editor.go
index 61867d592fd..c9780b8bbdc 100644
--- a/internal/lsp/fake/editor.go
+++ b/internal/lsp/fake/editor.go
@@ -1095,6 +1095,49 @@ func (e *Editor) References(ctx context.Context, path string, pos Pos) ([]protoc
 	return locations, nil
 }
 
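+// Rename performs a textDocument/rename request at the given position and
+// applies the resulting workspace edits to the editor's buffers.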
+func (e *Editor) Rename(ctx context.Context, path string, pos Pos, newName string) error {
+	if e.Server == nil {
+		return nil
+	}
+	params := &protocol.RenameParams{
+		TextDocument: e.textDocumentIdentifier(path),
+		Position:     pos.ToProtocolPosition(),
+		NewName:      newName,
+	}
+	wsEdits, err := e.Server.Rename(ctx, params)
+	if err != nil {
+		return err
+	}
+	for _, change := range wsEdits.DocumentChanges {
+		if err := e.applyProtocolEdit(ctx, change); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
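+// applyProtocolEdit applies a single TextDocumentEdit to the corresponding
+// buffer, checking that buffer versions match and opening (or creating) the
+// buffer if it is not yet open.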
+func (e *Editor) applyProtocolEdit(ctx context.Context, change protocol.TextDocumentEdit) error {
+	path := e.sandbox.Workdir.URIToPath(change.TextDocument.URI)
+	if ver := int32(e.BufferVersion(path)); ver != change.TextDocument.Version {
+		return fmt.Errorf("buffer versions for %q do not match: have %d, editing %d", path, ver, change.TextDocument.Version)
+	}
+	if !e.HasBuffer(path) {
+		err := e.OpenFile(ctx, path)
+		if os.IsNotExist(err) {
+			// TODO: it's unclear if this is correct. Here we create the buffer (with
+			// version 1), then apply edits. Perhaps we should apply the edits before
+			// sending the didOpen notification.
+			e.CreateBuffer(ctx, path, "")
+			err = nil
+		}
+		if err != nil {
+			return err
+		}
+	}
+	fakeEdits := convertEdits(change.Edits)
+	return e.EditBuffer(ctx, path, fakeEdits)
+}
+
 // CodeAction executes a codeAction request on the server.
 func (e *Editor) CodeAction(ctx context.Context, path string, rng *protocol.Range, diagnostics []protocol.Diagnostic) ([]protocol.CodeAction, error) {
 	if e.Server == nil {
diff --git a/internal/lsp/fuzzy/input.go b/internal/lsp/fuzzy/input.go
index ac377035ec6..c1038163f1a 100644
--- a/internal/lsp/fuzzy/input.go
+++ b/internal/lsp/fuzzy/input.go
@@ -27,23 +27,23 @@ const (
 // RuneRoles detects the roles of each byte rune in an input string and stores it in the output
 // slice. The rune role depends on the input type. Stops when it parsed all the runes in the string
 // or when it filled the output. If output is nil, then it gets created.
-func RuneRoles(str string, reuse []RuneRole) []RuneRole {
+func RuneRoles(candidate []byte, reuse []RuneRole) []RuneRole {
 	var output []RuneRole
-	if cap(reuse) < len(str) {
-		output = make([]RuneRole, 0, len(str))
+	if cap(reuse) < len(candidate) {
+		output = make([]RuneRole, 0, len(candidate))
 	} else {
 		output = reuse[:0]
 	}
 
 	prev, prev2 := rtNone, rtNone
-	for i := 0; i < len(str); i++ {
-		r := rune(str[i])
+	for i := 0; i < len(candidate); i++ {
+		r := rune(candidate[i])
 
 		role := RNone
 
 		curr := rtLower
-		if str[i] <= unicode.MaxASCII {
-			curr = runeType(rt[str[i]] - '0')
+		if candidate[i] <= unicode.MaxASCII {
+			curr = runeType(rt[candidate[i]] - '0')
 		}
 
 		if curr == rtLower {
@@ -58,7 +58,7 @@ func RuneRoles(str string, reuse []RuneRole) []RuneRole {
 			if prev == rtUpper {
 				// This and previous characters are both upper case.
 
-				if i+1 == len(str) {
+				if i+1 == len(candidate) {
 					// This is last character, previous was also uppercase -> this is UCTail
 					// i.e., (current char is C): aBC / BC / ABC
 					role = RUCTail
@@ -118,11 +118,26 @@ func LastSegment(input string, roles []RuneRole) string {
 	return input[start+1 : end+1]
 }
 
-// ToLower transforms the input string to lower case, which is stored in the output byte slice.
+// fromChunks copies string chunks into the given buffer, stopping once the
+// buffer's capacity is reached, and returns the filled prefix.
+func fromChunks(chunks []string, buffer []byte) []byte {
+	ii := 0
+	for _, chunk := range chunks {
+		for i := 0; i < len(chunk); i++ {
+			if ii >= cap(buffer) {
+				break
+			}
+			buffer[ii] = chunk[i]
+			ii++
+		}
+	}
+	return buffer[:ii]
+}
+
+// toLower transforms the input string to lower case, which is stored in the output byte slice.
 // The lower casing considers only ASCII values - non ASCII values are left unmodified.
 // Stops when parsed all input or when it filled the output slice. If output is nil, then it gets
 // created.
-func ToLower(input string, reuse []byte) []byte {
+func toLower(input []byte, reuse []byte) []byte {
 	output := reuse
 	if cap(reuse) < len(input) {
 		output = make([]byte, len(input))
@@ -130,7 +145,7 @@ func ToLower(input string, reuse []byte) []byte {
 
 	for i := 0; i < len(input); i++ {
 		r := rune(input[i])
-		if r <= unicode.MaxASCII {
+		if input[i] <= unicode.MaxASCII {
 			if 'A' <= r && r <= 'Z' {
 				r += 'a' - 'A'
 			}
diff --git a/internal/lsp/fuzzy/input_test.go b/internal/lsp/fuzzy/input_test.go
index dffafa596b6..0228347e4f0 100644
--- a/internal/lsp/fuzzy/input_test.go
+++ b/internal/lsp/fuzzy/input_test.go
@@ -36,7 +36,7 @@ func rolesString(roles []fuzzy.RuneRole) string {
 func TestRoles(t *testing.T) {
 	for _, tc := range rolesTests {
 		gotRoles := make([]fuzzy.RuneRole, len(tc.str))
-		fuzzy.RuneRoles(tc.str, gotRoles)
+		fuzzy.RuneRoles([]byte(tc.str), gotRoles)
 		got := rolesString(gotRoles)
 		if got != tc.want {
 			t.Errorf("roles(%s) = %v; want %v", tc.str, got, tc.want)
@@ -68,7 +68,7 @@ var wordSplitTests = []struct {
 
 func TestWordSplit(t *testing.T) {
 	for _, tc := range wordSplitTests {
-		roles := fuzzy.RuneRoles(tc.input, nil)
+		roles := fuzzy.RuneRoles([]byte(tc.input), nil)
 
 		var got []string
 		consumer := func(i, j int) {
@@ -120,7 +120,7 @@ var lastSegmentSplitTests = []struct {
 
 func TestLastSegment(t *testing.T) {
 	for _, tc := range lastSegmentSplitTests {
-		roles := fuzzy.RuneRoles(tc.str, nil)
+		roles := fuzzy.RuneRoles([]byte(tc.str), nil)
 
 		got := fuzzy.LastSegment(tc.str, roles)
 
@@ -135,7 +135,7 @@ func BenchmarkRoles(b *testing.B) {
 	out := make([]fuzzy.RuneRole, len(str))
 
 	for i := 0; i < b.N; i++ {
-		fuzzy.RuneRoles(str, out)
+		fuzzy.RuneRoles([]byte(str), out)
 	}
 	b.SetBytes(int64(len(str)))
 }
diff --git a/internal/lsp/fuzzy/matcher.go b/internal/lsp/fuzzy/matcher.go
index 16a643097de..265cdcf1604 100644
--- a/internal/lsp/fuzzy/matcher.go
+++ b/internal/lsp/fuzzy/matcher.go
@@ -51,8 +51,12 @@ type Matcher struct {
 	lastCandidateLen     int // in bytes
 	lastCandidateMatched bool
 
-	// Here we save the last candidate in lower-case. This is basically a byte slice we reuse for
-	// performance reasons, so the slice is not reallocated for every candidate.
+	// Reusable buffers to avoid allocating for every candidate.
+	//  - inputBuf stores the concatenated input chunks
+	//  - lowerBuf stores the last candidate in lower-case
+	//  - rolesBuf stores the calculated roles for each rune in the last
+	//    candidate.
+	inputBuf [MaxInputSize]byte
 	lowerBuf [MaxInputSize]byte
 	rolesBuf [MaxInputSize]RuneRole
 }
@@ -72,7 +76,7 @@ func NewMatcher(pattern string) *Matcher {
 
 	m := &Matcher{
 		pattern:      pattern,
-		patternLower: ToLower(pattern, nil),
+		patternLower: toLower([]byte(pattern), nil),
 	}
 
 	for i, c := range m.patternLower {
@@ -88,7 +92,7 @@ func NewMatcher(pattern string) *Matcher {
 		m.patternShort = m.patternLower
 	}
 
-	m.patternRoles = RuneRoles(pattern, nil)
+	m.patternRoles = RuneRoles([]byte(pattern), nil)
 
 	if len(pattern) > 0 {
 		maxCharScore := 4
@@ -102,10 +106,15 @@ func NewMatcher(pattern string) *Matcher {
 // This is not designed for parallel use. Multiple candidates must be scored sequentially.
 // Returns a score between 0 and 1 (0 - no match, 1 - perfect match).
 func (m *Matcher) Score(candidate string) float32 {
+	return m.ScoreChunks([]string{candidate})
+}
+
+func (m *Matcher) ScoreChunks(chunks []string) float32 {
+	candidate := fromChunks(chunks, m.inputBuf[:])
 	if len(candidate) > MaxInputSize {
 		candidate = candidate[:MaxInputSize]
 	}
-	lower := ToLower(candidate, m.lowerBuf[:])
+	lower := toLower(candidate, m.lowerBuf[:])
 	m.lastCandidateLen = len(candidate)
 
 	if len(m.pattern) == 0 {
@@ -174,7 +183,7 @@ func (m *Matcher) MatchedRanges() []int {
 	return ret
 }
 
-func (m *Matcher) match(candidate string, candidateLower []byte) bool {
+func (m *Matcher) match(candidate []byte, candidateLower []byte) bool {
 	i, j := 0, 0
 	for ; i < len(candidateLower) && j < len(m.patternLower); i++ {
 		if candidateLower[i] == m.patternLower[j] {
@@ -192,7 +201,7 @@ func (m *Matcher) match(candidate string, candidateLower []byte) bool {
 	return true
 }
 
-func (m *Matcher) computeScore(candidate string, candidateLower []byte) int {
+func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int {
 	pattLen, candLen := len(m.pattern), len(candidate)
 
 	for j := 0; j <= len(m.pattern); j++ {
diff --git a/internal/lsp/fuzzy/symbol.go b/internal/lsp/fuzzy/symbol.go
new file mode 100644
index 00000000000..062f491fb5c
--- /dev/null
+++ b/internal/lsp/fuzzy/symbol.go
@@ -0,0 +1,224 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fuzzy
+
+import (
+	"unicode"
+)
+
+// SymbolMatcher implements a fuzzy matching algorithm optimized for Go symbols
+// of the form:
+//  example.com/path/to/package.object.field
+//
+// Knowing that we are matching symbols like this allows us to make the
+// following optimizations:
+//  - We can incorporate right-to-left relevance directly into the score
+//    calculation.
+//  - We can match from right to left, discarding leading bytes if the input is
+//    too long.
+//  - We just take the right-most match without losing too much precision. This
+//    allows us to use an O(n) algorithm.
+//  - We can operate directly on chunked strings; in many cases we will
+//    be storing the package path and/or package name separately from the
+//    symbol or identifiers, so doing this avoids allocating strings.
+//  - We can return the index of the right-most match, allowing us to trim
+//    irrelevant qualification.
+//
+// This implementation is experimental, serving as a reference fast algorithm
+// to compare to the fuzzy algorithm implemented by Matcher.
+type SymbolMatcher struct {
+	// Buffers of length 256 are both a reasonable size for most qualified
+	// symbols and make it easy to avoid bounds checks by using uint8 indexes.
+	pattern     [256]rune
+	patternLen  uint8
+	inputBuffer [256]rune   // avoid allocating when considering chunks
+	roles       [256]uint32 // which roles does a rune play (word start, etc.)
+	segments    [256]uint8  // how many segments from the right is each rune
+}
+
+const (
+	segmentStart uint32 = 1 << iota
+	wordStart
+	separator
+)
+
+// NewSymbolMatcher creates a SymbolMatcher that may be used to match the given
+// search pattern.
+//
+// Currently this matcher only accepts case-insensitive fuzzy patterns.
+//
+// TODO(rfindley):
+//  - implement smart-casing
+//  - implement space-separated groups
+//  - implement ', ^, and $ modifiers
+//
+// An empty pattern matches no input.
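+//
+// A rough usage sketch (illustrative only; the inputs are arbitrary):
+//
+//	m := NewSymbolMatcher("test")
+//	idx, score := m.Match([]string{"example.com/foo.", "TestFoo"})
+//	// idx is the index of the match within strings.Join(chunks, "");
+//	// score is in [0, 1], where 0 means no match.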
+func NewSymbolMatcher(pattern string) *SymbolMatcher {
+	m := &SymbolMatcher{}
+	for _, p := range pattern {
+		m.pattern[m.patternLen] = unicode.ToLower(p)
+		m.patternLen++
+		if m.patternLen == 255 || int(m.patternLen) == len(pattern) {
+			// break at 255 so that we can represent patternLen with a uint8.
+			break
+		}
+	}
+	return m
+}
+
+// Match looks for the right-most match of the search pattern within the symbol
+// represented by concatenating the given chunks, returning its offset and
+// score.
+//
+// If a match is found, the first return value will hold the absolute byte
+// offset within all chunks for the start of the symbol. In other words, the
+// index of the match within strings.Join(chunks, ""). If no match is found,
+// the first return value will be -1.
+//
+// The second return value will be the score of the match, which is always
+// between 0 and 1, inclusive. A score of 0 indicates no match.
+func (m *SymbolMatcher) Match(chunks []string) (int, float64) {
+	// Explicit behavior for an empty pattern.
+	//
+	// As a minor optimization, this also avoids nilness checks later on, since
+	// the compiler can prove that m != nil.
+	if m.patternLen == 0 {
+		return -1, 0
+	}
+
+	// First phase: populate the input buffer with lower-cased runes.
+	//
+	// We could also check for a forward match here, but since we'd have to write
+	// the entire input anyway this has negligible impact on performance.
+
+	var (
+		inputLen  = uint8(0)
+		modifiers = wordStart | segmentStart
+	)
+
+input:
+	for _, chunk := range chunks {
+		for _, r := range chunk {
+			if r == '.' || r == '/' {
+				modifiers |= separator
+			}
+			// optimization: avoid calls to unicode.ToLower, which can't be inlined.
+			l := r
+			if r <= unicode.MaxASCII {
+				if 'A' <= r && r <= 'Z' {
+					l = r + 'a' - 'A'
+				}
+			} else {
+				l = unicode.ToLower(r)
+			}
+			if l != r {
+				modifiers |= wordStart
+			}
+			m.inputBuffer[inputLen] = l
+			m.roles[inputLen] = modifiers
+			inputLen++
+			if m.roles[inputLen-1]&separator != 0 {
+				modifiers = wordStart | segmentStart
+			} else {
+				modifiers = 0
+			}
+			// TODO: we should prefer the right-most input if it overflows, rather
+			//       than the left-most as we're doing here.
+			if inputLen == 255 {
+				break input
+			}
+		}
+	}
+
+	// Second phase: find the right-most match, and count segments from the
+	// right.
+
+	var (
+		pi    = uint8(m.patternLen - 1) // pattern index
+		p     = m.pattern[pi]           // pattern rune
+		start = -1                      // start offset of match
+		rseg  = uint8(0)
+	)
+	const maxSeg = 3 // maximum number of segments from the right to count, for scoring purposes.
+
+	for ii := inputLen - 1; ; ii-- {
+		r := m.inputBuffer[ii]
+		if rseg < maxSeg && m.roles[ii]&separator != 0 {
+			rseg++
+		}
+		m.segments[ii] = rseg
+		if p == r {
+			if pi == 0 {
+				start = int(ii)
+				break
+			}
+			pi--
+			p = m.pattern[pi]
+		}
+		// Don't check ii >= 0 in the loop condition: ii is a uint8.
+		if ii == 0 {
+			break
+		}
+	}
+
+	if start < 0 {
+		// no match: skip scoring
+		return -1, 0
+	}
+
+	// Third phase: find the shortest match, and compute the score.
+
+	// Score is the average score for each character.
+	//
+	// A character score is the multiple of:
+	//   1. 1.0 if the character starts a segment, .8 if the character starts a
+	//      mid-segment word, otherwise 0.6. This carries over to immediately
+	//      following characters.
+	//   2. 1.0 if the character is part of the last segment, otherwise
+	//      1.0-.2*<segments from the right>, with a max segment count of 3.
+	//
+	// This is a very naive algorithm, but it is fast. There's lots of prior art
+	// here, and we should leverage it. For example, we could explicitly consider
+	// character distance, and exact matches of words or segments.
+	//
+	// Also note that this might not actually find the highest scoring match, as
+	// doing so could require a non-linear algorithm, depending on how the score
+	// is calculated.
+
+	pi = 0
+	p = m.pattern[pi]
+
+	const (
+		segStreak  = 1.0
+		wordStreak = 0.8
+		noStreak   = 0.6
+		perSegment = 0.2 // we count at most 3 segments above
+	)
+
+	streakBonus := noStreak
+	totScore := 0.0
+	for ii := uint8(start); ii < inputLen; ii++ {
+		r := m.inputBuffer[ii]
+		if r == p {
+			pi++
+			p = m.pattern[pi]
+			// Note: this could be optimized with some bit operations.
+			switch {
+			case m.roles[ii]&segmentStart != 0 && segStreak > streakBonus:
+				streakBonus = segStreak
+			case m.roles[ii]&wordStart != 0 && wordStreak > streakBonus:
+				streakBonus = wordStreak
+			}
+			totScore += streakBonus * (1.0 - float64(m.segments[ii])*perSegment)
+			if pi >= m.patternLen {
+				break
+			}
+		} else {
+			streakBonus = noStreak
+		}
+	}
+
+	return start, totScore / float64(m.patternLen)
+}
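
For orientation, a minimal usage sketch of the matcher defined above (fuzzy is an internal package, so this only builds inside x/tools; the offset and score are worked out by hand from the scoring rules in Match):

package main

import (
	"fmt"

	"golang.org/x/tools/internal/lsp/fuzzy"
)

func main() {
	m := fuzzy.NewSymbolMatcher("test")
	// "foo." + "Test" concatenates to "foo.Test". The right-most match starts
	// at byte offset 4; every matched character is in the last segment and
	// continues a segment-start streak, so each scores 1.0 and the average is 1.0.
	offset, score := m.Match([]string{"foo.", "Test"})
	fmt.Println(offset, score) // 4 1
}
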
diff --git a/internal/lsp/fuzzy/symbol_test.go b/internal/lsp/fuzzy/symbol_test.go
new file mode 100644
index 00000000000..9dc710e917e
--- /dev/null
+++ b/internal/lsp/fuzzy/symbol_test.go
@@ -0,0 +1,78 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fuzzy_test
+
+import (
+	"testing"
+
+	. "golang.org/x/tools/internal/lsp/fuzzy"
+)
+
+func TestSymbolMatchIndex(t *testing.T) {
+	tests := []struct {
+		pattern, input string
+		want           int
+	}{
+		{"test", "foo.TestFoo", 4},
+		{"test", "test", 0},
+		{"test", "Test", 0},
+		{"test", "est", -1},
+		{"t", "shortest", 7},
+		{"", "foo", -1},
+		{"", string([]rune{0}), -1}, // verify that we don't default to an empty pattern.
+		{"anything", "", -1},
+	}
+
+	for _, test := range tests {
+		matcher := NewSymbolMatcher(test.pattern)
+		if got, _ := matcher.Match([]string{test.input}); got != test.want {
+			t.Errorf("NewSymbolMatcher(%q).Match(%q) = %v, _, want %v, _", test.pattern, test.input, got, test.want)
+		}
+	}
+}
+
+func TestSymbolRanking(t *testing.T) {
+	matcher := NewSymbolMatcher("test")
+
+	// symbols to match, in ascending order of ranking.
+	symbols := []string{
+		"this.is.better.than.most",
+		"test.foo.bar",
+		"atest",
+		"thebest",
+		"test.foo",
+		"tTest",
+		"test.foo",
+		"foo.test",
+		"test",
+	}
+	prev := 0.0
+	for _, sym := range symbols {
+		_, score := matcher.Match([]string{sym})
+		t.Logf("Match(%q) = %v", sym, score)
+		if score < prev {
+			t.Errorf("Match(%q) = _, %v, want > %v", sym, score, prev)
+		}
+		prev = score
+	}
+}
+
+func TestChunkedMatch(t *testing.T) {
+	matcher := NewSymbolMatcher("test")
+
+	chunked := [][]string{
+		{"test"},
+		{"", "test"},
+		{"test", ""},
+		{"te", "st"},
+	}
+
+	for _, chunks := range chunked {
+		offset, score := matcher.Match(chunks)
+		if offset != 0 || score != 1.0 {
+			t.Errorf("Match(%v) = %v, %v, want 0, 1.0", chunks, offset, score)
+		}
+	}
+}
diff --git a/internal/lsp/lsp_test.go b/internal/lsp/lsp_test.go
index 68c83f653dc..d21d71d6661 100644
--- a/internal/lsp/lsp_test.go
+++ b/internal/lsp/lsp_test.go
@@ -583,7 +583,7 @@ func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span
 	if err != nil {
 		t.Fatal(err)
 	}
-	actions, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{
+	actionsRaw, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{
 		TextDocument: protocol.TextDocumentIdentifier{
 			URI: protocol.URIFromSpanURI(uri),
 		},
@@ -595,6 +595,12 @@ func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span
 	if err != nil {
 		t.Fatal(err)
 	}
+	var actions []protocol.CodeAction
+	for _, action := range actionsRaw {
+		if action.Command.Title == "Extract function" {
+			actions = append(actions, action)
+		}
+	}
 	// Hack: We assume that we only get one code action per range.
 	// TODO(rstambler): Support multiple code actions per test.
 	if len(actions) == 0 || len(actions) > 1 {
@@ -618,6 +624,58 @@ func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span
 	}
 }
 
+func (r *runner) MethodExtraction(t *testing.T, start span.Span, end span.Span) {
+	uri := start.URI()
+	m, err := r.data.Mapper(uri)
+	if err != nil {
+		t.Fatal(err)
+	}
+	spn := span.New(start.URI(), start.Start(), end.End())
+	rng, err := m.Range(spn)
+	if err != nil {
+		t.Fatal(err)
+	}
+	actionsRaw, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{
+		TextDocument: protocol.TextDocumentIdentifier{
+			URI: protocol.URIFromSpanURI(uri),
+		},
+		Range: rng,
+		Context: protocol.CodeActionContext{
+			Only: []protocol.CodeActionKind{"refactor.extract"},
+		},
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	var actions []protocol.CodeAction
+	for _, action := range actionsRaw {
+		if action.Command.Title == "Extract method" {
+			actions = append(actions, action)
+		}
+	}
+	// Hack: We assume that we only get one matching code action per range.
+	// TODO(rstambler): Support multiple code actions per test.
+	if len(actions) == 0 || len(actions) > 1 {
+		t.Fatalf("unexpected number of code actions, want 1, got %v", len(actions))
+	}
+	_, err = r.server.ExecuteCommand(r.ctx, &protocol.ExecuteCommandParams{
+		Command:   actions[0].Command.Command,
+		Arguments: actions[0].Command.Arguments,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	res := <-r.editRecv
+	for u, got := range res {
+		want := string(r.data.Golden("methodextraction_"+tests.SpanName(spn), u.Filename(), func() ([]byte, error) {
+			return []byte(got), nil
+		}))
+		if want != got {
+			t.Errorf("method extraction failed for %s:\n%s", u.Filename(), tests.Diff(t, want, got))
+		}
+	}
+}
+
 func (r *runner) Definition(t *testing.T, spn span.Span, d tests.Definition) {
 	sm, err := r.data.Mapper(d.Src.URI())
 	if err != nil {
@@ -660,7 +718,7 @@ func (r *runner) Definition(t *testing.T, spn span.Span, d tests.Definition) {
 	didSomething := false
 	if hover != nil {
 		didSomething = true
-		tag := fmt.Sprintf("%s-hover", d.Name)
+		tag := fmt.Sprintf("%s-hoverdef", d.Name)
 		expectHover := string(r.data.Golden(tag, d.Src.URI().Filename(), func() ([]byte, error) {
 			return []byte(hover.Contents.Value), nil
 		}))
@@ -782,6 +840,43 @@ func (r *runner) Highlight(t *testing.T, src span.Span, locations []span.Span) {
 	}
 }
 
+func (r *runner) Hover(t *testing.T, src span.Span, text string) {
+	m, err := r.data.Mapper(src.URI())
+	if err != nil {
+		t.Fatal(err)
+	}
+	loc, err := m.Location(src)
+	if err != nil {
+		t.Fatalf("failed for %v", err)
+	}
+	tdpp := protocol.TextDocumentPositionParams{
+		TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI},
+		Position:     loc.Range.Start,
+	}
+	params := &protocol.HoverParams{
+		TextDocumentPositionParams: tdpp,
+	}
+	hover, err := r.server.Hover(r.ctx, params)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if text == "" {
+		if hover != nil {
+			t.Errorf("want nil, got %v\n", hover)
+		}
+	} else {
+		if hover == nil {
+			t.Fatalf("want hover result to include %s, but got nil", text)
+		}
+		if got := hover.Contents.Value; got != text {
+			t.Errorf("want %v, got %v\n", text, got)
+		}
+		if want, got := loc.Range, hover.Range; want != got {
+			t.Errorf("want range %v, got %v instead", want, got)
+		}
+	}
+}
+
 func (r *runner) References(t *testing.T, src span.Span, itemList []span.Span) {
 	sm, err := r.data.Mapper(src.URI())
 	if err != nil {
diff --git a/internal/lsp/lsppos/lsppos.go b/internal/lsp/lsppos/lsppos.go
new file mode 100644
index 00000000000..f27bde57374
--- /dev/null
+++ b/internal/lsp/lsppos/lsppos.go
@@ -0,0 +1,89 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package lsppos provides utilities for working with LSP positions.
+//
+// See https://microsoft.github.io/language-server-protocol/specification#textDocuments
+// for a description of LSP positions. Notably:
+//  - Positions are specified by a 0-based line count and 0-based utf-16
+//    character offset.
+//  - Positions are line-ending agnostic: there is no way to specify \r|\n or
+//    \n|. Instead the former maps to the end of the current line, and the
+//    latter to the start of the next line.
+package lsppos
+
+import (
+	"sort"
+	"unicode/utf8"
+)
+
+type Mapper struct {
+	nonASCII bool
+	src      []byte
+
+	// Start-of-line offsets. If src is newline-terminated, the final entry is the start of a trailing empty line.
+	lines []int
+}
+
+func NewMapper(src []byte) *Mapper {
+	m := &Mapper{src: src}
+	if len(src) == 0 {
+		return m
+	}
+	m.lines = []int{0}
+	for offset, b := range src {
+		if b == '\n' {
+			m.lines = append(m.lines, offset+1)
+		}
+		if b >= utf8.RuneSelf {
+			m.nonASCII = true
+		}
+	}
+	return m
+}
+
+func (m *Mapper) Position(offset int) (line, char int) {
+	if offset < 0 || offset > len(m.src) {
+		return -1, -1
+	}
+	nextLine := sort.Search(len(m.lines), func(i int) bool {
+		return offset < m.lines[i]
+	})
+	if nextLine == 0 {
+		return -1, -1
+	}
+	line = nextLine - 1
+	start := m.lines[line]
+	var charOffset int
+	if m.nonASCII {
+		charOffset = UTF16len(m.src[start:offset])
+	} else {
+		charOffset = offset - start
+	}
+
+	var eol int
+	if line == len(m.lines)-1 {
+		eol = len(m.src)
+	} else {
+		eol = m.lines[line+1] - 1
+	}
+
+	// Adjustment for line-endings: \r|\n is the same as |\r\n.
+	if offset == eol && offset > 0 && m.src[offset-1] == '\r' {
+		charOffset--
+	}
+
+	return line, charOffset
+}
+
+func UTF16len(buf []byte) int {
+	cnt := 0
+	for _, r := range string(buf) {
+		cnt++
+		if r >= 1<<16 {
+			cnt++
+		}
+	}
+	return cnt
+}
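
A small sketch of how the mapper converts byte offsets to LSP line/character positions (lsppos is internal to x/tools, so this builds only inside the module; the expected values follow from the code above):

package main

import (
	"fmt"

	"golang.org/x/tools/internal/lsp/lsppos"
)

func main() {
	src := []byte("hi\n𐐀 world\n")
	m := lsppos.NewMapper(src)

	// Offset 3 is the start of the second line.
	fmt.Println(m.Position(3)) // 1 0

	// The rune 𐐀 occupies 4 bytes of UTF-8 but 2 UTF-16 code units, so the
	// space after it is at character 2 in UTF-16 terms.
	fmt.Println(m.Position(7)) // 1 2
}
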
diff --git a/internal/lsp/lsprpc/lsprpc.go b/internal/lsp/lsprpc/lsprpc.go
index 91770780c22..ca32f0e174e 100644
--- a/internal/lsp/lsprpc/lsprpc.go
+++ b/internal/lsp/lsprpc/lsprpc.go
@@ -225,9 +225,9 @@ func (f *Forwarder) ServeStream(ctx context.Context, clientConn jsonrpc2.Conn) e
 
 	err = nil
 	if serverConn.Err() != nil {
-		err = errors.Errorf("remote disconnected: %v", err)
+		err = errors.Errorf("remote disconnected: %v", serverConn.Err())
 	} else if clientConn.Err() != nil {
-		err = errors.Errorf("client disconnected: %v", err)
+		err = errors.Errorf("client disconnected: %v", clientConn.Err())
 	}
 	event.Log(ctx, fmt.Sprintf("forwarder: exited with error: %v", err))
 	return err
diff --git a/internal/lsp/mod/diagnostics.go b/internal/lsp/mod/diagnostics.go
index 625bc63c3cf..4b4d0cb9f38 100644
--- a/internal/lsp/mod/diagnostics.go
+++ b/internal/lsp/mod/diagnostics.go
@@ -86,7 +86,7 @@ func DiagnosticsForMod(ctx context.Context, snapshot source.Snapshot, fh source.
 	}
 
 	// Packages in the workspace can contribute diagnostics to go.mod files.
-	wspkgs, err := snapshot.WorkspacePackages(ctx)
+	wspkgs, err := snapshot.ActivePackages(ctx)
 	if err != nil && !source.IsNonFatalGoModError(err) {
 		event.Error(ctx, fmt.Sprintf("workspace packages: diagnosing %s", pm.URI), err)
 	}
diff --git a/internal/lsp/protocol/tsclient.go b/internal/lsp/protocol/tsclient.go
index 5cdfc9bccea..9a88f3338a0 100644
--- a/internal/lsp/protocol/tsclient.go
+++ b/internal/lsp/protocol/tsclient.go
@@ -6,8 +6,8 @@ package protocol
 
 // Package protocol contains data types and code for LSP jsonrpcs
 // generated automatically from vscode-languageserver-node
-// commit: 092c2afc3ad7e4d2b03fe8ac0deb418ec4276915
-// last fetched Sat Jul 03 2021 10:17:05 GMT-0700 (Pacific Daylight Time)
+// commit: 0cb3812e7d540ef3a904e96df795bc37a21de9b0
+// last fetched Mon Aug 02 2021 10:08:19 GMT-0400 (Eastern Daylight Time)
 
 // Code generated (see typescript/README.md) DO NOT EDIT.
 
diff --git a/internal/lsp/protocol/tsprotocol.go b/internal/lsp/protocol/tsprotocol.go
index 209da9bcab4..fe0e7499ee5 100644
--- a/internal/lsp/protocol/tsprotocol.go
+++ b/internal/lsp/protocol/tsprotocol.go
@@ -4,8 +4,8 @@
 
 // Package protocol contains data types and code for LSP jsonrpcs
 // generated automatically from vscode-languageserver-node
-// commit: 092c2afc3ad7e4d2b03fe8ac0deb418ec4276915
-// last fetched Sat Jul 03 2021 10:17:05 GMT-0700 (Pacific Daylight Time)
+// commit: 0cb3812e7d540ef3a904e96df795bc37a21de9b0
+// last fetched Mon Aug 02 2021 10:08:19 GMT-0400 (Eastern Daylight Time)
 package protocol
 
 // Code generated (see typescript/README.md) DO NOT EDIT.
@@ -976,17 +976,15 @@ type CompletionItemKind float64
  */
 type CompletionItemLabelDetails struct {
 	/**
-	 * The parameters without the return type.
+	 * An optional string which is rendered less prominently directly after {@link CompletionItemLabel.label label},
+	 * without any spacing. Should be used for function signatures or type annotations.
 	 */
-	Parameters string `json:"parameters,omitempty"`
-	/**
-	 * The fully qualified name, like package name or file path.
-	 */
-	Qualifier string `json:"qualifier,omitempty"`
+	Detail string `json:"detail,omitempty"`
 	/**
-	 * The return-type of a function or type of a property/variable.
+	 * An optional string which is rendered less prominently after {@link CompletionItemLabel.detail}. Should be used
+	 * for fully qualified names or file path.
 	 */
-	Type string `json:"type,omitempty"`
+	Description string `json:"description,omitempty"`
 }
 
 /**
diff --git a/internal/lsp/protocol/tsserver.go b/internal/lsp/protocol/tsserver.go
index 948250c2790..b274eb1df26 100644
--- a/internal/lsp/protocol/tsserver.go
+++ b/internal/lsp/protocol/tsserver.go
@@ -6,8 +6,8 @@ package protocol
 
 // Package protocol contains data types and code for LSP jsonrpcs
 // generated automatically from vscode-languageserver-node
-// commit: 092c2afc3ad7e4d2b03fe8ac0deb418ec4276915
-// last fetched Sat Jul 03 2021 10:17:05 GMT-0700 (Pacific Daylight Time)
+// commit: 0cb3812e7d540ef3a904e96df795bc37a21de9b0
+// last fetched Mon Aug 02 2021 10:08:19 GMT-0400 (Eastern Daylight Time)
 
 // Code generated (see typescript/README.md) DO NOT EDIT.
 
diff --git a/internal/lsp/protocol/typescript/util.ts b/internal/lsp/protocol/typescript/util.ts
index 08b920493b6..a32aab0c2da 100644
--- a/internal/lsp/protocol/typescript/util.ts
+++ b/internal/lsp/protocol/typescript/util.ts
@@ -15,7 +15,7 @@ export const fnames = [
   `${dir}/${srcDir}/protocol/src/browser/main.ts`, `${dir}${srcDir}/types/src/main.ts`,
   `${dir}${srcDir}/jsonrpc/src/node/main.ts`
 ];
-export const gitHash = '092c2afc3ad7e4d2b03fe8ac0deb418ec4276915';
+export const gitHash = '0cb3812e7d540ef3a904e96df795bc37a21de9b0';
 let outFname = 'tsprotocol.go';
 let fda: number, fdb: number, fde: number;  // file descriptors
 
diff --git a/internal/lsp/regtest/runner.go b/internal/lsp/regtest/runner.go
index 6b3501c1e12..05867c4f970 100644
--- a/internal/lsp/regtest/runner.go
+++ b/internal/lsp/regtest/runner.go
@@ -178,10 +178,6 @@ func DebugAddress(addr string) RunOption {
 	})
 }
 
-var WindowsLineEndings = optionSetter(func(opts *runConfig) {
-	opts.editor.WindowsLineEndings = true
-})
-
 // SkipLogs skips the buffering of logs during test execution. It is intended
 // for long-running stress tests.
 func SkipLogs() RunOption {
diff --git a/internal/lsp/regtest/wrappers.go b/internal/lsp/regtest/wrappers.go
index 5677ab04161..96844e3b6d0 100644
--- a/internal/lsp/regtest/wrappers.go
+++ b/internal/lsp/regtest/wrappers.go
@@ -369,6 +369,13 @@ func (e *Env) References(path string, pos fake.Pos) []protocol.Location {
 	return locations
 }
 
+func (e *Env) Rename(path string, pos fake.Pos, newName string) {
+	e.T.Helper()
+	if err := e.Editor.Rename(e.Ctx, path, pos, newName); err != nil {
+		e.T.Fatal(err)
+	}
+}
+
 // Completion executes a completion request on the server.
 func (e *Env) Completion(path string, pos fake.Pos) *protocol.CompletionList {
 	e.T.Helper()
diff --git a/internal/lsp/semantic.go b/internal/lsp/semantic.go
index c0ed972d89f..6a98b6c586c 100644
--- a/internal/lsp/semantic.go
+++ b/internal/lsp/semantic.go
@@ -11,7 +11,7 @@ import (
 	"go/ast"
 	"go/token"
 	"go/types"
-	"log"
+	"path/filepath"
 	"sort"
 	"strings"
 	"time"
@@ -88,12 +88,11 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
 	if err != nil {
 		return nil, err
 	}
-	info := pkg.GetTypesInfo()
 	pgf, err := pkg.File(fh.URI())
 	if err != nil {
 		return nil, err
 	}
-	// don't return errors on pgf.ParseErr. Do what we can.
+	// ignore pgf.ParseErr. Do what we can.
 	if rng == nil && len(pgf.Src) > maxFullFileSize {
 		err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
 			fh.URI().Filename(), len(pgf.Src), maxFullFileSize)
@@ -103,7 +102,8 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
 		ctx:      ctx,
 		pgf:      pgf,
 		rng:      rng,
-		ti:       info,
+		ti:       pkg.GetTypesInfo(),
+		pkg:      pkg,
 		fset:     snapshot.FileSet(),
 		tokTypes: s.session.Options().SemanticTypes,
 		tokMods:  s.session.Options().SemanticMods,
@@ -122,6 +122,7 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
 
 func (e *encoded) semantics() {
 	f := e.pgf.File
+	// may not be in range, but harmless
 	e.token(f.Package, len("package"), tokKeyword, nil)
 	e.token(f.Name.NamePos, len(f.Name.Name), tokNamespace, nil)
 	inspect := func(n ast.Node) bool {
@@ -154,7 +155,7 @@ const (
 	tokInterface tokenType = "interface"
 	tokParameter tokenType = "parameter"
 	tokVariable  tokenType = "variable"
-	tokMember    tokenType = "member"
+	tokMethod    tokenType = "method"
 	tokFunction  tokenType = "function"
 	tokKeyword   tokenType = "keyword"
 	tokComment   tokenType = "comment"
@@ -166,8 +167,11 @@ const (
 )
 
 func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) {
-	if start == 0 {
-		e.unexpected("token at token.NoPos")
+
+	if !start.IsValid() {
+		// This is not worth reporting
+		//e.unexpected("token at token.NoPos")
+		return
 	}
 	if start >= e.end || start+token.Pos(leng) <= e.start {
 		return
@@ -186,10 +190,7 @@ func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string)
 		return
 	}
 	if lspRange.End.Line != lspRange.Start.Line {
-		// abrupt end of file, without \n. TODO(pjw): fix?
-		pos := e.fset.PositionFor(start, false)
-		msg := fmt.Sprintf("token at %s:%d.%d overflows", pos.Filename, pos.Line, pos.Column)
-		event.Log(e.ctx, msg)
+		// this happens if users are typing at the end of the file, but report nothing
 		return
 	}
 	// token is all on one line
@@ -219,6 +220,8 @@ type encoded struct {
 	pgf               *source.ParsedGoFile
 	rng               *protocol.Range
 	ti                *types.Info
+	types             *types.Package
+	pkg               source.Package
 	fset              *token.FileSet
 	// allowed starting and ending token.Pos, set by init
 	// used to avoid looking at declarations not in range
@@ -235,13 +238,38 @@ func (e *encoded) strStack() string {
 	}
 	if len(e.stack) > 0 {
 		loc := e.stack[len(e.stack)-1].Pos()
-		add := e.pgf.Tok.PositionFor(loc, false)
-		msg = append(msg, fmt.Sprintf("(line:%d,col:%d)", add.Line, add.Column))
+		if !source.InRange(e.pgf.Tok, loc) {
+			msg = append(msg, fmt.Sprintf("invalid position %v for %s", loc, e.pgf.URI))
+		} else if locInRange(e.pgf.Tok, loc) {
+			add := e.pgf.Tok.PositionFor(loc, false)
+			nm := filepath.Base(add.Filename)
+			msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", nm, add.Line, add.Column))
+		} else {
+			msg = append(msg, fmt.Sprintf("(loc %d out of range)", loc))
+		}
 	}
 	msg = append(msg, "]")
 	return strings.Join(msg, " ")
 }
 
+// avoid panic in token.PositionFor() when typing at the end of the file
+func locInRange(f *token.File, loc token.Pos) bool {
+	return f.Base() <= int(loc) && int(loc) < f.Base()+f.Size()
+}
+
+// find the line in the source
+func (e *encoded) srcLine(x ast.Node) string {
+	file := e.pgf.Tok
+	line := file.Line(x.Pos())
+	start := file.Offset(file.LineStart(line))
+	end := start
+	for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ {
+
+	}
+	ans := e.pgf.Src[start:end]
+	return string(ans)
+}
+
 func (e *encoded) inspector(n ast.Node) bool {
 	pop := func() {
 		e.stack = e.stack[:len(e.stack)-1]
@@ -381,12 +409,12 @@ func (e *encoded) inspector(n ast.Node) bool {
 	case *ast.UnaryExpr:
 		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
 	case *ast.ValueSpec:
-	// things we only see with parsing or type errors, so we ignore them
+	// things only seen with parsing or type errors, so ignore them
 	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
 		return true
 	// not going to see these
 	case *ast.File, *ast.Package:
-		log.Printf("implement %T %s", x, e.pgf.Tok.PositionFor(x.Pos(), false))
+		e.unexpected(fmt.Sprintf("implement %T %s", x, e.pgf.Tok.PositionFor(x.Pos(), false)))
 	// other things we knowingly ignore
 	case *ast.Comment, *ast.CommentGroup:
 		pop()
@@ -398,6 +426,10 @@ func (e *encoded) inspector(n ast.Node) bool {
 }
 
 func (e *encoded) ident(x *ast.Ident) {
+	if e.ti == nil {
+		e.unkIdent(x)
+		return
+	}
 	def := e.ti.Defs[x]
 	if def != nil {
 		what, mods := e.definitionFor(x)
@@ -409,7 +441,8 @@ func (e *encoded) ident(x *ast.Ident) {
 	use := e.ti.Uses[x]
 	switch y := use.(type) {
 	case nil:
-		e.token(x.NamePos, len(x.Name), tokVariable, []string{"definition"})
+		e.unkIdent(x)
+		return
 	case *types.Builtin:
 		e.token(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
 	case *types.Const:
@@ -462,6 +495,135 @@ func (e *encoded) ident(x *ast.Ident) {
 	}
 }
 
+// both e.ti.Defs[x] and e.ti.Uses[x] are nil, so classify x from the parse stack.
+// Many of these cases arise only when the package doesn't compile.
+func (e *encoded) unkIdent(x *ast.Ident) {
+	tok := func(tok tokenType, mod []string) {
+		e.token(x.Pos(), len(x.Name), tok, mod)
+	}
+	def := []string{"definition"}
+	n := len(e.stack) - 2 // parent of Ident
+	if n < 0 {
+		e.unexpected("no stack?")
+		return
+	}
+	switch nd := e.stack[n].(type) {
+	case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr,
+		*ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr,
+		*ast.ReturnStmt,
+		*ast.ForStmt,      // possibly incomplete
+		*ast.IfStmt,       /* condition */
+		*ast.KeyValueExpr: // either key or value
+		tok(tokVariable, nil)
+	case *ast.Ellipsis:
+		tok(tokType, nil)
+	case *ast.CaseClause:
+		if n-2 >= 0 {
+			if _, ok := e.stack[n-2].(*ast.TypeSwitchStmt); ok {
+				tok(tokType, nil)
+				return
+			}
+		}
+		tok(tokVariable, nil)
+	case *ast.ArrayType:
+		if x == nd.Len {
+			tok(tokVariable, nil)
+		} else {
+			tok(tokType, nil)
+		}
+	case *ast.MapType:
+		tok(tokType, nil)
+	case *ast.CallExpr:
+		if x == nd.Fun {
+			tok(tokFunction, nil)
+			return
+		}
+		tok(tokVariable, nil)
+	case *ast.TypeAssertExpr:
+		if x == nd.X {
+			tok(tokVariable, nil)
+		} else if x == nd.Type {
+			tok(tokType, nil)
+		}
+	case *ast.ValueSpec:
+		for _, p := range nd.Names {
+			if p == x {
+				tok(tokVariable, def)
+				return
+			}
+		}
+		for _, p := range nd.Values {
+			if p == x {
+				tok(tokVariable, nil)
+				return
+			}
+		}
+		tok(tokType, nil)
+	case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help
+		if n-1 >= 0 {
+			if ce, ok := e.stack[n-1].(*ast.CallExpr); ok {
+				// ... CallExpr SelectorExpr Ident (_.x())
+				if ce.Fun == nd && nd.Sel == x {
+					tok(tokFunction, nil)
+					return
+				}
+			}
+		}
+		tok(tokVariable, nil)
+	case *ast.AssignStmt:
+		for _, p := range nd.Lhs {
+			// x := ..., or x = ...
+			if p == x {
+				if nd.Tok != token.DEFINE {
+					def = nil
+				}
+				tok(tokVariable, def)
+				return
+			}
+		}
+		// RHS, = x
+		tok(tokVariable, nil)
+	case *ast.TypeSpec: // it's a type if it is either the Name or the Type
+		if x == nd.Type {
+			def = nil
+		}
+		tok(tokType, def)
+	case *ast.Field:
+		// ident could be type in a field, or a method in an interface type, or a variable
+		if x == nd.Type {
+			tok(tokType, nil)
+			return
+		}
+		if n-2 >= 0 {
+			_, okit := e.stack[n-2].(*ast.InterfaceType)
+			_, okfl := e.stack[n-1].(*ast.FieldList)
+			if okit && okfl {
+				tok(tokMethod, def)
+				return
+			}
+		}
+		tok(tokVariable, nil)
+	case *ast.LabeledStmt, *ast.BranchStmt:
+		// nothing to report
+	case *ast.CompositeLit:
+		if nd.Type == x {
+			tok(tokType, nil)
+			return
+		}
+		tok(tokVariable, nil)
+	case *ast.RangeStmt:
+		if nd.Tok != token.DEFINE {
+			def = nil
+		}
+		tok(tokVariable, def)
+	case *ast.FuncDecl:
+		tok(tokFunction, def)
+	default:
+		msg := fmt.Sprintf("%T unexpected: %s %s%q", nd, x.Name, e.strStack(), e.srcLine(x))
+		e.unexpected(msg)
+	}
+}
+
 func isDeprecated(n *ast.CommentGroup) bool {
 	if n == nil {
 		return false
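
To illustrate the heuristics above, here is example input source, annotated with the token type each identifier would get (the snippet deliberately does not compile, which is exactly when unkIdent runs; the identifiers are made up and the annotations follow the switch cases):

// Example input in a package that fails to type-check; the parent node of
// each identifier determines its token type:
var total = count + limit // count, limit: parent BinaryExpr -> variable
x := compute(n)           // compute: CallExpr.Fun -> function; n -> variable
type ID = Key             // ID: TypeSpec name -> type (definition); Key -> type
m := map[Key]Value{}      // Key, Value: MapType -> type
client.Close()            // Close: Sel of a called SelectorExpr -> function
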
@@ -483,7 +645,7 @@ func (e *encoded) definitionFor(x *ast.Ident) (tokenType, []string) {
 			if x.Name == "_" {
 				return "", nil // not really a variable
 			}
-			return "variable", mods
+			return tokVariable, mods
 		case *ast.GenDecl:
 			if isDeprecated(y.Doc) {
 				mods = append(mods, "deprecated")
@@ -499,7 +661,7 @@ func (e *encoded) definitionFor(x *ast.Ident) (tokenType, []string) {
 					mods = append(mods, "deprecated")
 				}
 				if y.Recv != nil {
-					return tokMember, mods
+					return tokMethod, mods
 				}
 				return tokFunction, mods
 			}
@@ -509,8 +671,10 @@ func (e *encoded) definitionFor(x *ast.Ident) (tokenType, []string) {
 			}
 			// if x < ... < FieldList < FuncType < FuncDecl, this is a param
 			return tokParameter, mods
+		case *ast.FuncType:
+			return tokParameter, mods
 		case *ast.InterfaceType:
-			return tokMember, mods
+			return tokMethod, mods
 		case *ast.TypeSpec:
 			// GenDecl/Typespec/FuncType/FieldList/Field/Ident
 			// (type A func(b uint64)) (err error)
@@ -631,27 +795,36 @@ func (e *encoded) importSpec(d *ast.ImportSpec) {
 	// a local package name or the last component of the Path
 	if d.Name != nil {
 		nm := d.Name.String()
-		// import . x => x is not a namespace
-		// import _ x => x is a namespace
 		if nm != "_" && nm != "." {
 			e.token(d.Name.Pos(), len(nm), tokNamespace, nil)
-			return
 		}
-		if nm == "." {
-			return
-		}
-		// and fall through for _
+		return // don't mark anything for . or _
+	}
+	val := d.Path.Value
+	if len(val) < 2 || val[0] != '"' || val[len(val)-1] != '"' {
+		// avoid panics on imports without a properly quoted string
+		return
 	}
-	if d.Path.Value == "" {
+	nm := val[1 : len(val)-1] // remove surrounding "s
+	// Import strings are implementation defined. Try to match with parse information.
+	x, err := e.pkg.GetImport(nm)
+	if err != nil {
+		// unexpected, but impact is that maybe some import is not colored
+		return
+	}
+	// expect that nm is x.PkgPath and that x.Name() is a component of it
+	if x.PkgPath() != nm {
+		// don't know how or what to color (if this can happen at all)
 		return
 	}
-	nm := d.Path.Value[1 : len(d.Path.Value)-1] // trailing "
-	v := strings.LastIndex(nm, "/")
-	if v != -1 {
-		nm = nm[v+1:]
+	// this is not a precise test: imagine "github.com/nasty/v/v2"
+	j := strings.LastIndex(nm, x.Name())
+	if j == -1 {
+		// name doesn't show up, for whatever reason, so nothing to report
+		return
 	}
-	start := d.Path.End() - token.Pos(1+len(nm))
-	e.token(start, len(nm), tokNamespace, nil)
+	start := d.Path.Pos() + 1 + token.Pos(j) // skip the initial quote
+	e.token(start, len(x.Name()), tokNamespace, nil)
 }
 
 // log unexpected state
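
A standalone sketch of the offset arithmetic used above to locate the package name inside a quoted import path (the path is illustrative, and x.Name() is assumed to report "v2"):

package main

import (
	"fmt"
	"strings"
)

func main() {
	val := `"github.com/nasty/v/v2"` // d.Path.Value, quotes included
	name := "v2"                     // assumed result of x.Name()
	nm := val[1 : len(val)-1]        // strip the surrounding quotes
	j := strings.LastIndex(nm, name) // 19
	// The namespace token starts at d.Path.Pos() + 1 + j (skipping the
	// opening quote) and spans len(name) bytes.
	fmt.Println(j, len(name)) // 19 2
}
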
@@ -707,7 +880,7 @@ var (
 	semanticTypes = [...]string{
 		"namespace", "type", "class", "enum", "interface",
 		"struct", "typeParameter", "parameter", "variable", "property", "enumMember",
-		"event", "function", "member", "macro", "keyword", "modifier", "comment",
+		"event", "function", "method", "macro", "keyword", "modifier", "comment",
 		"string", "number", "regexp", "operator",
 	}
 	semanticModifiers = [...]string{
diff --git a/internal/lsp/source/api_json.go b/internal/lsp/source/api_json.go
index 9accafa8d5f..9b5734fa87a 100755
--- a/internal/lsp/source/api_json.go
+++ b/internal/lsp/source/api_json.go
@@ -40,7 +40,7 @@ var GeneratedAPIJSON = &APIJSON{
 					Keys:      nil,
 				},
 				EnumValues: nil,
-				Default:    "[]",
+				Default:    "[\"-node_modules\"]",
 				Status:     "",
 				Hierarchy:  "build",
 			},
@@ -325,6 +325,10 @@ var GeneratedAPIJSON = &APIJSON{
 						Value: "\"CaseSensitive\"",
 						Doc:   "",
 					},
+					{
+						Value: "\"FastFuzzy\"",
+						Doc:   "",
+					},
 					{
 						Value: "\"Fuzzy\"",
 						Doc:   "",
@@ -337,7 +341,7 @@ var GeneratedAPIJSON = &APIJSON{
 			{
 				Name: "symbolStyle",
 				Type: "enum",
-				Doc:  "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"symbolStyle\": \"dynamic\",\n...\n}\n```\n",
+				Doc:  "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"symbolStyle\": \"Dynamic\",\n...\n}\n```\n",
 				EnumKeys: EnumKeys{
 					ValueType: "",
 					Keys:      nil,
@@ -655,7 +659,7 @@ var GeneratedAPIJSON = &APIJSON{
 			{
 				Name: "codelenses",
 				Type: "map[string]bool",
-				Doc:  "codelenses overrides the enabled/disabled state of code lenses. See the\n\"Code Lenses\" section of the\n[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md)\nfor the list of supported lenses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"codelens\": {\n    \"generate\": false,  // Don't show the `go generate` lens.\n    \"gc_details\": true  // Show a code lens toggling the display of gc's choices.\n  }\n...\n}\n```\n",
+				Doc:  "codelenses overrides the enabled/disabled state of code lenses. See the\n\"Code Lenses\" section of the\n[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md)\nfor the list of supported lenses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n  \"codelenses\": {\n    \"generate\": false,  // Don't show the `go generate` lens.\n    \"gc_details\": true  // Show a code lens toggling the display of gc's choices.\n  }\n...\n}\n```\n",
 				EnumKeys: EnumKeys{
 					ValueType: "bool",
 					Keys: []EnumKey{
diff --git a/internal/lsp/source/completion/completion.go b/internal/lsp/source/completion/completion.go
index 741e6b35b68..dbc380c0f34 100644
--- a/internal/lsp/source/completion/completion.go
+++ b/internal/lsp/source/completion/completion.go
@@ -365,10 +365,10 @@ type candidate struct {
 	// itself) for a deep candidate.
 	path []types.Object
 
-	// names tracks the names of objects from search root (excluding the
-	// candidate itself) for a deep candidate. This also includes
-	// expanded calls for function invocations.
-	names []string
+	// pathInvokeMask is a bit mask tracking whether each entry in path
+	// should be formatted with "()" (i.e. whether it is a function
+	// invocation).
+	pathInvokeMask uint16
 
 	// mods contains modifications that should be applied to the
 	// candidate when inserted. For example, "foo" may be inserted as
@@ -555,7 +555,6 @@ func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHan
 
 	// Deep search collected candidates and their members for more candidates.
 	c.deepSearch(ctx)
-	c.deepState.searchQueue = nil
 
 	for _, callback := range c.completionCallbacks {
 		if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil {
@@ -1088,10 +1087,9 @@ func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error {
 					return err
 				}
 			}
-			candidates := c.packageMembers(pkgName.Imported(), stdScore, nil)
-			for _, cand := range candidates {
+			c.packageMembers(pkgName.Imported(), stdScore, nil, func(cand candidate) {
 				c.deepState.enqueue(cand)
-			}
+			})
 			return nil
 		}
 	}
@@ -1099,10 +1097,9 @@ func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error {
 	// Invariant: sel is a true selector.
 	tv, ok := c.pkg.GetTypesInfo().Types[sel.X]
 	if ok {
-		candidates := c.methodsAndFields(tv.Type, tv.Addressable(), nil)
-		for _, cand := range candidates {
+		c.methodsAndFields(tv.Type, tv.Addressable(), nil, func(cand candidate) {
 			c.deepState.enqueue(cand)
-		}
+		})
 
 		c.addPostfixSnippetCandidates(ctx, sel)
 
@@ -1159,10 +1156,9 @@ func (c *completer) unimportedMembers(ctx context.Context, id *ast.Ident) error
 		if imports.ImportPathToAssumedName(path) != pkg.GetTypes().Name() {
 			imp.name = pkg.GetTypes().Name()
 		}
-		candidates := c.packageMembers(pkg.GetTypes(), unimportedScore(relevances[path]), imp)
-		for _, cand := range candidates {
+		c.packageMembers(pkg.GetTypes(), unimportedScore(relevances[path]), imp, func(cand candidate) {
 			c.deepState.enqueue(cand)
-		}
+		})
 		if len(c.items) >= unimportedMemberTarget {
 			return nil
 		}
@@ -1209,22 +1205,20 @@ func unimportedScore(relevance float64) float64 {
 	return (stdScore + .1*relevance) / 2
 }
 
-func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo) []candidate {
-	var candidates []candidate
+func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) {
 	scope := pkg.Scope()
 	for _, name := range scope.Names() {
 		obj := scope.Lookup(name)
-		candidates = append(candidates, candidate{
+		cb(candidate{
 			obj:         obj,
 			score:       score,
 			imp:         imp,
 			addressable: isVar(obj),
 		})
 	}
-	return candidates
 }
 
-func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo) []candidate {
+func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) {
 	mset := c.methodSetCache[methodSetKey{typ, addressable}]
 	if mset == nil {
 		if addressable && !types.IsInterface(typ) && !isPointer(typ) {
@@ -1237,9 +1231,8 @@ func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *impo
 		c.methodSetCache[methodSetKey{typ, addressable}] = mset
 	}
 
-	var candidates []candidate
 	for i := 0; i < mset.Len(); i++ {
-		candidates = append(candidates, candidate{
+		cb(candidate{
 			obj:         mset.At(i).Obj(),
 			score:       stdScore,
 			imp:         imp,
@@ -1249,15 +1242,13 @@ func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *impo
 
 	// Add fields of T.
 	eachField(typ, func(v *types.Var) {
-		candidates = append(candidates, candidate{
+		cb(candidate{
 			obj:         v,
 			score:       stdScore - 0.01,
 			imp:         imp,
 			addressable: addressable || isPointer(typ),
 		})
 	})
-
-	return candidates
 }
 
 // lexical finds completions in the lexical environment.
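
The hunks above change packageMembers and methodsAndFields from returning []candidate to invoking a callback per candidate, so callers can enqueue or filter without allocating an intermediate slice. A generic sketch of the pattern (the names are illustrative, not gopls API):

package main

import "fmt"

// eachMember enumerates members through a callback rather than returning a
// slice; the caller decides whether to collect, enqueue, or drop each one.
func eachMember(members []string, cb func(string)) {
	for _, m := range members {
		cb(m)
	}
}

func main() {
	var queue []string
	eachMember([]string{"Len", "Cap", "Grow"}, func(m string) {
		queue = append(queue, m) // analogous to c.deepState.enqueue(cand)
	})
	fmt.Println(queue) // [Len Cap Grow]
}
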
@@ -1272,6 +1263,7 @@ func (c *completer) lexical(ctx context.Context) error {
 		// Filter it out from completion results to stabilize tests.
 		// TODO(rFindley) update (or remove) our handling for comparable once the
 		//                type parameter API has stabilized.
+		builtinAny        = types.Universe.Lookup("any")
 		builtinComparable = types.Universe.Lookup("comparable")
 	)
 
@@ -1291,7 +1283,7 @@ func (c *completer) lexical(ctx context.Context) error {
 			if declScope != scope {
 				continue // Name was declared in some enclosing scope, or not at all.
 			}
-			if obj == builtinComparable {
+			if obj == builtinComparable || obj == builtinAny {
 				continue
 			}
 
diff --git a/internal/lsp/source/completion/deep_completion.go b/internal/lsp/source/completion/deep_completion.go
index 45a02ff0716..a13d807d4f5 100644
--- a/internal/lsp/source/completion/deep_completion.go
+++ b/internal/lsp/source/completion/deep_completion.go
@@ -26,8 +26,11 @@ type deepCompletionState struct {
 	// once we're running out of our time budget.
 	queueClosed bool
 
-	// searchQueue holds the current breadth first search queue.
-	searchQueue []candidate
+	// thisQueue holds the current breadth first search queue.
+	thisQueue []candidate
+
+	// nextQueue holds the next breadth first search iteration's queue.
+	nextQueue []candidate
 
 	// highScores tracks the highest deep candidate scores we have found
 	// so far. This is used to avoid work for low scoring deep candidates.
@@ -40,13 +43,13 @@ type deepCompletionState struct {
 
 // enqueue adds a candidate to the search queue.
 func (s *deepCompletionState) enqueue(cand candidate) {
-	s.searchQueue = append(s.searchQueue, cand)
+	s.nextQueue = append(s.nextQueue, cand)
 }
 
 // dequeue removes and returns the leftmost element from the search queue.
 func (s *deepCompletionState) dequeue() *candidate {
 	var cand *candidate
-	cand, s.searchQueue = &s.searchQueue[0], s.searchQueue[1:]
+	cand, s.thisQueue = &s.thisQueue[len(s.thisQueue)-1], s.thisQueue[:len(s.thisQueue)-1]
 	return cand
 }
 
@@ -99,130 +102,135 @@ func (s *deepCompletionState) isHighScore(score float64) bool {
 
 // newPath returns path from search root for an object following a given
 // candidate.
-func (s *deepCompletionState) newPath(cand *candidate, obj types.Object, invoke bool) ([]types.Object, []string) {
-	name := obj.Name()
-	if invoke {
-		name += "()"
-	}
+func (s *deepCompletionState) newPath(cand candidate, obj types.Object) []types.Object {
+	path := make([]types.Object, len(cand.path)+1)
+	copy(path, cand.path)
+	path[len(path)-1] = obj
 
-	// copy the slice since we don't want to overwrite the original slice.
-	path := append([]types.Object{}, cand.path...)
-	names := append([]string{}, cand.names...)
-
-	return append(path, obj), append(names, name)
+	return path
 }
 
 // deepSearch searches a candidate and its subordinate objects for completion
 // items if deep completion is enabled and adds the valid candidates to
 // completion items.
 func (c *completer) deepSearch(ctx context.Context) {
-outer:
-	for len(c.deepState.searchQueue) > 0 {
-		cand := c.deepState.dequeue()
-		obj := cand.obj
+	defer func() {
+		// We can return early before completing the search, so be sure to
+		// clear out our queues to not impact any further invocations.
+		c.deepState.thisQueue = c.deepState.thisQueue[:0]
+		c.deepState.nextQueue = c.deepState.nextQueue[:0]
+	}()
 
-		if obj == nil {
-			continue
-		}
+	for len(c.deepState.nextQueue) > 0 {
+		c.deepState.thisQueue, c.deepState.nextQueue = c.deepState.nextQueue, c.deepState.thisQueue[:0]
+
+	outer:
+		for _, cand := range c.deepState.thisQueue {
+			obj := cand.obj
 
-		// At the top level, dedupe by object.
-		if len(cand.path) == 0 {
-			if c.seen[obj] {
+			if obj == nil {
 				continue
 			}
-			c.seen[obj] = true
-		}
 
-		// If obj is not accessible because it lives in another package and is
-		// not exported, don't treat it as a completion candidate unless it's
-		// a package completion candidate.
-		if !c.completionContext.packageCompletion &&
-			obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() && !obj.Exported() {
-			continue
-		}
+			// At the top level, dedupe by object.
+			if len(cand.path) == 0 {
+				if c.seen[obj] {
+					continue
+				}
+				c.seen[obj] = true
+			}
 
-		// If we want a type name, don't offer non-type name candidates.
-		// However, do offer package names since they can contain type names,
-		// and do offer any candidate without a type since we aren't sure if it
-		// is a type name or not (i.e. unimported candidate).
-		if c.wantTypeName() && obj.Type() != nil && !isTypeName(obj) && !isPkgName(obj) {
-			continue
-		}
+			// If obj is not accessible because it lives in another package and is
+			// not exported, don't treat it as a completion candidate unless it's
+			// a package completion candidate.
+			if !c.completionContext.packageCompletion &&
+				obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() && !obj.Exported() {
+				continue
+			}
 
-		// When searching deep, make sure we don't have a cycle in our chain.
-		// We don't dedupe by object because we want to allow both "foo.Baz"
-		// and "bar.Baz" even though "Baz" is represented the same types.Object
-		// in both.
-		for _, seenObj := range cand.path {
-			if seenObj == obj {
-				continue outer
+			// If we want a type name, don't offer non-type name candidates.
+			// However, do offer package names since they can contain type names,
+			// and do offer any candidate without a type since we aren't sure if it
+			// is a type name or not (i.e. unimported candidate).
+			if c.wantTypeName() && obj.Type() != nil && !isTypeName(obj) && !isPkgName(obj) {
+				continue
 			}
-		}
 
-		c.addCandidate(ctx, cand)
+			// When searching deep, make sure we don't have a cycle in our chain.
+			// We don't dedupe by object because we want to allow both "foo.Baz"
+			// and "bar.Baz" even though "Baz" is represented the same types.Object
+			// in both.
+			for _, seenObj := range cand.path {
+				if seenObj == obj {
+					continue outer
+				}
+			}
 
-		c.deepState.candidateCount++
-		if c.opts.budget > 0 && c.deepState.candidateCount%100 == 0 {
-			spent := float64(time.Since(c.startTime)) / float64(c.opts.budget)
-			select {
-			case <-ctx.Done():
-				return
-			default:
-				// If we are almost out of budgeted time, no further elements
-				// should be added to the queue. This ensures remaining time is
-				// used for processing current queue.
-				if !c.deepState.queueClosed && spent >= 0.85 {
-					c.deepState.queueClosed = true
+			c.addCandidate(ctx, &cand)
+
+			c.deepState.candidateCount++
+			if c.opts.budget > 0 && c.deepState.candidateCount%100 == 0 {
+				spent := float64(time.Since(c.startTime)) / float64(c.opts.budget)
+				select {
+				case <-ctx.Done():
+					return
+				default:
+					// If we are almost out of budgeted time, no further elements
+					// should be added to the queue. This ensures remaining time is
+					// used for processing current queue.
+					if !c.deepState.queueClosed && spent >= 0.85 {
+						c.deepState.queueClosed = true
+					}
 				}
 			}
-		}
 
-		// if deep search is disabled, don't add any more candidates.
-		if !c.deepState.enabled || c.deepState.queueClosed {
-			continue
-		}
+			// if deep search is disabled, don't add any more candidates.
+			if !c.deepState.enabled || c.deepState.queueClosed {
+				continue
+			}
 
-		// Searching members for a type name doesn't make sense.
-		if isTypeName(obj) {
-			continue
-		}
-		if obj.Type() == nil {
-			continue
-		}
+			// Searching members for a type name doesn't make sense.
+			if isTypeName(obj) {
+				continue
+			}
+			if obj.Type() == nil {
+				continue
+			}
 
-		// Don't search embedded fields because they were already included in their
-		// parent's fields.
-		if v, ok := obj.(*types.Var); ok && v.Embedded() {
-			continue
-		}
+			// Don't search embedded fields because they were already included in their
+			// parent's fields.
+			if v, ok := obj.(*types.Var); ok && v.Embedded() {
+				continue
+			}
 
-		if sig, ok := obj.Type().Underlying().(*types.Signature); ok {
-			// If obj is a function that takes no arguments and returns one
-			// value, keep searching across the function call.
-			if sig.Params().Len() == 0 && sig.Results().Len() == 1 {
-				path, names := c.deepState.newPath(cand, obj, true)
-				// The result of a function call is not addressable.
-				candidates := c.methodsAndFields(sig.Results().At(0).Type(), false, cand.imp)
-				for _, newCand := range candidates {
-					newCand.path, newCand.names = path, names
-					c.deepState.enqueue(newCand)
+			if sig, ok := obj.Type().Underlying().(*types.Signature); ok {
+				// If obj is a function that takes no arguments and returns one
+				// value, keep searching across the function call.
+				if sig.Params().Len() == 0 && sig.Results().Len() == 1 {
+					path := c.deepState.newPath(cand, obj)
+					// The result of a function call is not addressable.
+					c.methodsAndFields(sig.Results().At(0).Type(), false, cand.imp, func(newCand candidate) {
+						newCand.pathInvokeMask = cand.pathInvokeMask | (1 << uint64(len(cand.path)))
+						newCand.path = path
+						c.deepState.enqueue(newCand)
+					})
 				}
 			}
-		}
 
-		path, names := c.deepState.newPath(cand, obj, false)
-		switch obj := obj.(type) {
-		case *types.PkgName:
-			candidates := c.packageMembers(obj.Imported(), stdScore, cand.imp)
-			for _, newCand := range candidates {
-				newCand.path, newCand.names = path, names
-				c.deepState.enqueue(newCand)
-			}
-		default:
-			candidates := c.methodsAndFields(obj.Type(), cand.addressable, cand.imp)
-			for _, newCand := range candidates {
-				newCand.path, newCand.names = path, names
-				c.deepState.enqueue(newCand)
+			path := c.deepState.newPath(cand, obj)
+			switch obj := obj.(type) {
+			case *types.PkgName:
+				c.packageMembers(obj.Imported(), stdScore, cand.imp, func(newCand candidate) {
+					newCand.pathInvokeMask = cand.pathInvokeMask
+					newCand.path = path
+					c.deepState.enqueue(newCand)
+				})
+			default:
+				c.methodsAndFields(obj.Type(), cand.addressable, cand.imp, func(newCand candidate) {
+					newCand.pathInvokeMask = cand.pathInvokeMask
+					newCand.path = path
+					c.deepState.enqueue(newCand)
+				})
 			}
 		}
 	}
@@ -273,12 +281,40 @@ func (c *completer) addCandidate(ctx context.Context, cand *candidate) {
 		cand.score = 0
 	}
 
-	cand.name = strings.Join(append(cand.names, cand.obj.Name()), ".")
+	cand.name = deepCandName(cand)
 	if item, err := c.item(ctx, *cand); err == nil {
 		c.items = append(c.items, item)
 	}
 }
 
+// deepCandName produces the full candidate name including any
+// ancestor objects. For example, "foo.bar().baz" for candidate "baz".
+func deepCandName(cand *candidate) string {
+	totalLen := len(cand.obj.Name())
+	for i, obj := range cand.path {
+		totalLen += len(obj.Name()) + 1
+		if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
+			totalLen += 2
+		}
+	}
+
+	var buf strings.Builder
+	buf.Grow(totalLen)
+
+	for i, obj := range cand.path {
+		buf.WriteString(obj.Name())
+		if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
+			buf.WriteByte('(')
+			buf.WriteByte(')')
+		}
+		buf.WriteByte('.')
+	}
+
+	buf.WriteString(cand.obj.Name())
+
+	return buf.String()
+}
+
 // penalty reports a score penalty for cand in the range (0, 1).
 // For example, a candidate is penalized if it has already been used
 // in another switch case statement.
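
A standalone sketch of the pathInvokeMask encoding used by deepCandName above: bit i set means path element i is rendered as a call (the helper below is illustrative, not gopls code):

package main

import (
	"fmt"
	"strings"
)

// render joins a candidate path, appending "()" to any element whose bit is
// set in invokeMask, and finally the leaf name, mirroring deepCandName.
func render(path []string, invokeMask uint16, leaf string) string {
	var b strings.Builder
	for i, name := range path {
		b.WriteString(name)
		if invokeMask&(1<<uint16(i)) != 0 {
			b.WriteString("()")
		}
		b.WriteByte('.')
	}
	b.WriteString(leaf)
	return b.String()
}

func main() {
	// Path {"foo", "bar"} with bit 1 set renders as "foo.bar().baz".
	fmt.Println(render([]string{"foo", "bar"}, 1<<1, "baz"))
}
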
diff --git a/internal/lsp/source/completion/format.go b/internal/lsp/source/completion/format.go
index 5a20633f7c8..c7a7e013a6f 100644
--- a/internal/lsp/source/completion/format.go
+++ b/internal/lsp/source/completion/format.go
@@ -20,6 +20,11 @@ import (
 	errors "golang.org/x/xerrors"
 )
 
+var (
+	errNoMatch  = errors.New("not a surrounding match")
+	errLowScore = errors.New("not a high scoring candidate")
+)
+
 // item formats a candidate to a CompletionItem.
 func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) {
 	obj := cand.obj
@@ -27,13 +32,13 @@ func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, e
 	// if the object isn't a valid match against the surrounding, return early.
 	matchScore := c.matcher.Score(cand.name)
 	if matchScore <= 0 {
-		return CompletionItem{}, errors.New("not a surrounding match")
+		return CompletionItem{}, errNoMatch
 	}
 	cand.score *= float64(matchScore)
 
 	// Ignore deep candidates that won't be in the MaxDeepCompletions anyway.
 	if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) {
-		return CompletionItem{}, errors.New("not a high scoring candidate")
+		return CompletionItem{}, errLowScore
 	}
 
 	// Handle builtin types separately.
@@ -143,6 +148,7 @@ Suffixes:
 	// add the additional text edits needed.
 	if cand.imp != nil {
 		addlEdits, err := c.importEdits(cand.imp)
+
 		if err != nil {
 			return CompletionItem{}, err
 		}
diff --git a/internal/lsp/source/completion/package.go b/internal/lsp/source/completion/package.go
index d927fef9764..0ed66e637e3 100644
--- a/internal/lsp/source/completion/package.go
+++ b/internal/lsp/source/completion/package.go
@@ -213,7 +213,7 @@ func (c *completer) packageNameCompletions(ctx context.Context, fileURI span.URI
 // file. This also includes test packages for these packages (<pkg>_test) and
 // the directory name itself.
 func packageSuggestions(ctx context.Context, snapshot source.Snapshot, fileURI span.URI, prefix string) (packages []candidate, err error) {
-	workspacePackages, err := snapshot.WorkspacePackages(ctx)
+	workspacePackages, err := snapshot.ActivePackages(ctx)
 	if err != nil {
 		return nil, err
 	}
diff --git a/internal/lsp/source/extract.go b/internal/lsp/source/extract.go
index 6450ba3612c..8f7010a2510 100644
--- a/internal/lsp/source/extract.go
+++ b/internal/lsp/source/extract.go
@@ -79,16 +79,16 @@ func extractVariable(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 
 	return &analysis.SuggestedFix{
 		TextEdits: []analysis.TextEdit{
-			{
-				Pos:     rng.Start,
-				End:     rng.End,
-				NewText: []byte(lhs),
-			},
 			{
 				Pos:     insertBeforeStmt.Pos(),
 				End:     insertBeforeStmt.Pos(),
 				NewText: []byte(assignment),
 			},
+			{
+				Pos:     rng.Start,
+				End:     rng.End,
+				NewText: []byte(lhs),
+			},
 		},
 	}, nil
 }
@@ -139,11 +139,17 @@ func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.
 // Possible collisions include other function and variable names. Returns the next index to check for prefix.
 func generateAvailableIdentifier(pos token.Pos, file *ast.File, path []ast.Node, info *types.Info, prefix string, idx int) (string, int) {
 	scopes := CollectScopes(info, path, pos)
+	return generateIdentifier(idx, prefix, func(name string) bool {
+		return file.Scope.Lookup(name) != nil || !isValidName(name, scopes)
+	})
+}
+
+func generateIdentifier(idx int, prefix string, hasCollision func(string) bool) (string, int) {
 	name := prefix
 	if idx != 0 {
 		name += fmt.Sprintf("%d", idx)
 	}
-	for file.Scope.Lookup(name) != nil || !isValidName(name, scopes) {
+	for hasCollision(name) {
 		idx++
 		name = fmt.Sprintf("%v%d", prefix, idx)
 	}
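
The new generateIdentifier separates name generation from the collision check by taking a callback. A runnable sketch of the same pattern (a standalone reimplementation, not the gopls function itself):

package main

import "fmt"

// nextFreeName appends an increasing numeric suffix to prefix until the
// caller-supplied collision check passes, like generateIdentifier above.
func nextFreeName(prefix string, hasCollision func(string) bool) string {
	name := prefix
	for idx := 1; hasCollision(name); idx++ {
		name = fmt.Sprintf("%s%d", prefix, idx)
	}
	return name
}

func main() {
	taken := map[string]bool{"newFunction": true, "newFunction1": true}
	fmt.Println(nextFreeName("newFunction", func(n string) bool { return taken[n] }))
	// Output: newFunction2
}
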
@@ -177,28 +183,42 @@ type returnVariable struct {
 	zeroVal ast.Expr
 }
 
+// extractMethod refactors the selected block of code into a new method.
+func extractMethod(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+	return extractFunctionMethod(fset, rng, src, file, pkg, info, true)
+}
+
 // extractFunction refactors the selected block of code into a new function.
+func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+	return extractFunctionMethod(fset, rng, src, file, pkg, info, false)
+}
+
+// extractFunctionMethod refactors the selected block of code into a new function/method.
 // It also replaces the selected block of code with a call to the extracted
 // function. First, we manually adjust the selection range. We remove trailing
 // and leading whitespace characters to ensure the range is precisely bounded
 // by AST nodes. Next, we determine the variables that will be the parameters
-// and return values of the extracted function. Lastly, we construct the call
-// of the function and insert this call as well as the extracted function into
+// and return values of the extracted function/method. Lastly, we construct the call
+// of the function/method and insert this call as well as the extracted function/method into
 // their proper locations.
-func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
-	p, ok, err := CanExtractFunction(fset, rng, src, file)
-	if !ok {
-		return nil, fmt.Errorf("extractFunction: cannot extract %s: %v",
+func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*analysis.SuggestedFix, error) {
+	errorPrefix := "extractFunction"
+	if isMethod {
+		errorPrefix = "extractMethod"
+	}
+	p, ok, methodOk, err := CanExtractFunction(fset, rng, src, file)
+	if (!ok && !isMethod) || (!methodOk && isMethod) {
+		return nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix,
 			fset.Position(rng.Start), err)
 	}
 	tok, path, rng, outer, start := p.tok, p.path, p.rng, p.outer, p.start
 	fileScope := info.Scopes[file]
 	if fileScope == nil {
-		return nil, fmt.Errorf("extractFunction: file scope is empty")
+		return nil, fmt.Errorf("%s: file scope is empty", errorPrefix)
 	}
 	pkgScope := fileScope.Parent()
 	if pkgScope == nil {
-		return nil, fmt.Errorf("extractFunction: package scope is empty")
+		return nil, fmt.Errorf("%s: package scope is empty", errorPrefix)
 	}
 
 	// A return statement is non-nested if its parent node is equal to the parent node
@@ -235,6 +255,25 @@ func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 		return nil, err
 	}
 
+	var (
+		receiverUsed bool
+		receiver     *ast.Field
+		receiverName string
+		receiverObj  types.Object
+	)
+	if isMethod {
+		if outer == nil || outer.Recv == nil || len(outer.Recv.List) == 0 {
+			return nil, fmt.Errorf("%s: cannot extract: the enclosing function has no method receiver", errorPrefix)
+		}
+		receiver = outer.Recv.List[0]
+		if len(receiver.Names) == 0 || receiver.Names[0] == nil {
+			return nil, fmt.Errorf("%s: cannot extract: the method receiver has no name", errorPrefix)
+		}
+		recvName := receiver.Names[0]
+		receiverName = recvName.Name
+		receiverObj = info.ObjectOf(recvName)
+	}
+
 	var (
 		params, returns         []ast.Expr     // used when calling the extracted function
 		paramTypes, returnTypes []*ast.Field   // used in the signature of the extracted function
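
To make the method-extraction path concrete, here is an illustrative before/after pair. The source is made up, and exact output may differ; per the code above the new method is named "newMethod", the receiver is reused in the call, and it is dropped from the extracted signature's name list only if the selection never uses it:

// Before: the loop body, which reads the receiver field c.scale, is selected.
func (c *Counter) Total(xs []int) int {
	sum := 0
	for _, x := range xs {
		sum += x * c.scale
	}
	return sum
}

// After: the selection is replaced by a call through the receiver, and the
// extracted code becomes a method on the same receiver type.
func (c *Counter) Total(xs []int) int {
	sum := c.newMethod(xs)
	return sum
}

func (c *Counter) newMethod(xs []int) int {
	sum := 0
	for _, x := range xs {
		sum += x * c.scale
	}
	return sum
}
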
@@ -308,6 +347,11 @@ func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 		// extracted function. (1) it must be free (isFree), and (2) its first
 		// use within the selection cannot be its own definition (isDefined).
 		if v.free && !v.defined {
+			// Skip the selector for a method.
+			if isMethod && v.obj == receiverObj {
+				receiverUsed = true
+				continue
+			}
 			params = append(params, identifier)
 			paramTypes = append(paramTypes, &ast.Field{
 				Names: []*ast.Ident{identifier},
@@ -471,9 +515,17 @@ func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 	if canDefine {
 		sym = token.DEFINE
 	}
-	funName, _ := generateAvailableIdentifier(rng.Start, file, path, info, "newFunction", 0)
+	var name, funName string
+	if isMethod {
+		name = "newMethod"
+		// TODO(suzmue): generate a name that does not conflict for "newMethod".
+		funName = name
+	} else {
+		name = "newFunction"
+		funName, _ = generateAvailableIdentifier(rng.Start, file, path, info, name, 0)
+	}
 	extractedFunCall := generateFuncCall(hasNonNestedReturn, hasReturnValues, params,
-		append(returns, getNames(retVars)...), funName, sym)
+		append(returns, getNames(retVars)...), funName, sym, receiverName)
 
 	// Build the extracted function.
 	newFunc := &ast.FuncDecl{
@@ -484,6 +536,18 @@ func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 		},
 		Body: extractedBlock,
 	}
+	if isMethod {
+		var names []*ast.Ident
+		if receiverUsed {
+			names = append(names, ast.NewIdent(receiverName))
+		}
+		newFunc.Recv = &ast.FieldList{
+			List: []*ast.Field{{
+				Names: names,
+				Type:  receiver.Type,
+			}},
+		}
+	}
 
 	// Create variable declarations for any identifiers that need to be initialized prior to
 	// calling the extracted function. We do not manually initialize variables if every return
@@ -844,24 +908,24 @@ type fnExtractParams struct {
 
 // CanExtractFunction reports whether the code in the given range can be
 // extracted to a function.
-func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File) (*fnExtractParams, bool, error) {
+func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) {
 	if rng.Start == rng.End {
-		return nil, false, fmt.Errorf("start and end are equal")
+		return nil, false, false, fmt.Errorf("start and end are equal")
 	}
 	tok := fset.File(file.Pos())
 	if tok == nil {
-		return nil, false, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
+		return nil, false, false, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
 	rng = adjustRangeForWhitespace(rng, tok, src)
 	path, _ := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
 	if len(path) == 0 {
-		return nil, false, fmt.Errorf("no path enclosing interval")
+		return nil, false, false, fmt.Errorf("no path enclosing interval")
 	}
 	// Node that encloses the selection must be a statement.
 	// TODO: Support function extraction for an expression.
 	_, ok := path[0].(ast.Stmt)
 	if !ok {
-		return nil, false, fmt.Errorf("node is not a statement")
+		return nil, false, false, fmt.Errorf("node is not a statement")
 	}
 
 	// Find the function declaration that encloses the selection.
@@ -873,7 +937,7 @@ func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *a
 		}
 	}
 	if outer == nil {
-		return nil, false, fmt.Errorf("no enclosing function")
+		return nil, false, false, fmt.Errorf("no enclosing function")
 	}
 
 	// Find the nodes at the start and end of the selection.
@@ -893,7 +957,7 @@ func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *a
 		return n.Pos() <= rng.End
 	})
 	if start == nil || end == nil {
-		return nil, false, fmt.Errorf("range does not map to AST nodes")
+		return nil, false, false, fmt.Errorf("range does not map to AST nodes")
 	}
 	return &fnExtractParams{
 		tok:   tok,
@@ -901,7 +965,7 @@ func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *a
 		rng:   rng,
 		outer: outer,
 		start: start,
-	}, true, nil
+	}, true, outer.Recv != nil, nil
 }
 
 // objUsed checks if the object is used within the range. It returns the first
@@ -1089,13 +1153,22 @@ func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]
 
 // generateFuncCall constructs a call expression for the extracted function, described by the
 // given parameters and return variables.
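+// If selector is non-empty, the call is built as selector.name(...), which is
+// how a call to an extracted method on its receiver is generated.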
-func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []ast.Expr, name string, token token.Token) ast.Node {
+func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []ast.Expr, name string, token token.Token, selector string) ast.Node {
 	var replace ast.Node
-	if hasReturnVals {
-		callExpr := &ast.CallExpr{
-			Fun:  ast.NewIdent(name),
+	callExpr := &ast.CallExpr{
+		Fun:  ast.NewIdent(name),
+		Args: params,
+	}
+	if selector != "" {
+		callExpr = &ast.CallExpr{
+			Fun: &ast.SelectorExpr{
+				X:   ast.NewIdent(selector),
+				Sel: ast.NewIdent(name),
+			},
 			Args: params,
 		}
+	}
+	if hasReturnVals {
 		if hasNonNestedReturn {
 			// Create a return statement that returns the result of the function call.
 			replace = &ast.ReturnStmt{
@@ -1111,10 +1184,7 @@ func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []
 			}
 		}
 	} else {
-		replace = &ast.CallExpr{
-			Fun:  ast.NewIdent(name),
-			Args: params,
-		}
+		replace = callExpr
 	}
 	return replace
 }
diff --git a/internal/lsp/source/fix.go b/internal/lsp/source/fix.go
index 6a012396cc9..e0046ee589e 100644
--- a/internal/lsp/source/fix.go
+++ b/internal/lsp/source/fix.go
@@ -19,27 +19,43 @@ import (
 	errors "golang.org/x/xerrors"
 )
 
-// SuggestedFixFunc is a function used to get the suggested fixes for a given
-// gopls command, some of which are provided by go/analysis.Analyzers. Some of
-// the analyzers in internal/lsp/analysis are not efficient enough to include
-// suggested fixes with their diagnostics, so we have to compute them
-// separately. Such analyzers should provide a function with a signature of
-// SuggestedFixFunc.
-type SuggestedFixFunc func(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
+type (
+	// SuggestedFixFunc is a function used to get the suggested fixes for a given
+	// gopls command, some of which are provided by go/analysis.Analyzers. Some of
+	// the analyzers in internal/lsp/analysis are not efficient enough to include
+	// suggested fixes with their diagnostics, so we have to compute them
+	// separately. Such analyzers should provide a function with a signature of
+	// SuggestedFixFunc.
+	SuggestedFixFunc  func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*analysis.SuggestedFix, error)
+	singleFileFixFunc func(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
+)
 
 const (
 	FillStruct      = "fill_struct"
 	UndeclaredName  = "undeclared_name"
 	ExtractVariable = "extract_variable"
 	ExtractFunction = "extract_function"
+	ExtractMethod   = "extract_method"
 )
 
 // suggestedFixes maps a suggested fix command id to its handler.
 var suggestedFixes = map[string]SuggestedFixFunc{
-	FillStruct:      fillstruct.SuggestedFix,
-	UndeclaredName:  undeclaredname.SuggestedFix,
-	ExtractVariable: extractVariable,
-	ExtractFunction: extractFunction,
+	FillStruct:      singleFile(fillstruct.SuggestedFix),
+	UndeclaredName:  singleFile(undeclaredname.SuggestedFix),
+	ExtractVariable: singleFile(extractVariable),
+	ExtractFunction: singleFile(extractFunction),
+	ExtractMethod:   singleFile(extractMethod),
+}
+
+// singleFile adapts a fix function that expects inputs for a single file into a SuggestedFixFunc.
+func singleFile(sf singleFileFixFunc) SuggestedFixFunc {
+	return func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*analysis.SuggestedFix, error) {
+		fset, rng, src, file, pkg, info, err := getAllSuggestedFixInputs(ctx, snapshot, fh, pRng)
+		if err != nil {
+			return nil, err
+		}
+		return sf(fset, rng, src, file, pkg, info)
+	}
 }
 
 func SuggestedFixFromCommand(cmd protocol.Command, kind protocol.CodeActionKind) SuggestedFix {
@@ -57,57 +73,66 @@ func ApplyFix(ctx context.Context, fix string, snapshot Snapshot, fh VersionedFi
 	if !ok {
 		return nil, fmt.Errorf("no suggested fix function for %s", fix)
 	}
-	fset, rng, src, file, m, pkg, info, err := getAllSuggestedFixInputs(ctx, snapshot, fh, pRng)
-	if err != nil {
-		return nil, err
-	}
-	suggestion, err := handler(fset, rng, src, file, pkg, info)
+	suggestion, err := handler(ctx, snapshot, fh, pRng)
 	if err != nil {
 		return nil, err
 	}
 	if suggestion == nil {
 		return nil, nil
 	}
-
-	var edits []protocol.TextDocumentEdit
+	fset := snapshot.FileSet()
+	editsPerFile := map[span.URI]*protocol.TextDocumentEdit{}
 	for _, edit := range suggestion.TextEdits {
-		rng := span.NewRange(fset, edit.Pos, edit.End)
-		spn, err := rng.Span()
+		spn, err := span.NewRange(fset, edit.Pos, edit.End).Span()
 		if err != nil {
 			return nil, err
 		}
-		clRng, err := m.Range(spn)
+		fh, err := snapshot.GetVersionedFile(ctx, spn.URI())
 		if err != nil {
 			return nil, err
 		}
-		edits = append(edits, protocol.TextDocumentEdit{
-			TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
-				Version: fh.Version(),
-				TextDocumentIdentifier: protocol.TextDocumentIdentifier{
-					URI: protocol.URIFromSpanURI(fh.URI()),
-				},
-			},
-			Edits: []protocol.TextEdit{
-				{
-					Range:   clRng,
-					NewText: string(edit.NewText),
+		te, ok := editsPerFile[spn.URI()]
+		if !ok {
+			te = &protocol.TextDocumentEdit{
+				TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
+					Version: fh.Version(),
+					TextDocumentIdentifier: protocol.TextDocumentIdentifier{
+						URI: protocol.URIFromSpanURI(fh.URI()),
+					},
 				},
-			},
+			}
+			editsPerFile[spn.URI()] = te
+		}
+		_, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
+		if err != nil {
+			return nil, err
+		}
+		rng, err := pgf.Mapper.Range(spn)
+		if err != nil {
+			return nil, err
+		}
+		te.Edits = append(te.Edits, protocol.TextEdit{
+			Range:   rng,
+			NewText: string(edit.NewText),
 		})
 	}
+	var edits []protocol.TextDocumentEdit
+	for _, edit := range editsPerFile {
+		edits = append(edits, *edit)
+	}
 	return edits, nil
 }
 
 // getAllSuggestedFixInputs is a helper function to collect all possible needed
 // inputs for an AppliesFunc or SuggestedFixFunc.
-func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, span.Range, []byte, *ast.File, *protocol.ColumnMapper, *types.Package, *types.Info, error) {
+func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, span.Range, []byte, *ast.File, *types.Package, *types.Info, error) {
 	pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
 	if err != nil {
-		return nil, span.Range{}, nil, nil, nil, nil, nil, errors.Errorf("getting file for Identifier: %w", err)
+		return nil, span.Range{}, nil, nil, nil, nil, errors.Errorf("getting file for Identifier: %w", err)
 	}
 	rng, err := pgf.Mapper.RangeToSpanRange(pRng)
 	if err != nil {
-		return nil, span.Range{}, nil, nil, nil, nil, nil, err
+		return nil, span.Range{}, nil, nil, nil, nil, err
 	}
-	return snapshot.FileSet(), rng, pgf.Src, pgf.File, pgf.Mapper, pkg.GetTypes(), pkg.GetTypesInfo(), nil
+	return snapshot.FileSet(), rng, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo(), nil
 }
diff --git a/internal/lsp/source/format.go b/internal/lsp/source/format.go
index ca76d2080e1..0d61172a245 100644
--- a/internal/lsp/source/format.go
+++ b/internal/lsp/source/format.go
@@ -19,7 +19,9 @@ import (
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/imports"
 	"golang.org/x/tools/internal/lsp/diff"
+	"golang.org/x/tools/internal/lsp/lsppos"
 	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/span"
 )
 
 // Format formats a file with a given range.
@@ -177,7 +179,7 @@ func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Opti
 	if err != nil {
 		return nil, err
 	}
-	return ToProtocolEdits(pgf.Mapper, edits)
+	return ProtocolEditsFromSource([]byte(left), edits, pgf.Mapper.Converter)
 }
 
 // importPrefix returns the prefix of the given file content through the final
@@ -280,6 +282,37 @@ func computeTextEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile,
 	return ToProtocolEdits(pgf.Mapper, edits)
 }
 
+// ProtocolEditsFromSource converts text edits to LSP edits using the original
+// source.
+func ProtocolEditsFromSource(src []byte, edits []diff.TextEdit, converter span.Converter) ([]protocol.TextEdit, error) {
+	m := lsppos.NewMapper(src)
+	var result []protocol.TextEdit
+	for _, edit := range edits {
+		spn, err := edit.Span.WithOffset(converter)
+		if err != nil {
+			return nil, fmt.Errorf("computing offsets: %v", err)
+		}
+		startLine, startChar := m.Position(spn.Start().Offset())
+		endLine, endChar := m.Position(spn.End().Offset())
+		if startLine < 0 || endLine < 0 {
+			return nil, fmt.Errorf("out of bounds span: %v", spn)
+		}
+
+		pstart := protocol.Position{Line: uint32(startLine), Character: uint32(startChar)}
+		pend := protocol.Position{Line: uint32(endLine), Character: uint32(endChar)}
+		if pstart == pend && edit.NewText == "" {
+			// Degenerate case, which may result from a diff tool wanting to delete
+			// '\r' in line endings. Filter it out.
+			continue
+		}
+		result = append(result, protocol.TextEdit{
+			Range:   protocol.Range{Start: pstart, End: pend},
+			NewText: edit.NewText,
+		})
+	}
+	return result, nil
+}
+
 func ToProtocolEdits(m *protocol.ColumnMapper, edits []diff.TextEdit) ([]protocol.TextEdit, error) {
 	if edits == nil {
 		return nil, nil
diff --git a/internal/lsp/source/hover.go b/internal/lsp/source/hover.go
index be2bfe209bc..a2c731a71bf 100644
--- a/internal/lsp/source/hover.go
+++ b/internal/lsp/source/hover.go
@@ -14,9 +14,12 @@ import (
 	"go/format"
 	"go/token"
 	"go/types"
+	"strconv"
 	"strings"
 	"time"
+	"unicode/utf8"
 
+	"golang.org/x/text/unicode/runenames"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/typeparams"
@@ -66,6 +69,9 @@ type HoverInformation struct {
 func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
 	ident, err := Identifier(ctx, snapshot, fh, position)
 	if err != nil {
+		if hover, innerErr := hoverRune(ctx, snapshot, fh, position); innerErr == nil {
+			return hover, nil
+		}
 		return nil, nil
 	}
 	h, err := HoverIdentifier(ctx, ident)
@@ -93,12 +99,161 @@ func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position proto
 	}, nil
 }
 
+func hoverRune(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
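+	// The resulting hover describes the rune's printed form, code point, and
+	// Unicode name, e.g. "'⌘', U+2318, PLACE OF INTEREST SIGN" (illustrative;
+	// the exact name comes from the runenames tables).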
+	ctx, done := event.Start(ctx, "source.hoverRune")
+	defer done()
+
+	r, mrng, err := findRune(ctx, snapshot, fh, position)
+	if err != nil {
+		return nil, err
+	}
+	rng, err := mrng.Range()
+	if err != nil {
+		return nil, err
+	}
+
+	var desc string
+	runeName := runenames.Name(r)
+	if len(runeName) > 0 && runeName[0] == '<' {
+		// Check if the rune looks like an HTML tag. If so, trim the surrounding <>
+		// characters to work around https://github.com/microsoft/vscode/issues/124042.
+		runeName = strings.TrimRight(runeName[1:], ">")
+	}
+	if strconv.IsPrint(r) {
+		desc = fmt.Sprintf("'%s', U+%04X, %s", string(r), uint32(r), runeName)
+	} else {
+		desc = fmt.Sprintf("U+%04X, %s", uint32(r), runeName)
+	}
+	return &protocol.Hover{
+		Contents: protocol.MarkupContent{
+			Kind:  snapshot.View().Options().PreferredContentFormat,
+			Value: desc,
+		},
+		Range: rng,
+	}, nil
+}
+
+// ErrNoRuneFound is the error returned when no rune is found at a particular position.
+var ErrNoRuneFound = errors.New("no rune found")
+
+// findRune returns rune information for a position in a file.
+func findRune(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) (rune, MappedRange, error) {
+	pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	spn, err := pgf.Mapper.PointSpan(pos)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	rng, err := spn.Range(pgf.Mapper.Converter)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+
+	// Find the basic literal enclosing the given position, if there is one.
+	var lit *ast.BasicLit
+	var found bool
+	ast.Inspect(pgf.File, func(n ast.Node) bool {
+		if found {
+			return false
+		}
+		if n, ok := n.(*ast.BasicLit); ok && rng.Start >= n.Pos() && rng.Start <= n.End() {
+			lit = n
+			found = true
+		}
+		return !found
+	})
+	if !found {
+		return 0, MappedRange{}, ErrNoRuneFound
+	}
+
+	var r rune
+	var start, end token.Pos
+	switch lit.Kind {
+	case token.CHAR:
+		s, err := strconv.Unquote(lit.Value)
+		if err != nil {
+			// If the conversion fails, the syntax is invalid, so there is no rune
+			// to be found.
+			return 0, MappedRange{}, ErrNoRuneFound
+		}
+		r, _ = utf8.DecodeRuneInString(s)
+		if r == utf8.RuneError {
+			return 0, MappedRange{}, fmt.Errorf("rune error")
+		}
+		start, end = lit.Pos(), lit.End()
+	case token.INT:
+		// It's an integer; scan only if it is a hex literal whose bit size
+		// ranges from 8 to 32.
+		if !(strings.HasPrefix(lit.Value, "0x") && len(lit.Value[2:]) >= 2 && len(lit.Value[2:]) <= 8) {
+			return 0, MappedRange{}, ErrNoRuneFound
+		}
+		v, err := strconv.ParseUint(lit.Value[2:], 16, 32)
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		r = rune(v)
+		if r == utf8.RuneError {
+			return 0, MappedRange{}, fmt.Errorf("rune error")
+		}
+		start, end = lit.Pos(), lit.End()
+	case token.STRING:
+		// It's a string; scan only if it contains a Unicode escape sequence under
+		// or before the current cursor position.
+		var found bool
+		strMappedRng, err := posToMappedRange(snapshot, pkg, lit.Pos(), lit.End())
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		strRng, err := strMappedRng.Range()
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		offset := strRng.Start.Character
+		for i := pos.Character - offset; i > 0; i-- {
+			// Start at the cursor position and search backward for the beginning of a rune escape sequence.
+			rr, _ := utf8.DecodeRuneInString(lit.Value[i:])
+			if rr == utf8.RuneError {
+				return 0, MappedRange{}, fmt.Errorf("rune error")
+			}
+			if rr == '\\' {
+				// Got the beginning, decode it.
+				var tail string
+				r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"')
+				if err != nil {
+					// If the conversion fails, the syntax is invalid, so there is no rune to be found.
+					return 0, MappedRange{}, ErrNoRuneFound
+				}
+				// Only the rune escape sequence part of the string has to be highlighted; recompute the range.
+				runeLen := len(lit.Value) - (int(i) + len(tail))
+				start = token.Pos(int(lit.Pos()) + int(i))
+				end = token.Pos(int(start) + runeLen)
+				found = true
+				break
+			}
+		}
+		if !found {
+			// No escape sequence found
+			return 0, MappedRange{}, ErrNoRuneFound
+		}
+	default:
+		return 0, MappedRange{}, ErrNoRuneFound
+	}
+
+	mappedRange, err := posToMappedRange(snapshot, pkg, start, end)
+	if err != nil {
+		return 0, MappedRange{}, err
+	}
+	return r, mappedRange, nil
+}
+
 func HoverIdentifier(ctx context.Context, i *IdentifierInfo) (*HoverInformation, error) {
 	ctx, done := event.Start(ctx, "source.Hover")
 	defer done()
 
 	fset := i.Snapshot.FileSet()
-	h, err := HoverInfo(ctx, i.Snapshot, i.pkg, i.Declaration.obj, i.Declaration.node, i.Declaration.fullSpec)
+	h, err := HoverInfo(ctx, i.Snapshot, i.pkg, i.Declaration.obj, i.Declaration.node, i.Declaration.fullDecl)
 	if err != nil {
 		return nil, err
 	}
@@ -117,6 +272,16 @@ func HoverIdentifier(ctx context.Context, i *IdentifierInfo) (*HoverInformation,
 			}
 			h.Signature = prefix + h.Signature
 		}
+
+		// Check if the variable is an integer whose value we can present in a more
+		// user-friendly way, i.e. `var hex = 0xe34e` becomes `var hex = 58190`
+		if spec, ok := x.(*ast.ValueSpec); ok && len(spec.Values) > 0 {
+			if lit, ok := spec.Values[0].(*ast.BasicLit); ok && len(spec.Names) > 0 {
+				val := constant.MakeFromLiteral(types.ExprString(lit), lit.Kind, 0)
+				h.Signature = fmt.Sprintf("var %s = %s", spec.Names[0], val)
+			}
+		}
+
 	case types.Object:
 		// If the variable is implicitly declared in a type switch, we need to
 		// manually generate its object string.
@@ -241,7 +406,7 @@ func moduleAtVersion(path string, i *IdentifierInfo) (string, string, bool) {
 func objectString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string {
 	// If the signature type was inferred, prefer the preferred signature with a
 	// comment showing the generic signature.
-	if sig, _ := obj.Type().(*types.Signature); sig != nil && len(typeparams.ForSignature(sig)) > 0 && inferred != nil {
+	if sig, _ := obj.Type().(*types.Signature); sig != nil && typeparams.ForSignature(sig).Len() > 0 && inferred != nil {
 		obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred)
 		str := types.ObjectString(obj2, qf)
 		// Try to avoid overly long lines.
@@ -274,15 +439,28 @@ func objectString(obj types.Object, qf types.Qualifier, inferred *types.Signatur
 }
 
 // HoverInfo returns a HoverInformation struct for an ast node and its type
-// object.
-func HoverInfo(ctx context.Context, s Snapshot, pkg Package, obj types.Object, node ast.Node, spec ast.Spec) (*HoverInformation, error) {
+// object. pkgNode should be the actual node used in type checking, while
+// fullDecl may be a separate declaration with more complete syntactic information.
+func HoverInfo(ctx context.Context, s Snapshot, pkg Package, obj types.Object, pkgNode ast.Node, fullDecl ast.Decl) (*HoverInformation, error) {
 	var info *HoverInformation
 
+	// This is problematic for a number of reasons. We really need a more general
+	// mechanism to validate the coherence of the AST with type information, but
+	// absent that we must do our best to ensure that we don't use fullDecl when
+	// we actually need the node that was type checked.
+	//
+	// pkgNode may be nil, if it was eliminated from the type-checked syntax. In
+	// that case, use fullDecl if available.
+	node := pkgNode
+	if node == nil && fullDecl != nil {
+		node = fullDecl
+	}
+
 	switch node := node.(type) {
 	case *ast.Ident:
 		// The package declaration.
 		for _, f := range pkg.GetSyntax() {
-			if f.Name == node {
+			if f.Name == pkgNode {
 				info = &HoverInformation{comment: f.Doc}
 			}
 		}
@@ -306,6 +484,24 @@ func HoverInfo(ctx context.Context, s Snapshot, pkg Package, obj types.Object, n
 	case *ast.GenDecl:
 		switch obj := obj.(type) {
 		case *types.TypeName, *types.Var, *types.Const, *types.Func:
+			// Always use the full declaration here if we have it, because the
+			// dependent code doesn't rely on pointer identity. This is fragile.
+			if d, _ := fullDecl.(*ast.GenDecl); d != nil {
+				node = d
+			}
+			// obj may not have been produced by type checking the AST containing
+			// node, so we need to be careful about using token.Pos.
+			tok := s.FileSet().File(obj.Pos())
+			offset := tok.Offset(obj.Pos())
+			tok2 := s.FileSet().File(node.Pos())
+			var spec ast.Spec
+			for _, s := range node.Specs {
+				// Avoid panics by guarding the calls to token.Offset (golang/go#48249).
+				if InRange(tok2, s.Pos()) && InRange(tok2, s.End()) && tok2.Offset(s.Pos()) <= offset && offset <= tok2.Offset(s.End()) {
+					spec = s
+					break
+				}
+			}
 			var err error
 			info, err = formatGenDecl(node, spec, obj, obj.Type())
 			if err != nil {
@@ -386,14 +582,6 @@ func formatGenDecl(node *ast.GenDecl, spec ast.Spec, obj types.Object, typ types
 			return formatGenDecl(node, spec, obj, typ.Underlying())
 		}
 	}
-	if spec == nil {
-		for _, s := range node.Specs {
-			if s.Pos() <= obj.Pos() && obj.Pos() <= s.End() {
-				spec = s
-				break
-			}
-		}
-	}
 	if spec == nil {
 		return nil, errors.Errorf("no spec for node %v at position %v", node, obj.Pos())
 	}
@@ -454,6 +642,15 @@ func formatVar(node ast.Spec, obj types.Object, decl *ast.GenDecl) *HoverInforma
 		if comment == nil {
 			comment = spec.Comment
 		}
+
+		// We need the AST nodes for variable declarations of basic literals with
+		// associated values so that we can augment their hover with more information.
+		if _, ok := obj.(*types.Var); ok && spec.Type == nil && len(spec.Values) > 0 {
+			if _, ok := spec.Values[0].(*ast.BasicLit); ok {
+				return &HoverInformation{source: spec, comment: comment}
+			}
+		}
+
 		return &HoverInformation{source: obj, comment: comment}
 	}
 
diff --git a/internal/lsp/source/identifier.go b/internal/lsp/source/identifier.go
index ee8684bdee1..2bc3431d7d8 100644
--- a/internal/lsp/source/identifier.go
+++ b/internal/lsp/source/identifier.go
@@ -57,10 +57,11 @@ type Declaration struct {
 
 	// The typechecked node.
 	node ast.Node
-	// Optional: the fully parsed spec, to be used for formatting in cases where
+
+	// Optional: the fully parsed node, to be used for formatting in cases where
 	// node has missing information. This could be the case when node was parsed
 	// in ParseExported mode.
-	fullSpec ast.Spec
+	fullDecl ast.Decl
 
 	// The typechecked object.
 	obj types.Object
@@ -77,7 +78,7 @@ func Identifier(ctx context.Context, snapshot Snapshot, fh FileHandle, pos proto
 	ctx, done := event.Start(ctx, "source.Identifier")
 	defer done()
 
-	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), TypecheckAll)
+	pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), TypecheckAll, false)
 	if err != nil {
 		return nil, err
 	}
@@ -290,8 +291,7 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
 	}
 	// Ensure that we have the full declaration, in case the declaration was
 	// parsed in ParseExported and therefore could be missing information.
-	result.Declaration.fullSpec, err = fullSpec(snapshot, result.Declaration.obj, declPkg)
-	if err != nil {
+	if result.Declaration.fullDecl, err = fullNode(snapshot, result.Declaration.obj, declPkg); err != nil {
 		return nil, err
 	}
 	typ := pkg.GetTypesInfo().TypeOf(result.ident)
@@ -314,10 +314,10 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
 	return result, nil
 }
 
-// fullSpec tries to extract the full spec corresponding to obj's declaration.
+// fullNode tries to extract the full declaration corresponding to obj's position.
 // If the package was not parsed in full, the declaration file will be
 // re-parsed to ensure it has complete syntax.
-func fullSpec(snapshot Snapshot, obj types.Object, pkg Package) (ast.Spec, error) {
+func fullNode(snapshot Snapshot, obj types.Object, pkg Package) (ast.Decl, error) {
 	// declaration in a different package... make sure we have full AST information.
 	tok := snapshot.FileSet().File(obj.Pos())
 	uri := span.URIFromPath(tok.Name())
@@ -338,9 +338,9 @@ func fullSpec(snapshot Snapshot, obj types.Object, pkg Package) (ast.Spec, error
 		}
 	}
 	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
-	if len(path) > 1 {
-		if spec, _ := path[1].(*ast.TypeSpec); spec != nil {
-			return spec, nil
+	for _, n := range path {
+		if decl, ok := n.(ast.Decl); ok {
+			return decl, nil
 		}
 	}
 	return nil, nil
@@ -371,22 +371,25 @@ func inferredSignature(info *types.Info, path []ast.Node) *types.Signature {
 	case *ast.CallExpr:
 		_, sig := typeparams.GetInferred(info, n)
 		return sig
-	case *ast.IndexExpr:
-		// If the IndexExpr is fully instantiated, we consider that 'inference' for
-		// gopls' purposes.
-		sig, _ := info.TypeOf(n).(*types.Signature)
-		if sig != nil && len(typeparams.ForSignature(sig)) == 0 {
-			return sig
-		}
-		_, sig = typeparams.GetInferred(info, n)
-		if sig != nil {
-			return sig
-		}
-		if len(path) >= 2 {
-			if call, _ := path[2].(*ast.CallExpr); call != nil {
-				_, sig := typeparams.GetInferred(info, call)
+	default:
+		if ix := typeparams.GetIndexExprData(n); ix != nil {
+			e := n.(ast.Expr)
+			// If the IndexExpr is fully instantiated, we consider that 'inference' for
+			// gopls' purposes.
+			sig, _ := info.TypeOf(e).(*types.Signature)
+			if sig != nil && typeparams.ForSignature(sig).Len() == 0 {
+				return sig
+			}
+			_, sig = typeparams.GetInferred(info, e)
+			if sig != nil {
 				return sig
 			}
+			if len(path) >= 2 {
+				if call, _ := path[2].(*ast.CallExpr); call != nil {
+					_, sig := typeparams.GetInferred(info, call)
+					return sig
+				}
+			}
 		}
 	}
 	return nil
diff --git a/internal/lsp/source/implementation.go b/internal/lsp/source/implementation.go
index 379471faae0..04aea37f9b0 100644
--- a/internal/lsp/source/implementation.go
+++ b/internal/lsp/source/implementation.go
@@ -15,6 +15,7 @@ import (
 
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/span"
 	"golang.org/x/xerrors"
 )
 
@@ -65,7 +66,7 @@ func implementations(ctx context.Context, s Snapshot, f FileHandle, pp protocol.
 		fset  = s.FileSet()
 	)
 
-	qos, err := qualifiedObjsAtProtocolPos(ctx, s, f, pp)
+	qos, err := qualifiedObjsAtProtocolPos(ctx, s, f.URI(), pp)
 	if err != nil {
 		return nil, err
 	}
@@ -213,19 +214,72 @@ var (
 // referenced at the given position. An object will be returned for
 // every package that the file belongs to, in every typechecking mode
 // applicable.
-func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, fh FileHandle, pp protocol.Position) ([]qualifiedObject, error) {
-	pkgs, err := s.PackagesForFile(ctx, fh.URI(), TypecheckAll)
+func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, uri span.URI, pp protocol.Position) ([]qualifiedObject, error) {
+	pkgs, err := s.PackagesForFile(ctx, uri, TypecheckAll, false)
 	if err != nil {
 		return nil, err
 	}
-	// Check all the packages that the file belongs to.
+	if len(pkgs) == 0 {
+		return nil, errNoObjectFound
+	}
+	pkg := pkgs[0]
+	var offset int
+	pgf, err := pkg.File(uri)
+	if err != nil {
+		return nil, err
+	}
+	spn, err := pgf.Mapper.PointSpan(pp)
+	if err != nil {
+		return nil, err
+	}
+	rng, err := spn.Range(pgf.Mapper.Converter)
+	if err != nil {
+		return nil, err
+	}
+	offset = pgf.Tok.Offset(rng.Start)
+	return qualifiedObjsAtLocation(ctx, s, objSearchKey{uri, offset}, map[objSearchKey]bool{})
+}
+
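+// objSearchKey identifies a file location (URI and byte offset) that has been
+// searched for referenced objects.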
+type objSearchKey struct {
+	uri    span.URI
+	offset int
+}
+
+// qualifiedObjsAtLocation finds all objects referenced at offset in uri, across
+// all packages in the snapshot.
+func qualifiedObjsAtLocation(ctx context.Context, s Snapshot, key objSearchKey, seen map[objSearchKey]bool) ([]qualifiedObject, error) {
+	if seen[key] {
+		return nil, nil
+	}
+	seen[key] = true
+
+	// We search for referenced objects starting with all packages containing the
+	// current location, and then repeating the search for every distinct object
+	// location discovered.
+	//
+	// In the common case, there should be at most one additional location to
+	// consider: the definition of the object referenced by the location. But we
+	// try to be comprehensive in case we ever support variations on build
+	// constraints.
+
+	pkgs, err := s.PackagesForFile(ctx, key.uri, TypecheckAll, false)
+	if err != nil {
+		return nil, err
+	}
+
+	// Report objects in the order we encounter them. This ensures that the first
+	// result is at the cursor...
 	var qualifiedObjs []qualifiedObject
+	// ...but avoid duplicates.
+	seenObjs := map[types.Object]bool{}
+
 	for _, searchpkg := range pkgs {
-		astFile, pos, err := getASTFile(searchpkg, fh, pp)
+		pgf, err := searchpkg.File(key.uri)
 		if err != nil {
 			return nil, err
 		}
-		path := pathEnclosingObjNode(astFile, pos)
+		pos := pgf.Tok.Pos(key.offset)
+		path := pathEnclosingObjNode(pgf.File, pos)
 		if path == nil {
 			continue
 		}
@@ -279,6 +333,41 @@ func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, fh FileHandle,
 				sourcePkg: searchpkg,
 				node:      path[0],
 			})
+			seenObjs[obj] = true
+
+			// If the qualified object is in another file (or more likely, another
+			// package), it's possible that there is another copy of it in a package
+			// that we haven't searched, e.g. a test variant. See golang/go#47564.
+			//
+			// In order to be sure we've considered all packages, call
+			// qualifiedObjsAtLocation recursively for all locations we encounter. We
+			// could probably be more precise here, only continuing the search if obj
+			// is in another package, but this should be good enough to find all
+			// uses.
+
+			pos := obj.Pos()
+			var uri span.URI
+			offset := -1
+			for _, pgf := range pkg.CompiledGoFiles() {
+				if pgf.Tok.Base() <= int(pos) && int(pos) <= pgf.Tok.Base()+pgf.Tok.Size() {
+					offset = pgf.Tok.Offset(pos)
+					uri = pgf.URI
+				}
+			}
+			if offset >= 0 {
+				otherObjs, err := qualifiedObjsAtLocation(ctx, s, objSearchKey{uri, offset}, seen)
+				if err != nil {
+					return nil, err
+				}
+				for _, other := range otherObjs {
+					if !seenObjs[other.obj] {
+						qualifiedObjs = append(qualifiedObjs, other)
+						seenObjs[other.obj] = true
+					}
+				}
+			} else {
+				return nil, fmt.Errorf("missing file for position of %q in %q", obj.Name(), obj.Pkg().Name())
+			}
 		}
 	}
 	// Return an error if no objects were found since callers will assume that
@@ -289,22 +378,6 @@ func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, fh FileHandle,
 	return qualifiedObjs, nil
 }
 
-func getASTFile(pkg Package, f FileHandle, pos protocol.Position) (*ast.File, token.Pos, error) {
-	pgf, err := pkg.File(f.URI())
-	if err != nil {
-		return nil, 0, err
-	}
-	spn, err := pgf.Mapper.PointSpan(pos)
-	if err != nil {
-		return nil, 0, err
-	}
-	rng, err := spn.Range(pgf.Mapper.Converter)
-	if err != nil {
-		return nil, 0, err
-	}
-	return pgf.File, rng.Start, nil
-}
-
 // pathEnclosingObjNode returns the AST path to the object-defining
 // node associated with pos. "Object-defining" means either an
 // *ast.Ident mapped directly to a types.Object or an ast.Node mapped
@@ -349,11 +422,6 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node {
 			if pos == n.Star {
 				pos = n.X.Pos()
 			}
-		case *ast.SelectorExpr:
-			// If pos is on the ".", move it into the selector.
-			if pos == n.X.End() {
-				pos = n.Sel.Pos()
-			}
 		}
 
 		return !found
diff --git a/internal/lsp/source/options.go b/internal/lsp/source/options.go
index c78bab0c434..9bc73a9d7c7 100644
--- a/internal/lsp/source/options.go
+++ b/internal/lsp/source/options.go
@@ -7,6 +7,7 @@ package source
 import (
 	"context"
 	"fmt"
+	"io"
 	"path/filepath"
 	"regexp"
 	"strings"
@@ -69,7 +70,7 @@ var (
 
 // DefaultOptions is the options that are used for Gopls execution independent
 // of any externally provided configuration (LSP initialization, command
-// invokation, etc.).
+// invocation, etc.).
 func DefaultOptions() *Options {
 	optionsOnce.Do(func() {
 		var commands []string
@@ -109,6 +110,7 @@ func DefaultOptions() *Options {
 					ExpandWorkspaceToModule:     true,
 					ExperimentalPackageCacheKey: true,
 					MemoryMode:                  ModeNormal,
+					DirectoryFilters:            []string{"-node_modules"},
 				},
 				UIOptions: UIOptions{
 					DiagnosticOptions: DiagnosticOptions{
@@ -288,7 +290,7 @@ type UIOptions struct {
 	// ```json5
 	// "gopls": {
 	// ...
-	//   "codelens": {
+	//   "codelenses": {
 	//     "generate": false,  // Don't show the `go generate` lens.
 	//     "gc_details": true  // Show a code lens toggling the display of gc's choices.
 	//   }
@@ -410,7 +412,7 @@ type NavigationOptions struct {
 	// ```json5
 	// "gopls": {
 	// ...
-	//   "symbolStyle": "dynamic",
+	//   "symbolStyle": "Dynamic",
 	// ...
 	// }
 	// ```
@@ -543,6 +545,7 @@ type SymbolMatcher string
 
 const (
 	SymbolFuzzy           SymbolMatcher = "Fuzzy"
+	SymbolFastFuzzy       SymbolMatcher = "FastFuzzy"
 	SymbolCaseInsensitive SymbolMatcher = "CaseInsensitive"
 	SymbolCaseSensitive   SymbolMatcher = "CaseSensitive"
 )
@@ -834,6 +837,7 @@ func (o *Options) set(name string, value interface{}, seen map[string]struct{})
 	case "symbolMatcher":
 		if s, ok := result.asOneOf(
 			string(SymbolFuzzy),
+			string(SymbolFastFuzzy),
 			string(SymbolCaseInsensitive),
 			string(SymbolCaseSensitive),
 		); ok {
@@ -1277,6 +1281,69 @@ type OptionJSON struct {
 	Hierarchy  string
 }
 
+func (o *OptionJSON) String() string {
+	return o.Name
+}
+
+func (o *OptionJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "**%v** *%v*\n\n", o.Name, o.Type)
+	writeStatus(w, o.Status)
+	enumValues := collectEnums(o)
+	fmt.Fprintf(w, "%v%v\nDefault: `%v`.\n\n", o.Doc, enumValues, o.Default)
+}
+
+func writeStatus(section io.Writer, status string) {
+	switch status {
+	case "":
+	case "advanced":
+		fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n")
+	case "debug":
+		fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n")
+	case "experimental":
+		fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n")
+	default:
+		fmt.Fprintf(section, "**Status: %s.**\n\n", status)
+	}
+}
+
+var parBreakRE = regexp.MustCompile("\n{2,}")
+
+func collectEnums(opt *OptionJSON) string {
+	var b strings.Builder
+	write := func(name, doc string, index, len int) {
+		if doc != "" {
+			unbroken := parBreakRE.ReplaceAllString(doc, "\\\n")
+			fmt.Fprintf(&b, "* %s", unbroken)
+		} else {
+			fmt.Fprintf(&b, "* `%s`", name)
+		}
+		if index < len-1 {
+			fmt.Fprint(&b, "\n")
+		}
+	}
+	if len(opt.EnumValues) > 0 && opt.Type == "enum" {
+		b.WriteString("\nMust be one of:\n\n")
+		for i, val := range opt.EnumValues {
+			write(val.Value, val.Doc, i, len(opt.EnumValues))
+		}
+	} else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) {
+		b.WriteString("\nCan contain any of:\n\n")
+		for i, val := range opt.EnumKeys.Keys {
+			write(val.Name, val.Doc, i, len(opt.EnumKeys.Keys))
+		}
+	}
+	return b.String()
+}
+
+func shouldShowEnumKeysInSettings(name string) bool {
+	// Both of these fields have too many possible options to print.
+	return !hardcodedEnumKeys(name)
+}
+
+func hardcodedEnumKeys(name string) bool {
+	return name == "analyses" || name == "codelenses"
+}
+
 type EnumKeys struct {
 	ValueType string
 	Keys      []EnumKey
@@ -1301,14 +1368,44 @@ type CommandJSON struct {
 	ResultDoc string
 }
 
+func (c *CommandJSON) String() string {
+	return c.Command
+}
+
+func (c *CommandJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", c.Title, c.Command, c.Doc)
+	if c.ArgDoc != "" {
+		fmt.Fprintf(w, "Args:\n\n```\n%s\n```\n\n", c.ArgDoc)
+	}
+	if c.ResultDoc != "" {
+		fmt.Fprintf(w, "Result:\n\n```\n%s\n```\n\n", c.ResultDoc)
+	}
+}
+
 type LensJSON struct {
 	Lens  string
 	Title string
 	Doc   string
 }
 
+func (l *LensJSON) String() string {
+	return l.Title
+}
+
+func (l *LensJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "%s (%s): %s", l.Title, l.Lens, l.Doc)
+}
+
 type AnalyzerJSON struct {
 	Name    string
 	Doc     string
 	Default bool
 }
+
+func (a *AnalyzerJSON) String() string {
+	return a.Name
+}
+
+func (a *AnalyzerJSON) Write(w io.Writer) {
+	fmt.Fprintf(w, "%s (%s): %v", a.Name, a.Doc, a.Default)
+}
diff --git a/internal/lsp/source/options_test.go b/internal/lsp/source/options_test.go
index 83cb7959e8e..f8260c1dd3e 100644
--- a/internal/lsp/source/options_test.go
+++ b/internal/lsp/source/options_test.go
@@ -18,7 +18,7 @@ func TestSetOption(t *testing.T) {
 	}{
 		{
 			name:  "symbolStyle",
-			value: "dynamic",
+			value: "Dynamic",
 			check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols },
 		},
 		{
diff --git a/internal/lsp/source/references.go b/internal/lsp/source/references.go
index a608f504be0..993b9f8a14f 100644
--- a/internal/lsp/source/references.go
+++ b/internal/lsp/source/references.go
@@ -6,6 +6,7 @@ package source
 
 import (
 	"context"
+	"fmt"
 	"go/ast"
 	"go/token"
 	"go/types"
@@ -33,7 +34,7 @@ func References(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Posit
 	ctx, done := event.Start(ctx, "source.References")
 	defer done()
 
-	qualifiedObjs, err := qualifiedObjsAtProtocolPos(ctx, s, f, pp)
+	qualifiedObjs, err := qualifiedObjsAtProtocolPos(ctx, s, f.URI(), pp)
 	// Don't return references for builtin types.
 	if errors.Is(err, errBuiltin) {
 		return nil, nil
@@ -68,7 +69,11 @@ func references(ctx context.Context, snapshot Snapshot, qos []qualifiedObject, i
 		seen       = make(map[token.Pos]bool)
 	)
 
-	filename := snapshot.FileSet().Position(qos[0].obj.Pos()).Filename
+	pos := qos[0].obj.Pos()
+	if pos == token.NoPos {
+		return nil, fmt.Errorf("no position for %s", qos[0].obj)
+	}
+	filename := snapshot.FileSet().Position(pos).Filename
 	pgf, err := qos[0].pkg.File(span.URIFromPath(filename))
 	if err != nil {
 		return nil, err
diff --git a/internal/lsp/source/rename.go b/internal/lsp/source/rename.go
index 70dfcfbc93c..2ad5d265fc1 100644
--- a/internal/lsp/source/rename.go
+++ b/internal/lsp/source/rename.go
@@ -51,7 +51,7 @@ func PrepareRename(ctx context.Context, snapshot Snapshot, f FileHandle, pp prot
 	ctx, done := event.Start(ctx, "source.PrepareRename")
 	defer done()
 
-	qos, err := qualifiedObjsAtProtocolPos(ctx, snapshot, f, pp)
+	qos, err := qualifiedObjsAtProtocolPos(ctx, snapshot, f.URI(), pp)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -94,7 +94,7 @@ func Rename(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Position,
 	ctx, done := event.Start(ctx, "source.Rename")
 	defer done()
 
-	qos, err := qualifiedObjsAtProtocolPos(ctx, s, f, pp)
+	qos, err := qualifiedObjsAtProtocolPos(ctx, s, f.URI(), pp)
 	if err != nil {
 		return nil, err
 	}
diff --git a/internal/lsp/source/rename_check.go b/internal/lsp/source/rename_check.go
index a46254c3cdd..3aafc391e64 100644
--- a/internal/lsp/source/rename_check.go
+++ b/internal/lsp/source/rename_check.go
@@ -195,7 +195,7 @@ func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
 			// The name r.to is defined in a superblock.
 			// Is that name referenced from within this block?
 			forEachLexicalRef(pkg, to, func(id *ast.Ident, block *types.Scope) bool {
-				_, obj := lexicalLookup(block, from.Name(), id.Pos())
+				_, obj := block.LookupParent(from.Name(), id.Pos())
 				if obj == from {
 					// super-block conflict
 					r.errorf(from.Pos(), "renaming this %s %q to %q",
@@ -215,10 +215,9 @@ func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
 	forEachLexicalRef(pkg, from, func(id *ast.Ident, block *types.Scope) bool {
 		// Find the block that defines the found reference.
 		// It may be an ancestor.
-		fromBlock, _ := lexicalLookup(block, from.Name(), id.Pos())
-
+		fromBlock, _ := block.LookupParent(from.Name(), id.Pos())
 		// See what r.to would resolve to in the same scope.
-		toBlock, to := lexicalLookup(block, r.to, id.Pos())
+		toBlock, to := block.LookupParent(r.to, id.Pos())
 		if to != nil {
 			// sub-block conflict
 			if deeper(toBlock, fromBlock) {
@@ -249,26 +248,6 @@ func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
 	}
 }
 
-// lexicalLookup is like (*types.Scope).LookupParent but respects the
-// environment visible at pos.  It assumes the relative position
-// information is correct with each file.
-func lexicalLookup(block *types.Scope, name string, pos token.Pos) (*types.Scope, types.Object) {
-	for b := block; b != nil; b = b.Parent() {
-		obj := b.Lookup(name)
-		// The scope of a package-level object is the entire package,
-		// so ignore pos in that case.
-		// No analogous clause is needed for file-level objects
-		// since no reference can appear before an import decl.
-		if obj == nil || obj.Pkg() == nil {
-			continue
-		}
-		if b == obj.Pkg().Scope() || obj.Pos() < pos {
-			return b, obj
-		}
-	}
-	return nil, nil
-}
-
 // deeper reports whether block x is lexically deeper than y.
 func deeper(x, y *types.Scope) bool {
 	if x == y || x == nil {
diff --git a/internal/lsp/source/signature_help.go b/internal/lsp/source/signature_help.go
index 620a8cf4505..9c52f997d7c 100644
--- a/internal/lsp/source/signature_help.go
+++ b/internal/lsp/source/signature_help.go
@@ -51,7 +51,12 @@ FindCall:
 			// which may be the parameter to the *ast.CallExpr.
 			// Don't show signature help in this case.
 			return nil, 0, errors.Errorf("no signature help within a function declaration")
+		case *ast.BasicLit:
+			if node.Kind == token.STRING {
+				return nil, 0, errors.Errorf("no signature help within a string literal")
+			}
 		}
+
 	}
 	if callExpr == nil || callExpr.Fun == nil {
 		return nil, 0, errors.Errorf("cannot find an enclosing function")
diff --git a/internal/lsp/source/source_test.go b/internal/lsp/source/source_test.go
index c09b2feadad..83ce712c831 100644
--- a/internal/lsp/source/source_test.go
+++ b/internal/lsp/source/source_test.go
@@ -576,12 +576,12 @@ func (r *runner) Definition(t *testing.T, spn span.Span, d tests.Definition) {
 	didSomething := false
 	if hover != "" {
 		didSomething = true
-		tag := fmt.Sprintf("%s-hover", d.Name)
+		tag := fmt.Sprintf("%s-hoverdef", d.Name)
 		expectHover := string(r.data.Golden(tag, d.Src.URI().Filename(), func() ([]byte, error) {
 			return []byte(hover), nil
 		}))
 		if hover != expectHover {
-			t.Errorf("hover for %s failed:\n%s", d.Src, tests.Diff(t, expectHover, hover))
+			t.Errorf("hoverdef for %s failed:\n%s", d.Src, tests.Diff(t, expectHover, hover))
 		}
 	}
 	if !d.OnlyHover {
@@ -682,6 +682,37 @@ func (r *runner) Highlight(t *testing.T, src span.Span, locations []span.Span) {
 	}
 }
 
+func (r *runner) Hover(t *testing.T, src span.Span, text string) {
+	ctx := r.ctx
+	_, srcRng, err := spanToRange(r.data, src)
+	if err != nil {
+		t.Fatal(err)
+	}
+	fh, err := r.snapshot.GetFile(r.ctx, src.URI())
+	if err != nil {
+		t.Fatal(err)
+	}
+	hover, err := source.Hover(ctx, r.snapshot, fh, srcRng.Start)
+	if err != nil {
+		t.Errorf("hover failed for %s: %v", src.URI(), err)
+	}
+	if text == "" {
+		if hover != nil {
+			t.Errorf("want nil, got %v\n", hover)
+		}
+	} else {
+		if hover == nil {
+			t.Fatalf("want hover result to not be nil")
+		}
+		if got := hover.Contents.Value; got != text {
+			t.Errorf("want %v, got %v\n", text, got)
+		}
+		if want, got := srcRng, hover.Range; want != got {
+			t.Errorf("want range %v, got %v instead", want, got)
+		}
+	}
+}
+
 func (r *runner) References(t *testing.T, src span.Span, itemList []span.Span) {
 	ctx := r.ctx
 	_, srcRng, err := spanToRange(r.data, src)
@@ -935,6 +966,7 @@ func (r *runner) Link(t *testing.T, uri span.URI, wantLinks []tests.Link) {}
 func (r *runner) SuggestedFix(t *testing.T, spn span.Span, actionKinds []string, expectedActions int) {
 }
 func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span) {}
+func (r *runner) MethodExtraction(t *testing.T, start span.Span, end span.Span)   {}
 func (r *runner) CodeLens(t *testing.T, uri span.URI, want []protocol.CodeLens)   {}
 func (r *runner) AddImport(t *testing.T, uri span.URI, expectedImport string)     {}
 
diff --git a/internal/lsp/source/util.go b/internal/lsp/source/util.go
index a30cc75e8e3..00ab860fa4b 100644
--- a/internal/lsp/source/util.go
+++ b/internal/lsp/source/util.go
@@ -282,9 +282,7 @@ func FindPackageFromPos(ctx context.Context, snapshot Snapshot, pos token.Pos) (
 		return nil, errors.Errorf("no file for pos %v", pos)
 	}
 	uri := span.URIFromPath(tok.Name())
-	// Search all packages: some callers may be working with packages not
-	// type-checked in workspace mode.
-	pkgs, err := snapshot.PackagesForFile(ctx, uri, TypecheckAll)
+	pkgs, err := snapshot.PackagesForFile(ctx, uri, TypecheckAll, true)
 	if err != nil {
 		return nil, err
 	}
@@ -544,3 +542,9 @@ func IsValidImport(pkgPath, importPkgPath string) bool {
 func IsCommandLineArguments(s string) bool {
 	return strings.Contains(s, "command-line-arguments")
 }
+
+// InRange reports whether the given position is in the given token.File.
+func InRange(tok *token.File, pos token.Pos) bool {
+	size := tok.Pos(tok.Size())
+	return int(pos) >= tok.Base() && pos <= size
+}
diff --git a/internal/lsp/source/view.go b/internal/lsp/source/view.go
index 74b77ca325c..2dd0dbc1d70 100644
--- a/internal/lsp/source/view.go
+++ b/internal/lsp/source/view.go
@@ -137,7 +137,7 @@ type Snapshot interface {
 
 	// PackagesForFile returns the packages that this file belongs to, checked
 	// in mode.
-	PackagesForFile(ctx context.Context, uri span.URI, mode TypecheckMode) ([]Package, error)
+	PackagesForFile(ctx context.Context, uri span.URI, mode TypecheckMode, includeTestVariants bool) ([]Package, error)
 
 	// PackageForFile returns a single package that this file belongs to,
 	// checked in mode and filtered by the package policy.
@@ -156,8 +156,17 @@ type Snapshot interface {
 	// in TypecheckWorkspace mode.
 	KnownPackages(ctx context.Context) ([]Package, error)
 
-	// WorkspacePackages returns the snapshot's top-level packages.
-	WorkspacePackages(ctx context.Context) ([]Package, error)
+	// ActivePackages returns the packages considered 'active' in the workspace.
+	//
+	// In normal memory mode, this is all workspace packages. In degraded memory
+	// mode, this is just the reverse transitive closure of open packages.
+	ActivePackages(ctx context.Context) ([]Package, error)
+
+	// Symbols returns all symbols in the snapshot.
+	Symbols(ctx context.Context) (map[span.URI][]Symbol, error)
+
+	// Metadata returns package metadata associated with the given file URI.
+	MetadataForFile(ctx context.Context, uri span.URI) ([]Metadata, error)
 
 	// GetCriticalError returns any critical errors in the workspace.
 	GetCriticalError(ctx context.Context) *CriticalError
@@ -296,6 +305,15 @@ type TidiedModule struct {
 	TidiedContent []byte
 }
 
+// Metadata represents package metadata retrieved from go/packages.
+type Metadata interface {
+	// PackageName is the package name.
+	PackageName() string
+
+	// PackagePath is the package path.
+	PackagePath() string
+}
+
 // Session represents a single connection from a client.
 // This is the level at which things like open files are maintained on behalf
 // of the client.
diff --git a/internal/lsp/source/workspace_symbol.go b/internal/lsp/source/workspace_symbol.go
index c0aabf2afea..1f6fd208950 100644
--- a/internal/lsp/source/workspace_symbol.go
+++ b/internal/lsp/source/workspace_symbol.go
@@ -7,13 +7,11 @@ package source
 import (
 	"context"
 	"fmt"
-	"go/ast"
-	"go/token"
 	"go/types"
+	"runtime"
 	"sort"
 	"strings"
 	"unicode"
-	"unicode/utf8"
 
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/lsp/fuzzy"
@@ -21,6 +19,15 @@ import (
 	"golang.org/x/tools/internal/span"
 )
 
+// Symbol holds a precomputed symbol value. Note: we avoid using the
+// protocol.SymbolInformation struct here in order to reduce the size of each
+// symbol.
+type Symbol struct {
+	Name  string
+	Kind  protocol.SymbolKind
+	Range protocol.Range
+}
+
 // maxSymbols defines the maximum number of symbol results that should ever be
 // sent in response to a client.
 const maxSymbols = 100
@@ -52,82 +59,80 @@ func WorkspaceSymbols(ctx context.Context, matcherType SymbolMatcher, style Symb
 	return sc.walk(ctx, views)
 }
 
-// A matcherFunc determines the matching score of a symbol.
+// A matcherFunc returns the index and score of a symbol match.
 //
 // See the comment for symbolCollector for more information.
-type matcherFunc func(name string) float64
+type matcherFunc func(chunks []string) (int, float64)
 
-// A symbolizer returns the best symbol match for name with pkg, according to
-// some heuristic.
+// A symbolizer returns the best symbol match for a name with pkg, according to
+// some heuristic. The symbol name is passed as the slice nameParts of logical
+// name pieces. For example, for myType.field the caller can pass either
+// []string{"myType.field"} or []string{"myType.", "field"}.
 //
 // See the comment for symbolCollector for more information.
-type symbolizer func(name string, pkg Package, m matcherFunc) (string, float64)
+type symbolizer func(name string, pkg Metadata, m matcherFunc) ([]string, float64)
 
-func fullyQualifiedSymbolMatch(name string, pkg Package, matcher matcherFunc) (string, float64) {
+func fullyQualifiedSymbolMatch(name string, pkg Metadata, matcher matcherFunc) ([]string, float64) {
 	_, score := dynamicSymbolMatch(name, pkg, matcher)
 	if score > 0 {
-		return pkg.PkgPath() + "." + name, score
+		return []string{pkg.PackagePath(), ".", name}, score
 	}
-	return "", 0
+	return nil, 0
 }
 
-func dynamicSymbolMatch(name string, pkg Package, matcher matcherFunc) (string, float64) {
-	// Prefer any package-qualified match.
-	pkgQualified := pkg.Name() + "." + name
-	if match, score := bestMatch(pkgQualified, matcher); match != "" {
-		return match, score
-	}
-	fullyQualified := pkg.PkgPath() + "." + name
-	if match, score := bestMatch(fullyQualified, matcher); match != "" {
-		return match, score
-	}
-	return "", 0
-}
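+// dynamicSymbolMatch returns the shortest qualified form of name that still
+// contains the match. For example, for a hypothetical package with path
+// "example.com/foo/bar" and name "bar", a match within the bare symbol name
+// returns just the name, a match within "bar.<name>" returns the
+// package-qualified form, and a match reaching into the import path returns
+// the fully qualified form at a reduced score.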
+func dynamicSymbolMatch(name string, pkg Metadata, matcher matcherFunc) ([]string, float64) {
+	var score float64
 
-func packageSymbolMatch(name string, pkg Package, matcher matcherFunc) (string, float64) {
-	qualified := pkg.Name() + "." + name
-	if matcher(qualified) > 0 {
-		return qualified, 1
+	endsInPkgName := strings.HasSuffix(pkg.PackagePath(), pkg.PackageName())
+
+	// If the package path does not end in the package name, we need to check the
+	// package-qualified symbol as an extra pass first.
+	if !endsInPkgName {
+		pkgQualified := []string{pkg.PackageName(), ".", name}
+		idx, score := matcher(pkgQualified)
+		nameStart := len(pkg.PackageName()) + 1
+		if score > 0 {
+			// If our match is contained entirely within the unqualified portion,
+			// just return that.
+			if idx >= nameStart {
+				return []string{name}, score
+			}
+			// Lower the score for matches that include the package name.
+			return pkgQualified, score * 0.8
+		}
 	}
-	return "", 0
-}
 
-// bestMatch returns the highest scoring symbol suffix of fullPath, starting
-// from the right and splitting on selectors and path components.
-//
-// e.g. given a symbol path of the form 'host.com/dir/pkg.type.field', we
-// check the match quality of the following:
-//  - field
-//  - type.field
-//  - pkg.type.field
-//  - dir/pkg.type.field
-//  - host.com/dir/pkg.type.field
-//
-// and return the best match, along with its score.
-//
-// This is used to implement the 'dynamic' symbol style.
-func bestMatch(fullPath string, matcher matcherFunc) (string, float64) {
-	pathParts := strings.Split(fullPath, "/")
-	dottedParts := strings.Split(pathParts[len(pathParts)-1], ".")
+	// Now try matching the fully qualified symbol.
+	fullyQualified := []string{pkg.PackagePath(), ".", name}
+	idx, score := matcher(fullyQualified)
 
-	var best string
-	var score float64
+	// As above, check if we matched just the unqualified symbol name.
+	nameStart := len(pkg.PackagePath()) + 1
+	if idx >= nameStart {
+		return []string{name}, score
+	}
 
-	for i := 0; i < len(dottedParts); i++ {
-		path := strings.Join(dottedParts[len(dottedParts)-1-i:], ".")
-		if match := matcher(path); match > score {
-			best = path
-			score = match
+	// If our package path ends in the package name, we'll have skipped the
+	// initial pass above, so check if we matched just the package-qualified
+	// name.
+	if endsInPkgName && idx >= 0 {
+		pkgStart := len(pkg.PackagePath()) - len(pkg.PackageName())
+		if idx >= pkgStart {
+			return []string{pkg.PackageName(), ".", name}, score
 		}
 	}
-	for i := 0; i < len(pathParts); i++ {
-		path := strings.Join(pathParts[len(pathParts)-1-i:], "/")
-		if match := matcher(path); match > score {
-			best = path
-			score = match
-		}
+
+	// Our match was not contained within the unqualified or package qualified
+	// symbol. Return the fully qualified symbol but discount the score.
+	return fullyQualified, score * 0.6
+}
+
+func packageSymbolMatch(name string, pkg Metadata, matcher matcherFunc) ([]string, float64) {
+	qualified := []string{pkg.PackageName(), ".", name}
+	if _, s := matcher(qualified); s > 0 {
+		return qualified, s
 	}
-	return best, score
+	return nil, 0
 }
 
 // symbolCollector holds context as we walk Packages, gathering symbols that
@@ -141,39 +146,14 @@ func bestMatch(fullPath string, matcher matcherFunc) (string, float64) {
 //    enables the 'symbolStyle' configuration option.
 type symbolCollector struct {
 	// These types parameterize the symbol-matching pass.
-	matcher    matcherFunc
+	matchers   []matcherFunc
 	symbolizer symbolizer
 
-	// current holds metadata for the package we are currently walking.
-	current *pkgView
-	curFile *ParsedGoFile
-
-	res [maxSymbols]symbolInformation
+	seen map[span.URI]bool
+	symbolStore
 }
 
 func newSymbolCollector(matcher SymbolMatcher, style SymbolStyle, query string) *symbolCollector {
-	var m matcherFunc
-	switch matcher {
-	case SymbolFuzzy:
-		m = parseQuery(query)
-	case SymbolCaseSensitive:
-		m = func(s string) float64 {
-			if strings.Contains(s, query) {
-				return 1
-			}
-			return 0
-		}
-	case SymbolCaseInsensitive:
-		q := strings.ToLower(query)
-		m = func(s string) float64 {
-			if strings.Contains(strings.ToLower(s), q) {
-				return 1
-			}
-			return 0
-		}
-	default:
-		panic(fmt.Errorf("unknown symbol matcher: %v", matcher))
-	}
 	var s symbolizer
 	switch style {
 	case DynamicSymbols:
@@ -185,10 +165,33 @@ func newSymbolCollector(matcher SymbolMatcher, style SymbolStyle, query string)
 	default:
 		panic(fmt.Errorf("unknown symbol style: %v", style))
 	}
-	return &symbolCollector{
-		matcher:    m,
-		symbolizer: s,
+	sc := &symbolCollector{symbolizer: s}
+	sc.matchers = make([]matcherFunc, runtime.GOMAXPROCS(-1))
+	for i := range sc.matchers {
+		sc.matchers[i] = buildMatcher(matcher, query)
 	}
+	return sc
+}
+
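+// buildMatcher returns the matcherFunc for the given matcher kind and query.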
+func buildMatcher(matcher SymbolMatcher, query string) matcherFunc {
+	switch matcher {
+	case SymbolFuzzy:
+		return parseQuery(query)
+	case SymbolFastFuzzy:
+		return fuzzy.NewSymbolMatcher(query).Match
+	case SymbolCaseSensitive:
+		return matchExact(query)
+	case SymbolCaseInsensitive:
+		q := strings.ToLower(query)
+		exact := matchExact(q)
+		wrapper := []string{""}
+		return func(chunks []string) (int, float64) {
+			s := strings.Join(chunks, "")
+			wrapper[0] = strings.ToLower(s)
+			return exact(wrapper)
+		}
+	}
+	panic(fmt.Errorf("unknown symbol matcher: %v", matcher))
 }
 
 // parseQuery parses a field-separated symbol query, extracting the special
@@ -206,7 +209,7 @@ func newSymbolCollector(matcher SymbolMatcher, style SymbolStyle, query string)
 func parseQuery(q string) matcherFunc {
 	fields := strings.Fields(q)
 	if len(fields) == 0 {
-		return func(string) float64 { return 0 }
+		return func([]string) (int, float64) { return -1, 0 }
 	}
 	var funcs []matcherFunc
 	for _, field := range fields {
@@ -214,218 +217,284 @@ func parseQuery(q string) matcherFunc {
 		switch {
 		case strings.HasPrefix(field, "^"):
 			prefix := field[1:]
-			f = smartCase(prefix, func(s string) float64 {
+			f = smartCase(prefix, func(chunks []string) (int, float64) {
+				s := strings.Join(chunks, "")
 				if strings.HasPrefix(s, prefix) {
-					return 1
+					return 0, 1
 				}
-				return 0
+				return -1, 0
 			})
 		case strings.HasPrefix(field, "'"):
 			exact := field[1:]
-			f = smartCase(exact, func(s string) float64 {
-				if strings.Contains(s, exact) {
-					return 1
-				}
-				return 0
-			})
+			f = smartCase(exact, matchExact(exact))
 		case strings.HasSuffix(field, "$"):
 			suffix := field[0 : len(field)-1]
-			f = smartCase(suffix, func(s string) float64 {
+			f = smartCase(suffix, func(chunks []string) (int, float64) {
+				s := strings.Join(chunks, "")
 				if strings.HasSuffix(s, suffix) {
-					return 1
+					return len(s) - len(suffix), 1
 				}
-				return 0
+				return -1, 0
 			})
 		default:
 			fm := fuzzy.NewMatcher(field)
-			f = func(s string) float64 {
-				return float64(fm.Score(s))
+			f = func(chunks []string) (int, float64) {
+				score := float64(fm.ScoreChunks(chunks))
+				ranges := fm.MatchedRanges()
+				if len(ranges) > 0 {
+					return ranges[0], score
+				}
+				return -1, score
 			}
 		}
 		funcs = append(funcs, f)
 	}
+	if len(funcs) == 1 {
+		return funcs[0]
+	}
 	return comboMatcher(funcs).match
 }
 
+func matchExact(exact string) matcherFunc {
+	return func(chunks []string) (int, float64) {
+		s := strings.Join(chunks, "")
+		if idx := strings.LastIndex(s, exact); idx >= 0 {
+			return idx, 1
+		}
+		return -1, 0
+	}
+}
+
 // smartCase returns a matcherFunc that is case-sensitive if q contains any
 // upper-case characters, and case-insensitive otherwise.
 func smartCase(q string, m matcherFunc) matcherFunc {
 	insensitive := strings.ToLower(q) == q
-	return func(s string) float64 {
+	wrapper := []string{""}
+	return func(chunks []string) (int, float64) {
+		s := strings.Join(chunks, "")
 		if insensitive {
 			s = strings.ToLower(s)
 		}
-		return m(s)
+		wrapper[0] = s
+		return m(wrapper)
 	}
 }
 
 type comboMatcher []matcherFunc
 
-func (c comboMatcher) match(s string) float64 {
+func (c comboMatcher) match(chunks []string) (int, float64) {
 	score := 1.0
+	first := 0
 	for _, f := range c {
-		score *= f(s)
-	}
-	return score
-}
-
-// walk walks views, gathers symbols, and returns the results.
-func (sc *symbolCollector) walk(ctx context.Context, views []View) (_ []protocol.SymbolInformation, err error) {
-	toWalk, err := sc.collectPackages(ctx, views)
-	if err != nil {
-		return nil, err
-	}
-	// Make sure we only walk files once (we might see them more than once due to
-	// build constraints).
-	seen := make(map[span.URI]bool)
-	for _, pv := range toWalk {
-		sc.current = pv
-		for _, pgf := range pv.pkg.CompiledGoFiles() {
-			if seen[pgf.URI] {
-				continue
-			}
-			seen[pgf.URI] = true
-			sc.curFile = pgf
-			sc.walkFilesDecls(pgf.File.Decls)
+		idx, s := f(chunks)
+		if idx < first {
+			first = idx
 		}
+		score *= s
 	}
-	return sc.results(), nil
+	return first, score
 }
 
-func (sc *symbolCollector) results() []protocol.SymbolInformation {
-	var res []protocol.SymbolInformation
-	for _, si := range sc.res {
-		if si.score <= 0 {
-			return res
-		}
-		res = append(res, si.asProtocolSymbolInformation())
+func (sc *symbolCollector) walk(ctx context.Context, views []View) ([]protocol.SymbolInformation, error) {
+
+	// Use the root view URIs to determine (lexically) whether a URI is in any
+	// open workspace.
+	var roots []string
+	for _, v := range views {
+		roots = append(roots, strings.TrimRight(string(v.Folder()), "/"))
 	}
-	return res
-}
 
-// collectPackages gathers all known packages and sorts for stability.
-func (sc *symbolCollector) collectPackages(ctx context.Context, views []View) ([]*pkgView, error) {
-	var toWalk []*pkgView
+	results := make(chan *symbolStore)
+	matcherlen := len(sc.matchers)
+	files := make(map[span.URI]symbolFile)
+
 	for _, v := range views {
 		snapshot, release := v.Snapshot(ctx)
 		defer release()
-		knownPkgs, err := snapshot.KnownPackages(ctx)
-		if err != nil {
-			return nil, err
-		}
-		workspacePackages, err := snapshot.WorkspacePackages(ctx)
+		psyms, err := snapshot.Symbols(ctx)
 		if err != nil {
 			return nil, err
 		}
-		isWorkspacePkg := make(map[Package]bool)
-		for _, wp := range workspacePackages {
-			isWorkspacePkg[wp] = true
-		}
-		for _, pkg := range knownPkgs {
-			toWalk = append(toWalk, &pkgView{
-				pkg:         pkg,
-				isWorkspace: isWorkspacePkg[pkg],
-			})
+
+		for uri, syms := range psyms {
+			// Only scan each file once.
+			if _, ok := files[uri]; ok {
+				continue
+			}
+			mds, err := snapshot.MetadataForFile(ctx, uri)
+			if err != nil {
+				return nil, err
+			}
+			if len(mds) == 0 {
+				// TODO: should use the bug reporting API
+				continue
+			}
+			files[uri] = symbolFile{uri, mds[0], syms}
 		}
 	}
-	// Now sort for stability of results. We order by
-	// (pkgView.isWorkspace, pkgView.p.ID())
-	sort.Slice(toWalk, func(i, j int) bool {
-		lhs := toWalk[i]
-		rhs := toWalk[j]
-		switch {
-		case lhs.isWorkspace == rhs.isWorkspace:
-			return lhs.pkg.ID() < rhs.pkg.ID()
-		case lhs.isWorkspace:
-			return true
-		default:
-			return false
+
+	var work []symbolFile
+	for _, f := range files {
+		work = append(work, f)
+	}
+
+	// Compute matches concurrently. Each symbolWorker has its own symbolStore,
+	// which we merge at the end.
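+	// Worker i handles the files at indexes i, i+N, i+2N, ..., where N is the
+	// number of matchers (one per GOMAXPROCS).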
+	for i, matcher := range sc.matchers {
+		go func(i int, matcher matcherFunc) {
+			w := &symbolWorker{
+				symbolizer: sc.symbolizer,
+				matcher:    matcher,
+				ss:         &symbolStore{},
+				roots:      roots,
+			}
+			for j := i; j < len(work); j += matcherlen {
+				w.matchFile(work[j])
+			}
+			results <- w.ss
+		}(i, matcher)
+	}
+
+	for i := 0; i < matcherlen; i++ {
+		ss := <-results
+		for _, si := range ss.res {
+			sc.store(si)
 		}
-	})
-	return toWalk, nil
+	}
+	return sc.results(), nil
+}
+
+// symbolFile holds symbol information for a single file.
+type symbolFile struct {
+	uri  span.URI
+	md   Metadata
+	syms []Symbol
 }
 
-func (sc *symbolCollector) walkFilesDecls(decls []ast.Decl) {
-	for _, decl := range decls {
-		switch decl := decl.(type) {
-		case *ast.FuncDecl:
-			kind := protocol.Function
-			var recv *ast.Ident
-			if decl.Recv.NumFields() > 0 {
-				kind = protocol.Method
-				recv = unpackRecv(decl.Recv.List[0].Type)
+// symbolWorker matches symbols and captures the highest scoring results.
+type symbolWorker struct {
+	symbolizer symbolizer
+	matcher    matcherFunc
+	ss         *symbolStore
+	roots      []string
+}
+
+func (w *symbolWorker) matchFile(i symbolFile) {
+	for _, sym := range i.syms {
+		symbolParts, score := w.symbolizer(sym.Name, i.md, w.matcher)
+
+		// Check if the score is too low before applying any downranking.
+		if w.ss.tooLow(score) {
+			continue
+		}
+
+		// Factors to apply to the match score for the purpose of downranking
+		// results.
+		//
+		// These numbers were crudely calibrated based on trial-and-error using a
+		// small number of sample queries. Adjust as necessary.
+		//
+		// All factors are multiplicative, meaning if more than one applies they are
+		// multiplied together.
+		const (
+			// nonWorkspaceFactor is applied to symbols outside of any active
+			// workspace. Developers are less likely to want to jump to code that they
+			// are not actively working on.
+			nonWorkspaceFactor = 0.5
+			// nonWorkspaceUnexportedFactor is applied to unexported symbols outside of
+			// any active workspace. Since one wouldn't usually jump to unexported
+			// symbols to understand a package API, they are particularly irrelevant.
+			nonWorkspaceUnexportedFactor = 0.5
+			// depthFactor is applied per level of field or method nesting needed
+			// to reach the symbol: the score is multiplied by
+			// 1.0 - depth*depthFactor, with depth capped at 3.
+			depthFactor = 0.2
+		)
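+		// For example, an unexported symbol nested two levels deep in a package
+		// outside any workspace keeps only
+		// score * 0.5 * 0.5 * (1.0 - 2*0.2) = score * 0.15 of its match score.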
+
+		startWord := true
+		exported := true
+		depth := 0.0
+		for _, r := range sym.Name {
+			if startWord && !unicode.IsUpper(r) {
+				exported = false
 			}
-			if recv != nil {
-				sc.match(decl.Name.Name, kind, decl.Name, recv)
+			if r == '.' {
+				startWord = true
+				depth++
 			} else {
-				sc.match(decl.Name.Name, kind, decl.Name)
+				startWord = false
 			}
-		case *ast.GenDecl:
-			for _, spec := range decl.Specs {
-				switch spec := spec.(type) {
-				case *ast.TypeSpec:
-					sc.match(spec.Name.Name, typeToKind(sc.current.pkg.GetTypesInfo().TypeOf(spec.Type)), spec.Name)
-					sc.walkType(spec.Type, spec.Name)
-				case *ast.ValueSpec:
-					for _, name := range spec.Names {
-						kind := protocol.Variable
-						if decl.Tok == token.CONST {
-							kind = protocol.Constant
-						}
-						sc.match(name.Name, kind, name)
-					}
-				}
+		}
+
+		inWorkspace := false
+		for _, root := range w.roots {
+			if strings.HasPrefix(string(i.uri), root) {
+				inWorkspace = true
+				break
 			}
 		}
-	}
-}
 
-func unpackRecv(rtyp ast.Expr) *ast.Ident {
-	// Extract the receiver identifier. Lifted from go/types/resolver.go
-L:
-	for {
-		switch t := rtyp.(type) {
-		case *ast.ParenExpr:
-			rtyp = t.X
-		case *ast.StarExpr:
-			rtyp = t.X
-		default:
-			break L
+		// Apply downranking based on workspace position.
+		if !inWorkspace {
+			score *= nonWorkspaceFactor
+			if !exported {
+				score *= nonWorkspaceUnexportedFactor
+			}
 		}
-	}
-	if name, _ := rtyp.(*ast.Ident); name != nil {
-		return name
-	}
-	return nil
-}
 
-// walkType processes symbols related to a type expression. path is path of
-// nested type identifiers to the type expression.
-func (sc *symbolCollector) walkType(typ ast.Expr, path ...*ast.Ident) {
-	switch st := typ.(type) {
-	case *ast.StructType:
-		for _, field := range st.Fields.List {
-			sc.walkField(field, protocol.Field, protocol.Field, path...)
+		// Apply downranking based on symbol depth.
+		if depth > 3 {
+			depth = 3
+		}
+		score *= 1.0 - depth*depthFactor
+
+		if w.ss.tooLow(score) {
+			continue
 		}
-	case *ast.InterfaceType:
-		for _, field := range st.Methods.List {
-			sc.walkField(field, protocol.Interface, protocol.Method, path...)
+
+		si := symbolInformation{
+			score:     score,
+			symbol:    strings.Join(symbolParts, ""),
+			kind:      sym.Kind,
+			uri:       i.uri,
+			rng:       sym.Range,
+			container: i.md.PackagePath(),
 		}
+		w.ss.store(si)
 	}
 }
 
-// walkField processes symbols related to the struct field or interface method.
-//
-// unnamedKind and namedKind are the symbol kinds if the field is resp. unnamed
-// or named. path is the path of nested identifiers containing the field.
-func (sc *symbolCollector) walkField(field *ast.Field, unnamedKind, namedKind protocol.SymbolKind, path ...*ast.Ident) {
-	if len(field.Names) == 0 {
-		sc.match(types.ExprString(field.Type), unnamedKind, field, path...)
+type symbolStore struct {
+	res [maxSymbols]symbolInformation
+}
+
+// store inserts si into the sorted results, if si has a high enough score.
+func (sc *symbolStore) store(si symbolInformation) {
+	if sc.tooLow(si.score) {
+		return
 	}
-	for _, name := range field.Names {
-		sc.match(name.Name, namedKind, name, path...)
-		sc.walkType(field.Type, append(path, name)...)
+	insertAt := sort.Search(len(sc.res), func(i int) bool {
+		return sc.res[i].score < si.score
+	})
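+	// res is ordered by score, highest first. insertAt is the first slot whose
+	// score is lower than si's; everything from there shifts down one slot and
+	// the lowest-scoring result falls off the end.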
+	if insertAt < len(sc.res)-1 {
+		copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1])
 	}
+	sc.res[insertAt] = si
+}
+
+func (sc *symbolStore) tooLow(score float64) bool {
+	return score <= sc.res[len(sc.res)-1].score
+}
+
+func (sc *symbolStore) results() []protocol.SymbolInformation {
+	var res []protocol.SymbolInformation
+	for _, si := range sc.res {
+		if si.score <= 0 {
+			return res
+		}
+		res = append(res, si.asProtocolSymbolInformation())
+	}
+	return res
 }
 
 func typeToKind(typ types.Type) protocol.SymbolKind {
@@ -455,130 +524,15 @@ func typeToKind(typ types.Type) protocol.SymbolKind {
 	return protocol.Variable
 }
 
-// match finds matches and gathers the symbol identified by name, kind and node
-// via the symbolCollector's matcher after first de-duping against previously
-// seen symbols.
-//
-// path specifies the identifier path to a nested field or interface method.
-func (sc *symbolCollector) match(name string, kind protocol.SymbolKind, node ast.Node, path ...*ast.Ident) {
-	if !node.Pos().IsValid() || !node.End().IsValid() {
-		return
-	}
-
-	isExported := isExported(name)
-	if len(path) > 0 {
-		var nameBuilder strings.Builder
-		for _, ident := range path {
-			nameBuilder.WriteString(ident.Name)
-			nameBuilder.WriteString(".")
-			if !ident.IsExported() {
-				isExported = false
-			}
-		}
-		nameBuilder.WriteString(name)
-		name = nameBuilder.String()
-	}
-
-	// Factors to apply to the match score for the purpose of downranking
-	// results.
-	//
-	// These numbers were crudely calibrated based on trial-and-error using a
-	// small number of sample queries. Adjust as necessary.
-	//
-	// All factors are multiplicative, meaning if more than one applies they are
-	// multiplied together.
-	const (
-		// nonWorkspaceFactor is applied to symbols outside of any active
-		// workspace. Developers are less likely to want to jump to code that they
-		// are not actively working on.
-		nonWorkspaceFactor = 0.5
-		// nonWorkspaceUnexportedFactor is applied to unexported symbols outside of
-		// any active workspace. Since one wouldn't usually jump to unexported
-		// symbols to understand a package API, they are particularly irrelevant.
-		nonWorkspaceUnexportedFactor = 0.5
-		// fieldFactor is applied to fields and interface methods. One would
-		// typically jump to the type definition first, so ranking fields highly
-		// can be noisy.
-		fieldFactor = 0.5
-	)
-	symbol, score := sc.symbolizer(name, sc.current.pkg, sc.matcher)
-
-	// Downrank symbols outside of the workspace.
-	if !sc.current.isWorkspace {
-		score *= nonWorkspaceFactor
-		if !isExported {
-			score *= nonWorkspaceUnexportedFactor
-		}
-	}
-
-	// Downrank fields.
-	if len(path) > 0 {
-		score *= fieldFactor
-	}
-
-	// Avoid the work below if we know this score will not be sorted into the
-	// results.
-	if score <= sc.res[len(sc.res)-1].score {
-		return
-	}
-
-	rng, err := fileRange(sc.curFile, node.Pos(), node.End())
-	if err != nil {
-		return
-	}
-	si := symbolInformation{
-		score:     score,
-		name:      name,
-		symbol:    symbol,
-		container: sc.current.pkg.PkgPath(),
-		kind:      kind,
-		location: protocol.Location{
-			URI:   protocol.URIFromSpanURI(sc.curFile.URI),
-			Range: rng,
-		},
-	}
-	insertAt := sort.Search(len(sc.res), func(i int) bool {
-		return sc.res[i].score < score
-	})
-	if insertAt < len(sc.res)-1 {
-		copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1])
-	}
-	sc.res[insertAt] = si
-}
-
-func fileRange(pgf *ParsedGoFile, start, end token.Pos) (protocol.Range, error) {
-	s, err := span.FileSpan(pgf.Tok, pgf.Mapper.Converter, start, end)
-	if err != nil {
-		return protocol.Range{}, nil
-	}
-	return pgf.Mapper.Range(s)
-}
-
-// isExported reports if a token is exported. Copied from
-// token.IsExported (go1.13+).
-//
-// TODO: replace usage with token.IsExported once go1.12 is no longer
-// supported.
-func isExported(name string) bool {
-	ch, _ := utf8.DecodeRuneInString(name)
-	return unicode.IsUpper(ch)
-}
-
-// pkgView holds information related to a package that we are going to walk.
-type pkgView struct {
-	pkg         Package
-	isWorkspace bool
-}
-
 // symbolInformation is a cut-down version of protocol.SymbolInformation that
 // allows struct values of this type to be used as map keys.
 type symbolInformation struct {
 	score     float64
-	name      string
 	symbol    string
 	container string
 	kind      protocol.SymbolKind
-	location  protocol.Location
+	uri       span.URI
+	rng       protocol.Range
 }
 
 // asProtocolSymbolInformation converts s to a protocol.SymbolInformation value.
@@ -586,9 +540,12 @@ type symbolInformation struct {
 // TODO: work out how to handle tags if/when they are needed.
 func (s symbolInformation) asProtocolSymbolInformation() protocol.SymbolInformation {
 	return protocol.SymbolInformation{
-		Name:          s.symbol,
-		Kind:          s.kind,
-		Location:      s.location,
+		Name: s.symbol,
+		Kind: s.kind,
+		Location: protocol.Location{
+			URI:   protocol.URIFromSpanURI(s.uri),
+			Range: s.rng,
+		},
 		ContainerName: s.container,
 	}
 }
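(Reviewer aside, not part of the patch: a minimal, self-contained sketch of the new chunk-based matcherFunc shape, reusing the matchExact logic above. The chunk values below are illustrative only.)

package main

import (
	"fmt"
	"strings"
)

// matcherFunc returns the byte offset of the match within the joined chunks
// and a score; a negative offset means no match (mirrors the patch).
type matcherFunc func(chunks []string) (int, float64)

func matchExact(exact string) matcherFunc {
	return func(chunks []string) (int, float64) {
		s := strings.Join(chunks, "")
		if idx := strings.LastIndex(s, exact); idx >= 0 {
			return idx, 1
		}
		return -1, 0
	}
}

func main() {
	m := matchExact("Sym")
	// A symbolizer hands the matcher the symbol in chunks, e.g. package, ".", name.
	idx, score := m([]string{"source", ".", "SymbolMatcher"})
	fmt.Println(idx, score) // 7 1
}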
diff --git a/internal/lsp/source/workspace_symbol_test.go b/internal/lsp/source/workspace_symbol_test.go
index f3d9dbb9d44..89c754db09d 100644
--- a/internal/lsp/source/workspace_symbol_test.go
+++ b/internal/lsp/source/workspace_symbol_test.go
@@ -5,7 +5,6 @@
 package source
 
 import (
-	"strings"
 	"testing"
 )
 
@@ -40,58 +39,8 @@ func TestParseQuery(t *testing.T) {
 
 	for _, test := range tests {
 		matcher := parseQuery(test.query)
-		if score := matcher(test.s); score > 0 != test.wantMatch {
+		if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch {
 			t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch)
 		}
 	}
 }
-
-func TestBestMatch(t *testing.T) {
-	tests := []struct {
-		desc      string
-		symbol    string
-		matcher   matcherFunc
-		wantMatch string
-		wantScore float64
-	}{
-		{
-			desc:      "shortest match",
-			symbol:    "foo/bar/baz.quux",
-			matcher:   func(string) float64 { return 1.0 },
-			wantMatch: "quux",
-			wantScore: 1.0,
-		},
-		{
-			desc:   "partial match",
-			symbol: "foo/bar/baz.quux",
-			matcher: func(s string) float64 {
-				if strings.HasPrefix(s, "bar") {
-					return 1.0
-				}
-				return 0.0
-			},
-			wantMatch: "bar/baz.quux",
-			wantScore: 1.0,
-		},
-		{
-			desc:   "longest match",
-			symbol: "foo/bar/baz.quux",
-			matcher: func(s string) float64 {
-				parts := strings.Split(s, "/")
-				return float64(len(parts))
-			},
-			wantMatch: "foo/bar/baz.quux",
-			wantScore: 3.0,
-		},
-	}
-
-	for _, test := range tests {
-		test := test
-		t.Run(test.desc, func(t *testing.T) {
-			gotMatch, gotScore := bestMatch(test.symbol, test.matcher)
-			if gotMatch != test.wantMatch || gotScore != test.wantScore {
-				t.Errorf("bestMatch(%q, matcher) = (%q, %.2g), want (%q, %.2g)", test.symbol, gotMatch, gotScore, test.wantMatch, test.wantScore)
-			}
-		})
-	}
-}
diff --git a/internal/lsp/template/completion.go b/internal/lsp/template/completion.go
index a593bf5732a..4ec7623ba9a 100644
--- a/internal/lsp/template/completion.go
+++ b/internal/lsp/template/completion.go
@@ -5,17 +5,294 @@
 package template
 
 import (
+	"bytes"
 	"context"
 	"fmt"
+	"go/scanner"
+	"go/token"
+	"strings"
 
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
-	"golang.org/x/tools/internal/lsp/source/completion"
 )
 
-func Completion(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, pos protocol.Position, context protocol.CompletionContext) ([]completion.CompletionItem, *completion.Selection, error) {
+// completer holds the information needed to compute completions.
+type completer struct {
+	p      *Parsed
+	pos    protocol.Position
+	offset int // offset of the start of the Token
+	ctx    protocol.CompletionContext
+	syms   map[string]symbol
+}
+
+func Completion(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, pos protocol.Position, context protocol.CompletionContext) (*protocol.CompletionList, error) {
 	if skipTemplates(snapshot) {
-		return nil, nil, nil
+		return nil, nil
+	}
+	all := New(snapshot.Templates())
+	var start int // the beginning of the Token (completed or not)
+	syms := make(map[string]symbol)
+	var p *Parsed
+	for fn, fc := range all.files {
+		// collect symbols from all template files
+		filterSyms(syms, fc.symbols)
+		if fn.Filename() != fh.URI().Filename() {
+			continue
+		}
+		if start = inTemplate(fc, pos); start == -1 {
+			return nil, nil
+		}
+		p = fc
+	}
+	if p == nil {
+		// this cannot happen unless the search missed a template file
+		return nil, fmt.Errorf("%s not found", fh.FileIdentity().URI.Filename())
+	}
+	c := completer{
+		p:      p,
+		pos:    pos,
+		offset: start + len(Left),
+		ctx:    context,
+		syms:   syms,
+	}
+	return c.complete()
+}
+
+func filterSyms(syms map[string]symbol, ns []symbol) {
+	for _, xsym := range ns {
+		switch xsym.kind {
+		case protocol.Method, protocol.Package, protocol.Boolean, protocol.Namespace,
+			protocol.Function:
+			syms[xsym.name] = xsym // we don't care which symbol we get
+		case protocol.Variable:
+			if xsym.name != "dot" {
+				syms[xsym.name] = xsym
+			}
+		case protocol.Constant:
+			if xsym.name == "nil" {
+				syms[xsym.name] = xsym
+			}
+		}
+	}
+}
+
+// inTemplate returns the starting offset of the enclosing token, or -1 if there is none.
+func inTemplate(fc *Parsed, pos protocol.Position) int {
+	// 1. pos might be in a Token; return tk.Start
+	// 2. pos might be after an elided Left but before a Token; return the offset of the elided Left
+	// 3. otherwise return -1
+	offset := fc.FromPosition(pos)
+	// this could be a binary search, as the tokens are ordered
+	for _, tk := range fc.tokens {
+		if tk.Start <= offset && offset < tk.End {
+			return tk.Start
+		}
+	}
+	for _, x := range fc.elided {
+		if x > offset {
+			// fc.elided is sorted
+			break
+		}
+		// If the interval [x,offset] does not contain Left or Right
+		// then provide completions. (do we need the test for Right?)
+		if !bytes.Contains(fc.buf[x:offset], []byte(Left)) && !bytes.Contains(fc.buf[x:offset], []byte(Right)) {
+			return x
+		}
+	}
+	return -1
+}
+
+var (
+	keywords = []string{"if", "with", "else", "block", "range", "template", "end}}", "end"}
+	globals  = []string{"and", "call", "html", "index", "slice", "js", "len", "not", "or",
+		"urlquery", "printf", "println", "print", "eq", "ne", "le", "lt", "ge", "gt"}
+)
+
+// complete finds the completions. c.offset is the offset of the content of the
+// Token enclosing c.pos, or of the incomplete token the user is typing.
+// The error return is always nil.
+func (c *completer) complete() (*protocol.CompletionList, error) {
+	ans := &protocol.CompletionList{IsIncomplete: true, Items: []protocol.CompletionItem{}}
+	start := c.p.FromPosition(c.pos)
+	sofar := c.p.buf[c.offset:start]
+	if len(sofar) == 0 || sofar[len(sofar)-1] == ' ' || sofar[len(sofar)-1] == '\t' {
+		return ans, nil
+	}
+	// sofar could be parsed by either c.analyze() or scan(). The latter is precise
+	// and slower, but fast enough.
+	words := scan(sofar)
+	// 1. if pattern starts with $, show variables
+	// 2. if pattern starts with ., show methods (and . by itself?)
+	// 3. if len(words) == 1, show firstWords (but if it were a |, show functions and globals)
+	// 4. ...? (parenthetical expressions, arguments, ...) (packages, namespaces, nil?)
+	if len(words) == 0 {
+		return nil, nil // if this happens, why were we called?
+	}
+	pattern := string(words[len(words)-1])
+	if pattern[0] == '$' {
+		// should we also return a raw "$"?
+		for _, s := range c.syms {
+			if s.kind == protocol.Variable && weakMatch(s.name, pattern) > 0 {
+				ans.Items = append(ans.Items, protocol.CompletionItem{
+					Label:  s.name,
+					Kind:   protocol.VariableCompletion,
+					Detail: "Variable",
+				})
+			}
+		}
+		return ans, nil
+	}
+	if pattern[0] == '.' {
+		for _, s := range c.syms {
+			if s.kind == protocol.Method && weakMatch("."+s.name, pattern) > 0 {
+				ans.Items = append(ans.Items, protocol.CompletionItem{
+					Label:  s.name,
+					Kind:   protocol.MethodCompletion,
+					Detail: "Method/member",
+				})
+			}
+		}
+		return ans, nil
+	}
+	// could we get completion attempts in strings or numbers, and if so, do we care?
+	// globals
+	for _, kw := range globals {
+		if weakMatch(kw, string(pattern)) != 0 {
+			ans.Items = append(ans.Items, protocol.CompletionItem{
+				Label:  kw,
+				Kind:   protocol.KeywordCompletion,
+				Detail: "Function",
+			})
+		}
+	}
+	// and functions
+	for _, s := range c.syms {
+		if s.kind == protocol.Function && weakMatch(s.name, pattern) != 0 {
+			ans.Items = append(ans.Items, protocol.CompletionItem{
+				Label:  s.name,
+				Kind:   protocol.FunctionCompletion,
+				Detail: "Function",
+			})
+		}
+	}
+	// keywords if we're at the beginning
+	if len(words) <= 1 || len(words[len(words)-2]) == 1 && words[len(words)-2][0] == '|' {
+		for _, kw := range keywords {
+			if weakMatch(kw, string(pattern)) != 0 {
+				ans.Items = append(ans.Items, protocol.CompletionItem{
+					Label:  kw,
+					Kind:   protocol.KeywordCompletion,
+					Detail: "keyword",
+				})
+			}
+		}
+	}
+	return ans, nil
+}
+
+// analyze splits buf into the words relevant for completion, splitting on
+// whitespace and before dots.
+// Someday think about comments, strings, backslashes, etc. This would repeat
+// some of the template parsing, but because the user is typing there may be no
+// parse tree here.
+// (go/scanner will report 2 tokens for $a, as $ is not a legal Go identifier
+// character, and is about 2.7 times more expensive.)
+func (c *completer) analyze(buf []byte) [][]byte {
+	// we want to split on whitespace and before dots
+	var working []byte
+	var ans [][]byte
+	for _, ch := range buf {
+		if ch == '.' && len(working) > 0 {
+			ans = append(ans, working)
+			working = []byte{'.'}
+			continue
+		}
+		if ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' {
+			if len(working) > 0 {
+				ans = append(ans, working)
+				working = []byte{}
+				continue
+			}
+		}
+		working = append(working, ch)
+	}
+	if len(working) > 0 {
+		ans = append(ans, working)
+	}
+	ch := buf[len(buf)-1]
+	if ch == ' ' || ch == '\t' {
+		// avoid completing on whitespace
+		ans = append(ans, []byte{ch})
+	}
+	return ans
+}
+
+// version of c.analyze that uses go/scanner.
+func scan(buf []byte) []string {
+	fset := token.NewFileSet()
+	fp := fset.AddFile("", -1, len(buf))
+	var sc scanner.Scanner
+	sc.Init(fp, buf, func(pos token.Position, msg string) {}, scanner.ScanComments)
+	ans := make([]string, 0, 10) // preallocating gives a measurable savings
+	for {
+		_, tok, lit := sc.Scan() // tok is an int
+		if tok == token.EOF {
+			break // done
+		} else if tok == token.SEMICOLON && lit == "\n" {
+			continue // don't care, but probably can't happen
+		} else if tok == token.PERIOD {
+			ans = append(ans, ".") // lit is empty
+		} else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "." {
+			ans[len(ans)-1] = "." + lit
+		} else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "$" {
+			ans[len(ans)-1] = "$" + lit
+		} else {
+			ans = append(ans, lit)
+		}
+	}
+	return ans
+}
+
+// weakMatch reports whether pattern (what the user has typed) loosely
+// matches choice, returning 1 for a match and 0 for no match.
+func weakMatch(choice, pattern string) float64 {
+	lower := strings.ToLower(choice)
+	// for now, use only lower-case everywhere
+	pattern = strings.ToLower(pattern)
+	// The first char has to match
+	if pattern[0] != lower[0] {
+		return 0
+	}
+	// If they start with ., then the second char has to match
+	from := 1
+	if pattern[0] == '.' {
+		if len(pattern) < 2 {
+			return 1 // pattern just a ., so it matches
+		}
+		if pattern[1] != lower[1] {
+			return 0
+		}
+		from = 2
+	}
+	// check that all the characters of pattern occur as a subsequence of choice
+	for i, j := from, from; j < len(pattern); j++ {
+		if pattern[j] == lower[i] {
+			i++
+			if i >= len(lower) {
+				return 0
+			}
+		}
+	}
+	return 1
+}
+
+// for debug printing
+func strContext(c protocol.CompletionContext) string {
+	switch c.TriggerKind {
+	case protocol.Invoked:
+		return "invoked"
+	case protocol.TriggerCharacter:
+		return fmt.Sprintf("triggered(%s)", c.TriggerCharacter)
+	case protocol.TriggerForIncompleteCompletions:
+		// gopls doesn't seem to handle these explicitly anywhere
+		return "incomplete"
 	}
-	return nil, nil, fmt.Errorf("implement template completion")
+	return fmt.Sprintf("?%v", c)
 }
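(Reviewer aside, not part of the patch: scan() leans on go/scanner behavior that is worth seeing in isolation. A standalone sketch using only the standard library; the input string is made up.)

package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	// '$' is not a legal Go identifier character, so "$x" scans as two tokens
	// (ILLEGAL "$" then IDENT "x"), which is why scan() re-joins "$"+lit and
	// "."+lit pairs after the fact.
	src := []byte("$x .Field")
	fset := token.NewFileSet()
	file := fset.AddFile("", -1, len(src))
	var s scanner.Scanner
	s.Init(file, src, func(token.Position, string) {}, scanner.ScanComments)
	for {
		_, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%v %q\n", tok, lit)
	}
}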
diff --git a/internal/lsp/template/completion_test.go b/internal/lsp/template/completion_test.go
new file mode 100644
index 00000000000..7d17ab1ebab
--- /dev/null
+++ b/internal/lsp/template/completion_test.go
@@ -0,0 +1,98 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+	"log"
+	"sort"
+	"strings"
+	"testing"
+
+	"golang.org/x/tools/internal/lsp/protocol"
+)
+
+func init() {
+	log.SetFlags(log.Lshortfile)
+}
+
+type tparse struct {
+	marked string   // ^ shows where to ask for completions. (The user just typed the following character.)
+	wanted []string // expected completions
+}
+
+// Test completions in templates that parse enough (if completion needs symbols)
+func TestParsed(t *testing.T) {
+	var tests = []tparse{
+		{"{{^if}}", []string{"index", "if"}},
+		{"{{if .}}{{^e {{end}}", []string{"eq", "end}}", "else", "end"}},
+		{"{{foo}}{{^f", []string{"foo"}},
+		{"{{^$}}", []string{"$"}},
+		{"{{$x:=4}}{{^$", []string{"$x"}},
+		{"{{$x:=4}}{{$^ ", []string{}},
+		{"{{len .Modified}}{{^.Mo", []string{"Modified"}},
+		{"{{len .Modified}}{{.m^f", []string{"Modified"}},
+		{"{{^$ }}", []string{"$"}},
+		{"{{$a =3}}{{^$", []string{"$a"}},
+		// .two is not good here: fix someday
+		{`{{.Modified}}{{^.{{if $.one.two}}xxx{{end}}`, []string{"Modified", "one", "two"}},
+		{`{{.Modified}}{{.^o{{if $.one.two}}xxx{{end}}`, []string{"one"}},
+		{"{{.Modiifed}}{{.one.^t{{if $.one.two}}xxx{{end}}", []string{"two"}},
+		{`{{block "foo" .}}{{^i`, []string{"index", "if"}},
+		{"{{i^n{{Internal}}", []string{"index", "Internal", "if"}},
+		// simple number has no completions
+		{"{{4^e", []string{}},
+		// simple string has no completions
+		{"{{`^e", []string{}},
+		{"{{`No ^i", []string{}}, // example of why go/scanner is used
+		{"{{xavier}}{{12. ^x", []string{"xavier"}},
+	}
+	for _, tx := range tests {
+		c := testCompleter(t, tx)
+		ans, err := c.complete()
+		if err != nil {
+			t.Fatal(err)
+		}
+		var v []string
+		for _, a := range ans.Items {
+			v = append(v, a.Label)
+		}
+		if len(v) != len(tx.wanted) {
+			t.Errorf("%q: got %v, wanted %v", tx.marked, v, tx.wanted)
+			continue
+		}
+		sort.Strings(tx.wanted)
+		sort.Strings(v)
+		for i := 0; i < len(v); i++ {
+			if tx.wanted[i] != v[i] {
+				t.Errorf("%q at %d: got %v, wanted %v", tx.marked, i, v, tx.wanted)
+				break
+			}
+		}
+	}
+}
+
+func testCompleter(t *testing.T, tx tparse) *completer {
+	t.Helper()
+	col := strings.Index(tx.marked, "^") + 1
+	offset := strings.LastIndex(tx.marked[:col], string(Left))
+	if offset < 0 {
+		t.Fatalf("no {{ before ^: %q", tx.marked)
+	}
+	buf := strings.Replace(tx.marked, "^", "", 1)
+	p := parseBuffer([]byte(buf))
+	if p.ParseErr != nil {
+		log.Printf("%q: %v", tx.marked, p.ParseErr)
+	}
+	syms := make(map[string]symbol)
+	filterSyms(syms, p.symbols)
+	c := &completer{
+		p:      p,
+		pos:    protocol.Position{Line: 0, Character: uint32(col)},
+		offset: offset + len(Left),
+		ctx:    protocol.CompletionContext{TriggerKind: protocol.Invoked},
+		syms:   syms,
+	}
+	return c
+}
diff --git a/internal/lsp/template/parse.go b/internal/lsp/template/parse.go
index 0853612a708..0ad8fabdcb1 100644
--- a/internal/lsp/template/parse.go
+++ b/internal/lsp/template/parse.go
@@ -37,10 +37,11 @@ var (
 )
 
 type Parsed struct {
-	buf   []byte   //contents
-	lines [][]byte // needed?, other than for debugging?
+	buf    []byte   //contents
+	lines  [][]byte // needed?, other than for debugging?
+	elided []int    // offsets where Left was replaced by blanks
 
-	// tokens, computed before trying to parse
+	// tokens are matched Left-Right pairs, computed before trying to parse
 	tokens []Token
 
 	// result of parsing
@@ -50,6 +51,7 @@ type Parsed struct {
 	stack    []parse.Node // used while computing symbols
 
 	// for mapping from offsets in buf to LSP coordinates
+	// See FromPosition() and LineCol()
 	nls      []int // offset of newlines before each line (nls[0]==-1)
 	lastnl   int   // last line seen
 	check    int   // used to decide whether to use lastnl or search through nls
@@ -102,27 +104,28 @@ func parseBuffer(buf []byte) *Parsed {
 	if buf[len(buf)-1] != '\n' {
 		ans.buf = append(buf, '\n')
 	}
-	// at the cost of complexity we could fold this into the allAscii loop
-	ans.lines = bytes.Split(buf, []byte{'\n'})
 	for i, p := range ans.buf {
 		if p == '\n' {
 			ans.nls = append(ans.nls, i)
 		}
 	}
-	ans.setTokens()
-	t, err := template.New("").Parse(string(buf))
+	ans.setTokens() // ans.buf may be a new []byte
+	ans.lines = bytes.Split(ans.buf, []byte{'\n'})
+	t, err := template.New("").Parse(string(ans.buf))
 	if err != nil {
 		funcs := make(template.FuncMap)
 		for t == nil && ans.ParseErr == nil {
+			// in 1.17 it may be possible to avoid getting this error
 			//  template: :2: function "foo" not defined
 			matches := parseErrR.FindStringSubmatch(err.Error())
-			if len(matches) < 2 { // uncorrectable error
-				ans.ParseErr = err
-				return ans
+			if len(matches) == 2 {
+				// suppress the error by giving it a function with the right name
+				funcs[matches[1]] = func() interface{} { return nil }
+				t, err = template.New("").Funcs(funcs).Parse(string(ans.buf))
+				continue
 			}
-			// suppress the error by giving it a function with the right name
-			funcs[matches[1]] = func(interface{}) interface{} { return nil }
-			t, err = template.New("").Funcs(funcs).Parse(string(buf))
+			ans.ParseErr = err // unfixed error
+			return ans
 		}
 	}
 	ans.named = t.Templates()
@@ -173,24 +176,94 @@ func (p *Parsed) FindLiteralBefore(pos int) (int, int) {
 	return left + 1, right - left - 1
 }
 
-var parseErrR = regexp.MustCompile(`template:.*function "([^"]+)" not defined`)
+var (
+	parseErrR = regexp.MustCompile(`template:.*function "([^"]+)" not defined`)
+)
 
 func (p *Parsed) setTokens() {
-	last := 0
-	for left := bytes.Index(p.buf[last:], Left); left != -1; left = bytes.Index(p.buf[last:], Left) {
-		left += last
-		tok := Token{Start: left}
-		last = left + len(Left)
-		right := bytes.Index(p.buf[last:], Right)
-		if right == -1 {
-			break
+	const (
+		// InRaw and InString only occur inside an action (SeenLeft)
+		Start = iota
+		InRaw
+		InString
+		SeenLeft
+	)
+	state := Start
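+	// Scan the buffer once, tracking whether we are inside an action and, while
+	// inside one, whether we are inside a raw or interpreted string, so that a
+	// quoted }} or {{ does not end or restart the action.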
+	var left, oldState int
+	for n := 0; n < len(p.buf); n++ {
+		c := p.buf[n]
+		switch state {
+		case InRaw:
+			if c == '`' {
+				state = oldState
+			}
+		case InString:
+			if c == '"' && !isEscaped(p.buf[:n]) {
+				state = oldState
+			}
+		case SeenLeft:
+			if c == '`' {
+				oldState = state // it's SeenLeft, but a little clearer this way
+				state = InRaw
+				continue
+			}
+			if c == '"' {
+				oldState = state
+				state = InString
+				continue
+			}
+			if bytes.HasPrefix(p.buf[n:], Right) {
+				right := n + len(Right)
+				tok := Token{Start: left,
+					End:       right,
+					Multiline: bytes.Contains(p.buf[left:right], []byte{'\n'}),
+				}
+				p.tokens = append(p.tokens, tok)
+				state = Start
+			}
+			// If we see an (unquoted) Left then the original Left is probably the
+			// user typing. Suppress the original Left.
+			if bytes.HasPrefix(p.buf[n:], Left) {
+				p.elideAt(left)
+				left = n
+				n += len(Left) - 1 // skip the rest
+			}
+		case Start:
+			if bytes.HasPrefix(p.buf[n:], Left) {
+				left = n
+				state = SeenLeft
+				n += len(Left) - 1 // skip the rest (avoids {{{ bug)
+			}
 		}
-		right += last + len(Right)
-		tok.End = right
-		tok.Multiline = bytes.Contains(p.buf[left:right], []byte{'\n'})
-		p.tokens = append(p.tokens, tok)
-		last = right
 	}
+	// this error occurs after typing {{ at the end of the file
+	if state != Start {
+		// Unclosed Left. remove the Left at left
+		p.elideAt(left)
+	}
+}
+
+func (p *Parsed) elideAt(left int) {
+	if p.elided == nil {
+		// p.buf is the same buffer that v.Read() returns, so copy it.
+		// (otherwise the next time it's parsed, elided information is lost)
+		b := make([]byte, len(p.buf))
+		copy(b, p.buf)
+		p.buf = b
+	}
+	for i := 0; i < len(Left); i++ {
+		p.buf[left+i] = ' '
+	}
+	p.elided = append(p.elided, left)
+}
+
+// isEscaped reports whether the byte following buf would be escaped, that is,
+// whether buf ends in an odd number of backslashes.
+func isEscaped(buf []byte) bool {
+	backSlashes := 0
+	for j := len(buf) - 1; j >= 0 && buf[j] == '\\'; j-- {
+		backSlashes++
+	}
+	return backSlashes%2 == 1
 }
 
 func (p *Parsed) Tokens() []Token {
@@ -250,7 +323,7 @@ func (p *Parsed) LineCol(x int) (uint32, uint32) {
 		return uint32(i - 1), uint32(count)
 	}
 	if x == len(p.buf)-1 { // trailing \n
-		return uint32(len(p.nls)), 1
+		return uint32(len(p.nls) - 1), 0
 	}
 	// shouldn't happen
 	for i := 1; i < 4; i++ {
@@ -284,6 +357,10 @@ func (p *Parsed) Range(x, length int) protocol.Range {
 // FromPosition translates a protocol.Position into an offset into the template
 func (p *Parsed) FromPosition(x protocol.Position) int {
 	l, c := int(x.Line), int(x.Character)
+	if l >= len(p.nls) || p.nls[l]+1 >= len(p.buf) {
+		// paranoia to avoid panic. return the largest offset
+		return len(p.buf)
+	}
 	line := p.buf[p.nls[l]+1:]
 	cnt := 0
 	for w := range string(line) {
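(Reviewer aside, not part of the patch: the escaped-quote rule used while scanning actions comes down to trailing-backslash parity. A tiny standalone sketch of that check; the sample inputs are invented.)

package main

import "fmt"

// escaped reports whether the next byte after buf would be escaped,
// i.e. whether buf ends in an odd number of backslashes.
func escaped(buf []byte) bool {
	backslashes := 0
	for j := len(buf) - 1; j >= 0 && buf[j] == '\\'; j-- {
		backslashes++
	}
	return backslashes%2 == 1
}

func main() {
	fmt.Println(escaped([]byte(`say \`)))  // true: a following quote is escaped
	fmt.Println(escaped([]byte(`say \\`))) // false: the backslash escapes itself
}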
diff --git a/internal/lsp/template/parse_test.go b/internal/lsp/template/parse_test.go
index e6a95eff4e6..db60989fe3b 100644
--- a/internal/lsp/template/parse_test.go
+++ b/internal/lsp/template/parse_test.go
@@ -54,9 +54,11 @@ func TestSymbols(t *testing.T) {
 }
 
 func TestWordAt(t *testing.T) {
-	want := []string{"", "", "if", "if", "", "$A", "$A", "", "", "B", "", "", "end", "end", "end", "", ""}
-	p := parseBuffer([]byte("{{if $A}}B{{end}}"))
-	for i := 0; i < len(want); i++ {
+	want := []string{"", "", "$A", "$A", "", "", "", "", "", "",
+		"", "", "", "if", "if", "", "$A", "$A", "", "",
+		"B", "", "", "end", "end", "end", "", "", ""}
+	p := parseBuffer([]byte("{{$A := .}}{{if $A}}B{{end}}"))
+	for i := 0; i < len(p.buf); i++ {
 		got := findWordAt(p, i)
 		if got != want[i] {
 			t.Errorf("for %d, got %q, wanted %q", i, got, want[i])
@@ -142,6 +144,20 @@ func TestLineCol(t *testing.T) {
 	}
 }
 
+func TestLineColNL(t *testing.T) {
+	buf := "\n\n\n\n\n"
+	p := parseBuffer([]byte(buf))
+	if p.ParseErr != nil {
+		t.Fatal(p.ParseErr)
+	}
+	for i := 0; i < len(buf); i++ {
+		l, c := p.LineCol(i)
+		if c != 0 || int(l) != i+1 {
+			t.Errorf("got (%d,%d), expected (%d,0)", l, c, i)
+		}
+	}
+}
+
 func TestPos(t *testing.T) {
 	buf := `
 	{{if (foÜx .X.Y)}}{{$A := "hi"}}{{.Z $A}}{{else}}
@@ -190,3 +206,32 @@ func TestUtf16(t *testing.T) {
 		t.Error("expected nonASCII to be true")
 	}
 }
+
+type ttest struct {
+	tmpl      string
+	tokCnt    int
+	elidedCnt int8
+}
+
+func TestQuotes(t *testing.T) {
+	tsts := []ttest{
+		{"{{- /*comment*/ -}}", 1, 0},
+		{"{{/*`\ncomment\n`*/}}", 1, 0},
+		//{"{{foo\nbar}}\n", 1, 0}, // this action spanning lines parses in 1.16
+		{"{{\"{{foo}}{{\"}}", 1, 0},
+		{"{{\n{{- when}}", 1, 1},          // corrected
+		{"{{{{if .}}xx{{\n{{end}}", 2, 2}, // corrected
+	}
+	for _, s := range tsts {
+		p := parseBuffer([]byte(s.tmpl))
+		if len(p.tokens) != s.tokCnt {
+			t.Errorf("%q: got %d tokens, expected %d", s, len(p.tokens), s.tokCnt)
+		}
+		if p.ParseErr != nil {
+			t.Errorf("%q: %v", string(p.buf), p.ParseErr)
+		}
+		if len(p.elided) != int(s.elidedCnt) {
+			t.Errorf("%q: elided %d, expected %d", s, len(p.elided), s.elidedCnt)
+		}
+	}
+}
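(Reviewer aside, not part of the patch: TestLineColNL exercises offset-to-line/column mapping. A minimal sketch of the newline-offset idea, similar in spirit to the nls slice but not the Parsed implementation; names and values are illustrative.)

package main

import (
	"fmt"
	"sort"
)

// lineCol maps a byte offset to 0-based (line, column) using a sorted slice
// of newline offsets.
func lineCol(nls []int, offset int) (int, int) {
	line := sort.SearchInts(nls, offset)
	if line == 0 {
		return 0, offset
	}
	return line, offset - nls[line-1] - 1
}

func main() {
	buf := "ab\ncd\n"
	var nls []int
	for i, c := range buf {
		if c == '\n' {
			nls = append(nls, i)
		}
	}
	fmt.Println(lineCol(nls, 4)) // 1 1: the 'd' on the second line
}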
diff --git a/internal/lsp/testdata/basiclit/basiclit.go b/internal/lsp/testdata/basiclit/basiclit.go
index ab895dc011c..9829003d357 100644
--- a/internal/lsp/testdata/basiclit/basiclit.go
+++ b/internal/lsp/testdata/basiclit/basiclit.go
@@ -10,4 +10,47 @@ func _() {
 	_ = 1. //@complete(".")
 
 	_ = 'a' //@complete("' ")
+
+	_ = 'a' //@hover("'a'", "'a', U+0061, LATIN SMALL LETTER A")
+	_ = 0x61 //@hover("0x61", "'a', U+0061, LATIN SMALL LETTER A")
+
+	_ = '\u2211' //@hover("'\\u2211'", "'∑', U+2211, N-ARY SUMMATION")
+	_ = 0x2211 //@hover("0x2211", "'∑', U+2211, N-ARY SUMMATION")
+	_ = "foo \u2211 bar" //@hover("\\u2211", "'∑', U+2211, N-ARY SUMMATION")
+
+	_ = '\a' //@hover("'\\a'", "U+0007, control")
+	_ = "foo \a bar" //@hover("\\a", "U+0007, control")
+
+	_ = '\U0001F30A' //@hover("'\\U0001F30A'", "'🌊', U+1F30A, WATER WAVE")
+	_ = 0x0001F30A //@hover("0x0001F30A", "'🌊', U+1F30A, WATER WAVE")
+	_ = "foo \U0001F30A bar" //@hover("\\U0001F30A", "'🌊', U+1F30A, WATER WAVE")
+
+	_ = '\x7E' //@hover("'\\x7E'", "'~', U+007E, TILDE")
+	_ = "foo \x7E bar" //@hover("\\x7E", "'~', U+007E, TILDE")
+	_ = "foo \a bar" //@hover("\\a", "U+0007, control")
+
+	_ = '\173' //@hover("'\\173'", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo \173 bar" //@hover("\\173", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo \173 bar \u2211 baz" //@hover("\\173", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo \173 bar \u2211 baz" //@hover("\\u2211", "'∑', U+2211, N-ARY SUMMATION")
+	_ = "foo\173bar\u2211baz" //@hover("\\173", "'{', U+007B, LEFT CURLY BRACKET")
+	_ = "foo\173bar\u2211baz" //@hover("\\u2211", "'∑', U+2211, N-ARY SUMMATION")
+
+	// search for runes in strings only if there is an escape sequence
+	_ = "hello" //@hover("\"hello\"", "")
+
+	// invalid rune escape sequences
+	_ = '\0' //@hover("'\\0'", "")
+	_ = '\u22111' //@hover("'\\u22111'", "")
+	_ = '\U00110000' //@hover("'\\U00110000'", "")
+	_ = '\u12e45'//@hover("'\\u12e45'", "")
+	_ = '\xa' //@hover("'\\xa'", "")
+	_ = 'aa' //@hover("'aa'", "")
+
+	// other basic lits
+	_ = 1 //@hover("1", "")
+	_ = 1.2 //@hover("1.2", "")
+	_ = 1.2i //@hover("1.2i", "")
+	_ = 0123 //@hover("0123", "")
+	_ = 0x1234567890 //@hover("0x1234567890", "")
 }
diff --git a/internal/lsp/testdata/cgo/declarecgo.go.golden b/internal/lsp/testdata/cgo/declarecgo.go.golden
index 773f3b7d3e3..b6d94d0c6c6 100644
--- a/internal/lsp/testdata/cgo/declarecgo.go.golden
+++ b/internal/lsp/testdata/cgo/declarecgo.go.golden
@@ -22,7 +22,7 @@ func Example()
 	"description": "```go\nfunc Example()\n```\n\n[`cgo.Example` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/cgo?utm_source=gopls#Example)"
 }
 
--- funccgoexample-hover --
+-- funccgoexample-hoverdef --
 ```go
 func Example()
 ```
diff --git a/internal/lsp/testdata/cgoimport/usecgo.go.golden b/internal/lsp/testdata/cgoimport/usecgo.go.golden
index 8f7518a154e..f33f94f84a6 100644
--- a/internal/lsp/testdata/cgoimport/usecgo.go.golden
+++ b/internal/lsp/testdata/cgoimport/usecgo.go.golden
@@ -22,7 +22,7 @@ func cgo.Example()
 	"description": "```go\nfunc cgo.Example()\n```\n\n[`cgo.Example` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/cgo?utm_source=gopls#Example)"
 }
 
--- funccgoexample-hover --
+-- funccgoexample-hoverdef --
 ```go
 func cgo.Example()
 ```
diff --git a/internal/lsp/testdata/extract/extract_function/extract_scope.go b/internal/lsp/testdata/extract/extract_function/extract_scope.go
index 73d74192e23..6cc141fd117 100644
--- a/internal/lsp/testdata/extract/extract_function/extract_scope.go
+++ b/internal/lsp/testdata/extract/extract_function/extract_scope.go
@@ -1,10 +1,10 @@
 package extract
 
 func _() {
-	fn0 := 1
-	a := fn0 //@extractfunc("a", "fn0")
+	newFunction := 1
+	a := newFunction //@extractfunc("a", "newFunction")
 }
 
-func fn1() int {
+func newFunction1() int {
 	return 1
 }
diff --git a/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden b/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden
index 1bb4e61fe44..a4803b4fe3e 100644
--- a/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden
+++ b/internal/lsp/testdata/extract/extract_function/extract_scope.go.golden
@@ -2,15 +2,15 @@
 package extract
 
 func _() {
-	fn0 := 1
-	newFunction(fn0) //@extractfunc("a", "fn0")
+	newFunction := 1
+	newFunction2(newFunction) //@extractfunc("a", "newFunction")
 }
 
-func newFunction(fn0 int) {
-	a := fn0
+func newFunction2(newFunction int) {
+	a := newFunction
 }
 
-func fn1() int {
+func newFunction1() int {
 	return 1
 }
 
diff --git a/internal/lsp/testdata/extract/extract_method/extract_basic.go b/internal/lsp/testdata/extract/extract_method/extract_basic.go
new file mode 100644
index 00000000000..c9a8d9dce38
--- /dev/null
+++ b/internal/lsp/testdata/extract/extract_method/extract_basic.go
@@ -0,0 +1,24 @@
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
diff --git a/internal/lsp/testdata/extract/extract_method/extract_basic.go.golden b/internal/lsp/testdata/extract/extract_method/extract_basic.go.golden
new file mode 100644
index 00000000000..eab22a673c1
--- /dev/null
+++ b/internal/lsp/testdata/extract/extract_method/extract_basic.go.golden
@@ -0,0 +1,728 @@
+-- functionextraction_extract_basic_13_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := newFunction(a) //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(a *A) int {
+	sum := a.x + a.y
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_basic_14_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return newFunction(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(sum int) int {
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_basic_18_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return newFunction(a) //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func newFunction(a A) bool {
+	return a.x < a.y
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_basic_22_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := newFunction(a) //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(a A) int {
+	sum := a.x + a.y
+	return sum
+}
+
+-- functionextraction_extract_basic_23_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return newFunction(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(sum int) int {
+	return sum
+}
+
+-- functionextraction_extract_basic_9_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return newFunction(a) //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func newFunction(a *A) bool {
+	return a.x < a.y
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_method_13_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := newFunction(a) //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(a *A) int {
+	sum := a.x + a.y
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_method_14_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return newFunction(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(sum int) int {
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_method_18_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return newFunction(a) //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func newFunction(a A) bool {
+	return a.x < a.y
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- functionextraction_extract_method_22_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := newFunction(a) //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(a A) int {
+	sum := a.x + a.y
+	return sum
+}
+
+-- functionextraction_extract_method_23_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return newFunction(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func newFunction(sum int) int {
+	return sum
+}
+
+-- functionextraction_extract_method_9_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return newFunction(a) //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func newFunction(a *A) bool {
+	return a.x < a.y
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_basic_13_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.newMethod() //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a *A) newMethod() int {
+	sum := a.x + a.y
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_basic_14_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return a.newMethod(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (*A) newMethod(sum int) int {
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_basic_18_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.newMethod() //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) newMethod() bool {
+	return a.x < a.y
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_basic_22_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.newMethod() //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) newMethod() int {
+	sum := a.x + a.y
+	return sum
+}
+
+-- methodextraction_extract_basic_23_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return a.newMethod(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (A) newMethod(sum int) int {
+	return sum
+}
+
+-- methodextraction_extract_basic_9_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.newMethod() //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) newMethod() bool {
+	return a.x < a.y
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_method_13_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.newMethod() //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a *A) newMethod() int {
+	sum := a.x + a.y
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_method_14_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return a.newMethod(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (*A) newMethod(sum int) int {
+	return sum
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_method_18_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.newMethod() //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) newMethod() bool {
+	return a.x < a.y
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+-- methodextraction_extract_method_22_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.newMethod() //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) newMethod() int {
+	sum := a.x + a.y
+	return sum
+}
+
+-- methodextraction_extract_method_23_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return a.newMethod(sum)       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (A) newMethod(sum int) int {
+	return sum
+}
+
+-- methodextraction_extract_method_9_2 --
+package extract
+
+type A struct {
+	x int
+	y int
+}
+
+func (a *A) XLessThanYP() bool {
+	return a.newMethod() //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a *A) newMethod() bool {
+	return a.x < a.y
+}
+
+func (a *A) AddP() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
+func (a A) XLessThanY() bool {
+	return a.x < a.y //@extractmethod("return", "a.y"),extractfunc("return", "a.y")
+}
+
+func (a A) Add() int {
+	sum := a.x + a.y //@extractmethod("sum", "a.y"),extractfunc("sum", "a.y")
+	return sum       //@extractmethod("return", "sum"),extractfunc("return", "sum")
+}
+
diff --git a/internal/lsp/testdata/godef/a/a.go b/internal/lsp/testdata/godef/a/a.go
index 993fd86b437..5cc85527aeb 100644
--- a/internal/lsp/testdata/godef/a/a.go
+++ b/internal/lsp/testdata/godef/a/a.go
@@ -1,5 +1,5 @@
 // Package a is a package for testing go to definition.
-package a //@mark(aPackage, "a "),hover("a ", aPackage)
+package a //@mark(aPackage, "a "),hoverdef("a ", aPackage)
 
 import (
 	"fmt"
@@ -9,19 +9,19 @@ import (
 
 var (
 	// x is a variable.
-	x string //@x,hover("x", x)
+	x string //@x,hoverdef("x", x)
 )
 
 // Constant block. When I hover on h, I should see this comment.
 const (
 	// When I hover on g, I should see this comment.
-	g = 1 //@g,hover("g", g)
+	g = 1 //@g,hoverdef("g", g)
 
-	h = 2 //@h,hover("h", h)
+	h = 2 //@h,hoverdef("h", h)
 )
 
 // z is a variable too.
-var z string //@z,hover("z", z)
+var z string //@z,hoverdef("z", z)
 
 type A string //@mark(AString, "A")
 
@@ -33,14 +33,14 @@ func AStuff() { //@AStuff
 	var err error         //@err
 	fmt.Printf("%v", err) //@godef("err", err)
 
-	var y string       //@string,hover("string", string)
-	_ = make([]int, 0) //@make,hover("make", make)
+	var y string       //@string,hoverdef("string", string)
+	_ = make([]int, 0) //@make,hoverdef("make", make)
 
 	var mu sync.Mutex
-	mu.Lock() //@Lock,hover("Lock", Lock)
+	mu.Lock() //@Lock,hoverdef("Lock", Lock)
 
-	var typ *types.Named //@mark(typesImport, "types"),hover("types", typesImport)
-	typ.Obj().Name()     //@Name,hover("Name", Name)
+	var typ *types.Named //@mark(typesImport, "types"),hoverdef("types", typesImport)
+	typ.Obj().Name()     //@Name,hoverdef("Name", Name)
 }
 
 type A struct {
@@ -76,7 +76,7 @@ type J interface {
 func _() {
 	// 1st type declaration block
 	type (
-		a struct { //@mark(declBlockA, "a"),hover("a", declBlockA)
+		a struct { //@mark(declBlockA, "a"),hoverdef("a", declBlockA)
 			x string
 		}
 	)
@@ -84,21 +84,21 @@ func _() {
 	// 2nd type declaration block
 	type (
 		// b has a comment
-		b struct{} //@mark(declBlockB, "b"),hover("b", declBlockB)
+		b struct{} //@mark(declBlockB, "b"),hoverdef("b", declBlockB)
 	)
 
 	// 3rd type declaration block
 	type (
 		// c is a struct
-		c struct { //@mark(declBlockC, "c"),hover("c", declBlockC)
+		c struct { //@mark(declBlockC, "c"),hoverdef("c", declBlockC)
 			f string
 		}
 
-		d string //@mark(declBlockD, "d"),hover("d", declBlockD)
+		d string //@mark(declBlockD, "d"),hoverdef("d", declBlockD)
 	)
 
 	type (
-		e struct { //@mark(declBlockE, "e"),hover("e", declBlockE)
+		e struct { //@mark(declBlockE, "e"),hoverdef("e", declBlockE)
 			f float64
 		} // e has a comment
 	)
diff --git a/internal/lsp/testdata/godef/a/a.go.golden b/internal/lsp/testdata/godef/a/a.go.golden
index c26829350b7..182928eebbc 100644
--- a/internal/lsp/testdata/godef/a/a.go.golden
+++ b/internal/lsp/testdata/godef/a/a.go.golden
@@ -1,4 +1,4 @@
--- Lock-hover --
+-- Lock-hoverdef --
 ```go
 func (*sync.Mutex).Lock()
 ```
@@ -6,7 +6,7 @@ func (*sync.Mutex).Lock()
 [`(sync.Mutex).Lock` on pkg.go.dev](https://pkg.go.dev/sync?utm_source=gopls#Mutex.Lock)
 
 Lock locks m\.
--- Name-hover --
+-- Name-hoverdef --
 ```go
 func (*types.object).Name() string
 ```
@@ -38,7 +38,7 @@ func Random() int
 	"description": "```go\nfunc Random() int\n```\n\n[`a.Random` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Random)"
 }
 
--- Random-hover --
+-- Random-hoverdef --
 ```go
 func Random() int
 ```
@@ -68,15 +68,15 @@ func Random2(y int) int
 	"description": "```go\nfunc Random2(y int) int\n```\n\n[`a.Random2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Random2)"
 }
 
--- Random2-hover --
+-- Random2-hoverdef --
 ```go
 func Random2(y int) int
 ```
 
 [`a.Random2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Random2)
--- aPackage-hover --
+-- aPackage-hoverdef --
 Package a is a package for testing go to definition\.
--- declBlockA-hover --
+-- declBlockA-hoverdef --
 ```go
 type a struct {
 	x string
@@ -84,13 +84,13 @@ type a struct {
 ```
 
 1st type declaration block
--- declBlockB-hover --
+-- declBlockB-hoverdef --
 ```go
 type b struct{}
 ```
 
 b has a comment
--- declBlockC-hover --
+-- declBlockC-hoverdef --
 ```go
 type c struct {
 	f string
@@ -98,13 +98,13 @@ type c struct {
 ```
 
 c is a struct
--- declBlockD-hover --
+-- declBlockD-hoverdef --
 ```go
 type d string
 ```
 
 3rd type declaration block
--- declBlockE-hover --
+-- declBlockE-hoverdef --
 ```go
 type e struct {
 	f float64
@@ -125,36 +125,36 @@ var err error
 		"start": {
 			"line": 33,
 			"column": 6,
-			"offset": 597
+			"offset": 612
 		},
 		"end": {
 			"line": 33,
 			"column": 9,
-			"offset": 600
+			"offset": 615
 		}
 	},
 	"description": "```go\nvar err error\n```\n\n\\@err"
 }
 
--- err-hover --
+-- err-hoverdef --
 ```go
 var err error
 ```
 
 \@err
--- g-hover --
+-- g-hoverdef --
 ```go
 const g untyped int = 1
 ```
 
 When I hover on g, I should see this comment\.
--- h-hover --
+-- h-hoverdef --
 ```go
 const h untyped int = 2
 ```
 
 Constant block\.
--- make-hover --
+-- make-hoverdef --
 ```go
 func(t Type, size ...IntegerType) Type
 ```
@@ -162,23 +162,23 @@ func(t Type, size ...IntegerType) Type
 [`make` on pkg.go.dev](https://pkg.go.dev/builtin?utm_source=gopls#make)
 
 The make built\-in function allocates and initializes an object of type slice, map, or chan \(only\)\.
--- string-hover --
+-- string-hoverdef --
 ```go
 string
 ```
--- typesImport-hover --
+-- typesImport-hoverdef --
 ```go
 package types ("go/types")
 ```
 
 [`types` on pkg.go.dev](https://pkg.go.dev/go/types?utm_source=gopls)
--- x-hover --
+-- x-hoverdef --
 ```go
 var x string
 ```
 
 x is a variable\.
--- z-hover --
+-- z-hoverdef --
 ```go
 var z string
 ```
diff --git a/internal/lsp/testdata/godef/a/a_test.go.golden b/internal/lsp/testdata/godef/a/a_test.go.golden
index ac50b90b95d..e5cb3d799cc 100644
--- a/internal/lsp/testdata/godef/a/a_test.go.golden
+++ b/internal/lsp/testdata/godef/a/a_test.go.golden
@@ -20,7 +20,7 @@ func TestA(t *testing.T)
 	"description": "```go\nfunc TestA(t *testing.T)\n```"
 }
 
--- TestA-hover --
+-- TestA-hoverdef --
 ```go
 func TestA(t *testing.T)
 ```
diff --git a/internal/lsp/testdata/godef/a/a_x_test.go.golden b/internal/lsp/testdata/godef/a/a_x_test.go.golden
index dd1d7401647..2e3064794f2 100644
--- a/internal/lsp/testdata/godef/a/a_x_test.go.golden
+++ b/internal/lsp/testdata/godef/a/a_x_test.go.golden
@@ -20,7 +20,7 @@ func TestA2(t *testing.T)
 	"description": "```go\nfunc TestA2(t *testing.T)\n```"
 }
 
--- TestA2-hover --
+-- TestA2-hoverdef --
 ```go
 func TestA2(t *testing.T)
 ```
diff --git a/internal/lsp/testdata/godef/a/d.go b/internal/lsp/testdata/godef/a/d.go
index d20bdad9882..2da8d058edf 100644
--- a/internal/lsp/testdata/godef/a/d.go
+++ b/internal/lsp/testdata/godef/a/d.go
@@ -1,4 +1,4 @@
-package a //@mark(a, "a "),hover("a ", a)
+package a //@mark(a, "a "),hoverdef("a ", a)
 
 import "fmt"
 
diff --git a/internal/lsp/testdata/godef/a/d.go.golden b/internal/lsp/testdata/godef/a/d.go.golden
index d80c14a9dfb..23c7da1ec4c 100644
--- a/internal/lsp/testdata/godef/a/d.go.golden
+++ b/internal/lsp/testdata/godef/a/d.go.golden
@@ -13,18 +13,18 @@ field Member string
 		"start": {
 			"line": 6,
 			"column": 2,
-			"offset": 87
+			"offset": 90
 		},
 		"end": {
 			"line": 6,
 			"column": 8,
-			"offset": 93
+			"offset": 96
 		}
 	},
 	"description": "```go\nfield Member string\n```\n\n[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)\n\n\\@Member"
 }
 
--- Member-hover --
+-- Member-hoverdef --
 ```go
 field Member string
 ```
@@ -45,18 +45,18 @@ func (Thing).Method(i int) string
 		"start": {
 			"line": 15,
 			"column": 16,
-			"offset": 216
+			"offset": 219
 		},
 		"end": {
 			"line": 15,
 			"column": 22,
-			"offset": 222
+			"offset": 225
 		}
 	},
 	"description": "```go\nfunc (Thing).Method(i int) string\n```\n\n[`(a.Thing).Method` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Method)"
 }
 
--- Method-hover --
+-- Method-hoverdef --
 ```go
 func (Thing).Method(i int) string
 ```
@@ -77,18 +77,18 @@ var Other Thing
 		"start": {
 			"line": 9,
 			"column": 5,
-			"offset": 118
+			"offset": 121
 		},
 		"end": {
 			"line": 9,
 			"column": 10,
-			"offset": 123
+			"offset": 126
 		}
 	},
 	"description": "```go\nvar Other Thing\n```\n\n[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)\n\n\\@Other"
 }
 
--- Other-hover --
+-- Other-hoverdef --
 ```go
 var Other Thing
 ```
@@ -111,18 +111,18 @@ type Thing struct {
 		"start": {
 			"line": 5,
 			"column": 6,
-			"offset": 62
+			"offset": 65
 		},
 		"end": {
 			"line": 5,
 			"column": 11,
-			"offset": 67
+			"offset": 70
 		}
 	},
 	"description": "```go\ntype Thing struct {\n\tMember string //@Member\n}\n```\n\n[`a.Thing` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing)"
 }
 
--- Thing-hover --
+-- Thing-hoverdef --
 ```go
 type Thing struct {
 	Member string //@Member
@@ -143,22 +143,22 @@ func Things(val []string) []Thing
 		"start": {
 			"line": 11,
 			"column": 6,
-			"offset": 145
+			"offset": 148
 		},
 		"end": {
 			"line": 11,
 			"column": 12,
-			"offset": 151
+			"offset": 154
 		}
 	},
 	"description": "```go\nfunc Things(val []string) []Thing\n```\n\n[`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)"
 }
 
--- Things-hover --
+-- Things-hoverdef --
 ```go
 func Things(val []string) []Thing
 ```
 
 [`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)
--- a-hover --
+-- a-hoverdef --
 Package a is a package for testing go to definition\.
diff --git a/internal/lsp/testdata/godef/a/f.go b/internal/lsp/testdata/godef/a/f.go
index 2d3eefcfbc1..589c45fc1ae 100644
--- a/internal/lsp/testdata/godef/a/f.go
+++ b/internal/lsp/testdata/godef/a/f.go
@@ -7,9 +7,9 @@ func TypeStuff() { //@Stuff
 
 	switch y := interface{}(x).(type) { //@mark(switchY, "y"),godef("y", switchY)
 	case int: //@mark(intY, "int")
-		fmt.Printf("%v", y) //@hover("y", intY)
+		fmt.Printf("%v", y) //@hoverdef("y", intY)
 	case string: //@mark(stringY, "string")
-		fmt.Printf("%v", y) //@hover("y", stringY)
+		fmt.Printf("%v", y) //@hoverdef("y", stringY)
 	}
 
 }
diff --git a/internal/lsp/testdata/godef/a/f.go.golden b/internal/lsp/testdata/godef/a/f.go.golden
index 6c84b4d5fa6..a084356c06b 100644
--- a/internal/lsp/testdata/godef/a/f.go.golden
+++ b/internal/lsp/testdata/godef/a/f.go.golden
@@ -1,8 +1,8 @@
--- intY-hover --
+-- intY-hoverdef --
 ```go
 var y int
 ```
--- stringY-hover --
+-- stringY-hoverdef --
 ```go
 var y string
 ```
@@ -28,7 +28,7 @@ var y interface{}
 	"description": "```go\nvar y interface{}\n```"
 }
 
--- switchY-hover --
+-- switchY-hoverdef --
 ```go
 var y interface{}
 ```
diff --git a/internal/lsp/testdata/godef/a/g.go b/internal/lsp/testdata/godef/a/g.go
index 4f31857e393..dfef2fb8040 100644
--- a/internal/lsp/testdata/godef/a/g.go
+++ b/internal/lsp/testdata/godef/a/g.go
@@ -3,4 +3,4 @@ package a
 import "time"
 
 // dur is a constant of type time.Duration.
-const dur = 15*time.Minute + 10*time.Second + 350*time.Millisecond //@dur,hover("dur", dur)
+const dur = 15*time.Minute + 10*time.Second + 350*time.Millisecond //@dur,hoverdef("dur", dur)
diff --git a/internal/lsp/testdata/godef/a/g.go.golden b/internal/lsp/testdata/godef/a/g.go.golden
index d46ff048bd8..b7ed7392806 100644
--- a/internal/lsp/testdata/godef/a/g.go.golden
+++ b/internal/lsp/testdata/godef/a/g.go.golden
@@ -1,4 +1,4 @@
--- dur-hover --
+-- dur-hoverdef --
 ```go
 const dur time.Duration = 910350000000 // 15m10.35s
 ```
diff --git a/internal/lsp/testdata/godef/a/h.go b/internal/lsp/testdata/godef/a/h.go
index efe7d4ec12f..5a5dcc6784d 100644
--- a/internal/lsp/testdata/godef/a/h.go
+++ b/internal/lsp/testdata/godef/a/h.go
@@ -25,9 +25,9 @@ func _() {
 	}
 
 	var t s
-	_ = t.nested.number  //@hover("number", nestedNumber)
-	_ = t.nested2[0].str //@hover("str", nestedString)
-	_ = t.x.x.x.x.x.m    //@hover("m", nestedMap)
+	_ = t.nested.number  //@hoverdef("number", nestedNumber)
+	_ = t.nested2[0].str //@hoverdef("str", nestedString)
+	_ = t.x.x.x.x.x.m    //@hoverdef("m", nestedMap)
 }
 
 func _() {
@@ -40,9 +40,9 @@ func _() {
 			c int //@mark(structC, "c")
 		}
 	}
-	_ = s.a   //@hover("a", structA)
-	_ = s.b   //@hover("b", structB)
-	_ = s.b.c //@hover("c", structC)
+	_ = s.a   //@hoverdef("a", structA)
+	_ = s.b   //@hoverdef("b", structB)
+	_ = s.b.c //@hoverdef("c", structC)
 
 	var arr []struct {
 		// d field
@@ -53,9 +53,9 @@ func _() {
 			f int //@mark(arrF, "f")
 		}
 	}
-	_ = arr[0].d   //@hover("d", arrD)
-	_ = arr[0].e   //@hover("e", arrE)
-	_ = arr[0].e.f //@hover("f", arrF)
+	_ = arr[0].d   //@hoverdef("d", arrD)
+	_ = arr[0].e   //@hoverdef("e", arrE)
+	_ = arr[0].e.f //@hoverdef("f", arrF)
 
 	var complex []struct {
 		c <-chan map[string][]struct {
@@ -68,16 +68,16 @@ func _() {
 			}
 		}
 	}
-	_ = (<-complex[0].c)["0"][0].h   //@hover("h", complexH)
-	_ = (<-complex[0].c)["0"][0].i   //@hover("i", complexI)
-	_ = (<-complex[0].c)["0"][0].i.j //@hover("j", complexJ)
+	_ = (<-complex[0].c)["0"][0].h   //@hoverdef("h", complexH)
+	_ = (<-complex[0].c)["0"][0].i   //@hoverdef("i", complexI)
+	_ = (<-complex[0].c)["0"][0].i.j //@hoverdef("j", complexJ)
 
 	var mapWithStructKey map[struct {
 		// X key field
 		x []string //@mark(mapStructKeyX, "x")
 	}]int
 	for k := range mapWithStructKey {
-		_ = k.x //@hover("x", mapStructKeyX)
+		_ = k.x //@hoverdef("x", mapStructKeyX)
 	}
 
 	var mapWithStructKeyAndValue map[struct {
@@ -90,15 +90,15 @@ func _() {
 	for k, v := range mapWithStructKeyAndValue {
 		// TODO: we don't show docs for y field because both map key and value
 		// are structs. And in this case, we parse only map value
-		_ = k.y //@hover("y", mapStructKeyY)
-		_ = v.x //@hover("x", mapStructValueX)
+		_ = k.y //@hoverdef("y", mapStructKeyY)
+		_ = v.x //@hoverdef("x", mapStructValueX)
 	}
 
 	var i []map[string]interface {
 		// open method comment
 		open() error //@mark(openMethod, "open")
 	}
-	i[0]["1"].open() //@hover("open", openMethod)
+	i[0]["1"].open() //@hoverdef("open", openMethod)
 }
 
 func _() {
@@ -106,7 +106,7 @@ func _() {
 		// test description
 		desc string //@mark(testDescription, "desc")
 	}{}
-	_ = test.desc //@hover("desc", testDescription)
+	_ = test.desc //@hoverdef("desc", testDescription)
 
 	for _, tt := range []struct {
 		// test input
@@ -123,11 +123,11 @@ func _() {
 			}
 		}
 	}{} {
-		_ = tt.in               //@hover("in", testInput)
-		_ = tt.in["0"][0].key   //@hover("key", testInputKey)
-		_ = tt.in["0"][0].value //@hover("value", testInputValue)
+		_ = tt.in               //@hoverdef("in", testInput)
+		_ = tt.in["0"][0].key   //@hoverdef("key", testInputKey)
+		_ = tt.in["0"][0].value //@hoverdef("value", testInputValue)
 
-		_ = (<-tt.result.v).value //@hover("value", testResultValue)
+		_ = (<-tt.result.v).value //@hoverdef("value", testResultValue)
 	}
 }
 
@@ -142,6 +142,6 @@ func _() {
 	}
 
 	r := getPoints()
-	r[0].x //@hover("x", returnX)
-	r[0].y //@hover("y", returnY)
+	r[0].x //@hoverdef("x", returnX)
+	r[0].y //@hoverdef("y", returnY)
 }
diff --git a/internal/lsp/testdata/godef/a/h.go.golden b/internal/lsp/testdata/godef/a/h.go.golden
index 3525d4cfde0..4b27211e9aa 100644
--- a/internal/lsp/testdata/godef/a/h.go.golden
+++ b/internal/lsp/testdata/godef/a/h.go.golden
@@ -1,134 +1,134 @@
--- arrD-hover --
+-- arrD-hoverdef --
 ```go
 field d int
 ```
 
 d field
--- arrE-hover --
+-- arrE-hoverdef --
 ```go
 field e struct{f int}
 ```
 
 e nested struct
--- arrF-hover --
+-- arrF-hoverdef --
 ```go
 field f int
 ```
 
 f field of nested struct
--- complexH-hover --
+-- complexH-hoverdef --
 ```go
 field h int
 ```
 
 h field
--- complexI-hover --
+-- complexI-hoverdef --
 ```go
 field i struct{j int}
 ```
 
 i nested struct
--- complexJ-hover --
+-- complexJ-hoverdef --
 ```go
 field j int
 ```
 
 j field of nested struct
--- mapStructKeyX-hover --
+-- mapStructKeyX-hoverdef --
 ```go
 field x []string
 ```
 
 X key field
--- mapStructKeyY-hover --
+-- mapStructKeyY-hoverdef --
 ```go
 field y string
 ```
--- mapStructValueX-hover --
+-- mapStructValueX-hoverdef --
 ```go
 field x string
 ```
 
 X value field
--- nestedMap-hover --
+-- nestedMap-hoverdef --
 ```go
 field m map[string]float64
 ```
 
 nested map
--- nestedNumber-hover --
+-- nestedNumber-hoverdef --
 ```go
 field number int64
 ```
 
 nested number
--- nestedString-hover --
+-- nestedString-hoverdef --
 ```go
 field str string
 ```
 
 nested string
--- openMethod-hover --
+-- openMethod-hoverdef --
 ```go
 func (interface).open() error
 ```
 
 open method comment
--- returnX-hover --
+-- returnX-hoverdef --
 ```go
 field x int
 ```
 
 X coord
--- returnY-hover --
+-- returnY-hoverdef --
 ```go
 field y int
 ```
 
 Y coord
--- structA-hover --
+-- structA-hoverdef --
 ```go
 field a int
 ```
 
 a field
--- structB-hover --
+-- structB-hoverdef --
 ```go
 field b struct{c int}
 ```
 
 b nested struct
--- structC-hover --
+-- structC-hoverdef --
 ```go
 field c int
 ```
 
 c field of nested struct
--- testDescription-hover --
+-- testDescription-hoverdef --
 ```go
 field desc string
 ```
 
 test description
--- testInput-hover --
+-- testInput-hoverdef --
 ```go
 field in map[string][]struct{key string; value interface{}}
 ```
 
 test input
--- testInputKey-hover --
+-- testInputKey-hoverdef --
 ```go
 field key string
 ```
 
 test key
--- testInputValue-hover --
+-- testInputValue-hoverdef --
 ```go
 field value interface{}
 ```
 
 test value
--- testResultValue-hover --
+-- testResultValue-hoverdef --
 ```go
 field value int
 ```
diff --git a/internal/lsp/testdata/godef/a/random.go.golden b/internal/lsp/testdata/godef/a/random.go.golden
index 0f99a52f342..381a11acee8 100644
--- a/internal/lsp/testdata/godef/a/random.go.golden
+++ b/internal/lsp/testdata/godef/a/random.go.golden
@@ -22,7 +22,7 @@ func (*Pos).Sum() int
 	"description": "```go\nfunc (*Pos).Sum() int\n```\n\n[`(a.Pos).Sum` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Pos.Sum)"
 }
 
--- PosSum-hover --
+-- PosSum-hoverdef --
 ```go
 func (*Pos).Sum() int
 ```
@@ -52,7 +52,7 @@ field x int
 	"description": "```go\nfield x int\n```\n\n\\@mark\\(PosX, \\\"x\\\"\\),mark\\(PosY, \\\"y\\\"\\)"
 }
 
--- PosX-hover --
+-- PosX-hoverdef --
 ```go
 field x int
 ```
@@ -80,7 +80,7 @@ var y int
 	"description": "```go\nvar y int\n```"
 }
 
--- RandomParamY-hover --
+-- RandomParamY-hoverdef --
 ```go
 var y int
 ```
@@ -106,7 +106,7 @@ field field string
 	"description": "```go\nfield field string\n```"
 }
 
--- TypField-hover --
+-- TypField-hoverdef --
 ```go
 field field string
 ```
diff --git a/internal/lsp/testdata/godef/b/b.go b/internal/lsp/testdata/godef/b/b.go
index 23d908f1f8d..f9c1d64024b 100644
--- a/internal/lsp/testdata/godef/b/b.go
+++ b/internal/lsp/testdata/godef/b/b.go
@@ -13,13 +13,13 @@ type Embed struct {
 
 func _() {
 	e := Embed{}
-	e.Hi()      //@hover("Hi", AHi)
-	e.B()       //@hover("B", AB)
-	e.Field     //@hover("Field", AField)
-	e.Field2    //@hover("Field2", AField2)
-	e.Hello()   //@hover("Hello", AHello)
-	e.Hey()     //@hover("Hey", AHey)
-	e.Goodbye() //@hover("Goodbye", AGoodbye)
+	e.Hi()      //@hoverdef("Hi", AHi)
+	e.B()       //@hoverdef("B", AB)
+	e.Field     //@hoverdef("Field", AField)
+	e.Field2    //@hoverdef("Field2", AField2)
+	e.Hello()   //@hoverdef("Hello", AHello)
+	e.Hey()     //@hoverdef("Hey", AHey)
+	e.Goodbye() //@hoverdef("Goodbye", AGoodbye)
 }
 
 type aAlias = a.A //@mark(aAlias, "aAlias")
diff --git a/internal/lsp/testdata/godef/b/b.go.golden b/internal/lsp/testdata/godef/b/b.go.golden
index 553718075ff..7f05a70ce10 100644
--- a/internal/lsp/testdata/godef/b/b.go.golden
+++ b/internal/lsp/testdata/godef/b/b.go.golden
@@ -1,4 +1,4 @@
--- AB-hover --
+-- AB-hoverdef --
 ```go
 func (a.I).B()
 ```
@@ -6,7 +6,7 @@ func (a.I).B()
 [`(a.I).B` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#I.B)
 
 \@mark\(AB, \"B\"\)
--- AField-hover --
+-- AField-hoverdef --
 ```go
 field Field int
 ```
@@ -14,7 +14,7 @@ field Field int
 [`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#S.Field)
 
 \@mark\(AField, \"Field\"\)
--- AField2-hover --
+-- AField2-hoverdef --
 ```go
 field Field2 int
 ```
@@ -22,7 +22,7 @@ field Field2 int
 [`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#R.Field2)
 
 \@mark\(AField2, \"Field2\"\)
--- AGoodbye-hover --
+-- AGoodbye-hoverdef --
 ```go
 func (a.H).Goodbye()
 ```
@@ -30,7 +30,7 @@ func (a.H).Goodbye()
 [`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#H.Goodbye)
 
 \@mark\(AGoodbye, \"Goodbye\"\)
--- AHello-hover --
+-- AHello-hoverdef --
 ```go
 func (a.J).Hello()
 ```
@@ -38,13 +38,13 @@ func (a.J).Hello()
 [`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#J.Hello)
 
 \@mark\(AHello, \"Hello\"\)
--- AHey-hover --
+-- AHey-hoverdef --
 ```go
 func (a.R).Hey()
 ```
 
 [`(a.R).Hey` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#R.Hey)
--- AHi-hover --
+-- AHi-hoverdef --
 ```go
 func (a.A).Hi()
 ```
@@ -74,7 +74,7 @@ package a ("golang.org/x/tools/internal/lsp/godef/a")
 	"description": "```go\npackage a (\"golang.org/x/tools/internal/lsp/godef/a\")\n```\n\n[`a` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls)"
 }
 
--- AImport-hover --
+-- AImport-hoverdef --
 ```go
 package a ("golang.org/x/tools/internal/lsp/godef/a")
 ```
@@ -95,18 +95,18 @@ type A string
 		"start": {
 			"line": 26,
 			"column": 6,
-			"offset": 452
+			"offset": 467
 		},
 		"end": {
 			"line": 26,
 			"column": 7,
-			"offset": 453
+			"offset": 468
 		}
 	},
 	"description": "```go\ntype A string\n```\n\n[`a.A` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#A)\n\n\\@mark\\(AString, \\\"A\\\"\\)"
 }
 
--- AString-hover --
+-- AString-hoverdef --
 ```go
 type A string
 ```
@@ -127,18 +127,18 @@ func a.AStuff()
 		"start": {
 			"line": 28,
 			"column": 6,
-			"offset": 489
+			"offset": 504
 		},
 		"end": {
 			"line": 28,
 			"column": 12,
-			"offset": 495
+			"offset": 510
 		}
 	},
 	"description": "```go\nfunc a.AStuff()\n```\n\n[`a.AStuff` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#AStuff)"
 }
 
--- AStuff-hover --
+-- AStuff-hoverdef --
 ```go
 func a.AStuff()
 ```
@@ -162,18 +162,18 @@ type S1 struct {
 		"start": {
 			"line": 27,
 			"column": 6,
-			"offset": 566
+			"offset": 587
 		},
 		"end": {
 			"line": 27,
 			"column": 8,
-			"offset": 568
+			"offset": 589
 		}
 	},
 	"description": "```go\ntype S1 struct {\n\tF1     int //@mark(S1F1, \"F1\")\n\tS2         //@godef(\"S2\", S2),mark(S1S2, \"S2\")\n\ta.A        //@godef(\"A\", AString)\n\taAlias     //@godef(\"a\", aAlias)\n}\n```\n\n[`b.S1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1)"
 }
 
--- S1-hover --
+-- S1-hoverdef --
 ```go
 type S1 struct {
 	F1     int //@mark(S1F1, "F1")
@@ -199,18 +199,18 @@ field F1 int
 		"start": {
 			"line": 28,
 			"column": 2,
-			"offset": 585
+			"offset": 606
 		},
 		"end": {
 			"line": 28,
 			"column": 4,
-			"offset": 587
+			"offset": 608
 		}
 	},
 	"description": "```go\nfield F1 int\n```\n\n[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)\n\n\\@mark\\(S1F1, \\\"F1\\\"\\)"
 }
 
--- S1F1-hover --
+-- S1F1-hoverdef --
 ```go
 field F1 int
 ```
@@ -233,18 +233,18 @@ field S2 S2
 		"start": {
 			"line": 29,
 			"column": 2,
-			"offset": 617
+			"offset": 638
 		},
 		"end": {
 			"line": 29,
 			"column": 4,
-			"offset": 619
+			"offset": 640
 		}
 	},
 	"description": "```go\nfield S2 S2\n```\n\n[`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.S2)\n\n\\@godef\\(\\\"S2\\\", S2\\),mark\\(S1S2, \\\"S2\\\"\\)"
 }
 
--- S1S2-hover --
+-- S1S2-hoverdef --
 ```go
 field S2 S2
 ```
@@ -269,18 +269,18 @@ type S2 struct {
 		"start": {
 			"line": 34,
 			"column": 6,
-			"offset": 741
+			"offset": 762
 		},
 		"end": {
 			"line": 34,
 			"column": 8,
-			"offset": 743
+			"offset": 764
 		}
 	},
 	"description": "```go\ntype S2 struct {\n\tF1   string //@mark(S2F1, \"F1\")\n\tF2   int    //@mark(S2F2, \"F2\")\n\t*a.A        //@godef(\"A\", AString),godef(\"a\",AImport)\n}\n```\n\n[`b.S2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2)"
 }
 
--- S2-hover --
+-- S2-hoverdef --
 ```go
 type S2 struct {
 	F1   string //@mark(S2F1, "F1")
@@ -305,18 +305,18 @@ field F1 string
 		"start": {
 			"line": 35,
 			"column": 2,
-			"offset": 760
+			"offset": 781
 		},
 		"end": {
 			"line": 35,
 			"column": 4,
-			"offset": 762
+			"offset": 783
 		}
 	},
 	"description": "```go\nfield F1 string\n```\n\n[`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F1)\n\n\\@mark\\(S2F1, \\\"F1\\\"\\)"
 }
 
--- S2F1-hover --
+-- S2F1-hoverdef --
 ```go
 field F1 string
 ```
@@ -339,18 +339,18 @@ field F2 int
 		"start": {
 			"line": 36,
 			"column": 2,
-			"offset": 793
+			"offset": 814
 		},
 		"end": {
 			"line": 36,
 			"column": 4,
-			"offset": 795
+			"offset": 816
 		}
 	},
 	"description": "```go\nfield F2 int\n```\n\n[`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S2.F2)\n\n\\@mark\\(S2F2, \\\"F2\\\"\\)"
 }
 
--- S2F2-hover --
+-- S2F2-hoverdef --
 ```go
 field F2 int
 ```
@@ -371,18 +371,18 @@ type aAlias = a.A
 		"start": {
 			"line": 25,
 			"column": 6,
-			"offset": 521
+			"offset": 542
 		},
 		"end": {
 			"line": 25,
 			"column": 12,
-			"offset": 527
+			"offset": 548
 		}
 	},
 	"description": "```go\ntype aAlias = a.A\n```\n\n\\@mark\\(aAlias, \\\"aAlias\\\"\\)"
 }
 
--- aAlias-hover --
+-- aAlias-hoverdef --
 ```go
 type aAlias = a.A
 ```
@@ -403,18 +403,18 @@ const X untyped int = 0
 		"start": {
 			"line": 57,
 			"column": 7,
-			"offset": 1228
+			"offset": 1249
 		},
 		"end": {
 			"line": 57,
 			"column": 8,
-			"offset": 1229
+			"offset": 1250
 		}
 	},
 	"description": "```go\nconst X untyped int = 0\n```\n\n[`b.X` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#X)\n\n\\@mark\\(bX, \\\"X\\\"\\),godef\\(\\\"X\\\", bX\\)"
 }
 
--- bX-hover --
+-- bX-hoverdef --
 ```go
 const X untyped int = 0
 ```
@@ -446,7 +446,7 @@ package myFoo ("golang.org/x/tools/internal/lsp/foo")
 	"description": "```go\npackage myFoo (\"golang.org/x/tools/internal/lsp/foo\")\n```\n\n[`myFoo` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/foo?utm_source=gopls)"
 }
 
--- myFoo-hover --
+-- myFoo-hoverdef --
 ```go
 package myFoo ("golang.org/x/tools/internal/lsp/foo")
 ```
diff --git a/internal/lsp/testdata/godef/b/c.go.golden b/internal/lsp/testdata/godef/b/c.go.golden
index 9554c0d4355..3ae3e2d0ac9 100644
--- a/internal/lsp/testdata/godef/b/c.go.golden
+++ b/internal/lsp/testdata/godef/b/c.go.golden
@@ -16,18 +16,18 @@ type S1 struct {
 		"start": {
 			"line": 27,
 			"column": 6,
-			"offset": 566
+			"offset": 587
 		},
 		"end": {
 			"line": 27,
 			"column": 8,
-			"offset": 568
+			"offset": 589
 		}
 	},
 	"description": "```go\ntype S1 struct {\n\tF1     int //@mark(S1F1, \"F1\")\n\tS2         //@godef(\"S2\", S2),mark(S1S2, \"S2\")\n\ta.A        //@godef(\"A\", AString)\n\taAlias     //@godef(\"a\", aAlias)\n}\n```\n\n[`b.S1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1)"
 }
 
--- S1-hover --
+-- S1-hoverdef --
 ```go
 type S1 struct {
 	F1     int //@mark(S1F1, "F1")
@@ -53,18 +53,18 @@ field F1 int
 		"start": {
 			"line": 28,
 			"column": 2,
-			"offset": 585
+			"offset": 606
 		},
 		"end": {
 			"line": 28,
 			"column": 4,
-			"offset": 587
+			"offset": 608
 		}
 	},
 	"description": "```go\nfield F1 int\n```\n\n[`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/b?utm_source=gopls#S1.F1)\n\n\\@mark\\(S1F1, \\\"F1\\\"\\)"
 }
 
--- S1F1-hover --
+-- S1F1-hoverdef --
 ```go
 field F1 int
 ```
diff --git a/internal/lsp/testdata/godef/b/e.go b/internal/lsp/testdata/godef/b/e.go
index 92037ed3393..7b96cd7e8ae 100644
--- a/internal/lsp/testdata/godef/b/e.go
+++ b/internal/lsp/testdata/godef/b/e.go
@@ -22,10 +22,10 @@ godef(bFunc, Things)
 
 func _() {
 	var x interface{}      //@mark(eInterface, "interface{}")
-	switch x := x.(type) { //@hover("x", eInterface)
+	switch x := x.(type) { //@hoverdef("x", eInterface)
 	case string: //@mark(eString, "string")
-		fmt.Println(x) //@hover("x", eString)
+		fmt.Println(x) //@hoverdef("x", eString)
 	case int: //@mark(eInt, "int")
-		fmt.Println(x) //@hover("x", eInt)
+		fmt.Println(x) //@hoverdef("x", eInt)
 	}
 }
diff --git a/internal/lsp/testdata/godef/b/e.go.golden b/internal/lsp/testdata/godef/b/e.go.golden
index 13c2e0eb5dd..079ed7923cc 100644
--- a/internal/lsp/testdata/godef/b/e.go.golden
+++ b/internal/lsp/testdata/godef/b/e.go.golden
@@ -13,18 +13,18 @@ field Member string
 		"start": {
 			"line": 6,
 			"column": 2,
-			"offset": 87
+			"offset": 90
 		},
 		"end": {
 			"line": 6,
 			"column": 8,
-			"offset": 93
+			"offset": 96
 		}
 	},
 	"description": "```go\nfield Member string\n```\n\n[`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing.Member)\n\n\\@Member"
 }
 
--- Member-hover --
+-- Member-hoverdef --
 ```go
 field Member string
 ```
@@ -47,18 +47,18 @@ var a.Other a.Thing
 		"start": {
 			"line": 9,
 			"column": 5,
-			"offset": 118
+			"offset": 121
 		},
 		"end": {
 			"line": 9,
 			"column": 10,
-			"offset": 123
+			"offset": 126
 		}
 	},
 	"description": "```go\nvar a.Other a.Thing\n```\n\n[`a.Other` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Other)\n\n\\@Other"
 }
 
--- Other-hover --
+-- Other-hoverdef --
 ```go
 var a.Other a.Thing
 ```
@@ -81,18 +81,18 @@ type Thing struct {
 		"start": {
 			"line": 5,
 			"column": 6,
-			"offset": 62
+			"offset": 65
 		},
 		"end": {
 			"line": 5,
 			"column": 11,
-			"offset": 67
+			"offset": 70
 		}
 	},
 	"description": "```go\ntype Thing struct {\n\tMember string //@Member\n}\n```\n\n[`a.Thing` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Thing)"
 }
 
--- Thing-hover --
+-- Thing-hoverdef --
 ```go
 type Thing struct {
 	Member string //@Member
@@ -113,32 +113,32 @@ func a.Things(val []string) []a.Thing
 		"start": {
 			"line": 11,
 			"column": 6,
-			"offset": 145
+			"offset": 148
 		},
 		"end": {
 			"line": 11,
 			"column": 12,
-			"offset": 151
+			"offset": 154
 		}
 	},
 	"description": "```go\nfunc a.Things(val []string) []a.Thing\n```\n\n[`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)"
 }
 
--- Things-hover --
+-- Things-hoverdef --
 ```go
 func a.Things(val []string) []a.Thing
 ```
 
 [`a.Things` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#Things)
--- eInt-hover --
+-- eInt-hoverdef --
 ```go
 var x int
 ```
--- eInterface-hover --
+-- eInterface-hoverdef --
 ```go
 var x interface{}
 ```
--- eString-hover --
+-- eString-hoverdef --
 ```go
 var x string
 ```
diff --git a/internal/lsp/testdata/godef/b/h.go b/internal/lsp/testdata/godef/b/h.go
index c2776a03a51..c8cbe850f9c 100644
--- a/internal/lsp/testdata/godef/b/h.go
+++ b/internal/lsp/testdata/godef/b/h.go
@@ -4,7 +4,7 @@ import . "golang.org/x/tools/internal/lsp/godef/a"
 
 func _() {
 	// variable of type a.A
-	var _ A //@mark(AVariable, "_"),hover("_", AVariable)
+	var _ A //@mark(AVariable, "_"),hoverdef("_", AVariable)
 
-	AStuff() //@hover("AStuff", AStuff)
+	AStuff() //@hoverdef("AStuff", AStuff)
 }
diff --git a/internal/lsp/testdata/godef/b/h.go.golden b/internal/lsp/testdata/godef/b/h.go.golden
index b854dd4ab3d..f32f0264f8f 100644
--- a/internal/lsp/testdata/godef/b/h.go.golden
+++ b/internal/lsp/testdata/godef/b/h.go.golden
@@ -1,10 +1,10 @@
--- AStuff-hover --
+-- AStuff-hoverdef --
 ```go
 func AStuff()
 ```
 
 [`a.AStuff` on pkg.go.dev](https://pkg.go.dev/golang.org/x/tools/internal/lsp/godef/a?utm_source=gopls#AStuff)
--- AVariable-hover --
+-- AVariable-hoverdef --
 ```go
 var _ A
 ```
diff --git a/internal/lsp/testdata/godef/broken/unclosedIf.go.golden b/internal/lsp/testdata/godef/broken/unclosedIf.go.golden
index eac0339236c..5c3329d8b67 100644
--- a/internal/lsp/testdata/godef/broken/unclosedIf.go.golden
+++ b/internal/lsp/testdata/godef/broken/unclosedIf.go.golden
@@ -22,7 +22,7 @@ var myUnclosedIf string
 	"description": "```go\nvar myUnclosedIf string\n```\n\n\\@myUnclosedIf"
 }
 
--- myUnclosedIf-hover --
+-- myUnclosedIf-hoverdef --
 ```go
 var myUnclosedIf string
 ```
diff --git a/internal/lsp/testdata/godef/infer_generics/inferred.go b/internal/lsp/testdata/godef/infer_generics/inferred.go
index 78abf274503..2fe50e8141b 100644
--- a/internal/lsp/testdata/godef/infer_generics/inferred.go
+++ b/internal/lsp/testdata/godef/infer_generics/inferred.go
@@ -5,8 +5,8 @@ func app[S interface{ ~[]E }, E any](s S, e E) S {
 }
 
 func _() {
-	_ = app[[]int]             //@mark(constrInfer, "app"),hover("app", constrInfer)
-	_ = app[[]int, int]        //@mark(instance, "app"),hover("app", instance)
-	_ = app[[]int]([]int{}, 0) //@mark(partialInfer, "app"),hover("app", partialInfer)
-	_ = app([]int{}, 0)        //@mark(argInfer, "app"),hover("app", argInfer)
+	_ = app[[]int]             //@mark(constrInfer, "app"),hoverdef("app", constrInfer)
+	_ = app[[]int, int]        //@mark(instance, "app"),hoverdef("app", instance)
+	_ = app[[]int]([]int{}, 0) //@mark(partialInfer, "app"),hoverdef("app", partialInfer)
+	_ = app([]int{}, 0)        //@mark(argInfer, "app"),hoverdef("app", argInfer)
 }
diff --git a/internal/lsp/testdata/godef/infer_generics/inferred.go.golden b/internal/lsp/testdata/godef/infer_generics/inferred.go.golden
index 2dd97d9b6a4..081ea53dc0e 100644
--- a/internal/lsp/testdata/godef/infer_generics/inferred.go.golden
+++ b/internal/lsp/testdata/godef/infer_generics/inferred.go.golden
@@ -1,20 +1,20 @@
--- argInfer-hover --
+-- argInfer-hoverdef --
 ```go
 func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
 ```
--- constrInf-hover --
+-- constrInf-hoverdef --
 ```go
 func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
 ```
--- constrInfer-hover --
+-- constrInfer-hoverdef --
 ```go
 func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
 ```
--- instance-hover --
+-- instance-hoverdef --
 ```go
 func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
 ```
--- partialInfer-hover --
+-- partialInfer-hoverdef --
 ```go
 func app(s []int, e int) []int // func[S₁ interface{~[]E₂}, E₂ interface{}](s S₁, e E₂) S₁
 ```
diff --git a/internal/lsp/testdata/references/another/another.go b/internal/lsp/testdata/references/another/another.go
index de2ea16f829..47bda1e4acf 100644
--- a/internal/lsp/testdata/references/another/another.go
+++ b/internal/lsp/testdata/references/another/another.go
@@ -7,7 +7,7 @@ import (
 
 func _() {
 	xes := other.GetXes()
-	for _, x := range xes {
-		_ = x.Y //@mark(anotherXY, "Y"),refs("Y", typeXY, anotherXY, GetXesY)
+	for _, x := range xes { //@mark(defX, "x")
+		_ = x.Y //@mark(useX, "x"),mark(anotherXY, "Y"),refs("Y", typeXY, anotherXY, GetXesY),refs(".", defX, useX),refs("x", defX, useX)
 	}
 }
diff --git a/internal/lsp/testdata/rename/shadow/shadow.go b/internal/lsp/testdata/rename/shadow/shadow.go
new file mode 100644
index 00000000000..38329b4fea2
--- /dev/null
+++ b/internal/lsp/testdata/rename/shadow/shadow.go
@@ -0,0 +1,20 @@
+package shadow
+
+func _() {
+	a := true
+	b, c, _ := A(), B(), D() //@rename("A", "a"),rename("B", "b"),rename("b", "c"),rename("D", "d")
+	d := false
+	_, _, _, _ = a, b, c, d
+}
+
+func A() int {
+	return 0
+}
+
+func B() int {
+	return 0
+}
+
+func D() int {
+	return 0
+}
diff --git a/internal/lsp/testdata/rename/shadow/shadow.go.golden b/internal/lsp/testdata/rename/shadow/shadow.go.golden
new file mode 100644
index 00000000000..6281bcdd91d
--- /dev/null
+++ b/internal/lsp/testdata/rename/shadow/shadow.go.golden
@@ -0,0 +1,48 @@
+-- a-rename --
+renaming this func "A" to "a"	would cause this reference to become shadowed	by this intervening var definition
+-- b-rename --
+package shadow
+
+func _() {
+	a := true
+	b, c, _ := A(), b(), D() //@rename("A", "a"),rename("B", "b"),rename("b", "c"),rename("D", "d")
+	d := false
+	_, _, _, _ = a, b, c, d
+}
+
+func A() int {
+	return 0
+}
+
+func b() int {
+	return 0
+}
+
+func D() int {
+	return 0
+}
+
+-- c-rename --
+renaming this var "b" to "c"	conflicts with var in same block
+-- d-rename --
+package shadow
+
+func _() {
+	a := true
+	b, c, _ := A(), B(), d() //@rename("A", "a"),rename("B", "b"),rename("b", "c"),rename("D", "d")
+	d := false
+	_, _, _, _ = a, b, c, d
+}
+
+func A() int {
+	return 0
+}
+
+func B() int {
+	return 0
+}
+
+func d() int {
+	return 0
+}
+
diff --git a/internal/lsp/testdata/semantic/a.go b/internal/lsp/testdata/semantic/a.go
index 756c56ec98a..54d6c8a62fa 100644
--- a/internal/lsp/testdata/semantic/a.go
+++ b/internal/lsp/testdata/semantic/a.go
@@ -55,6 +55,8 @@ func (a *A) f() bool {
 	w := b[4:]
 	j := len(x)
 	j--
+	q := []interface{}{j, 23i, &y}
+	g(q...)
 	return true
 }
 
@@ -74,5 +76,6 @@ Never:
 	if !ok {
 		switch x := vv[0].(type) {
 		}
+		goto Never
 	}
 }
diff --git a/internal/lsp/testdata/semantic/a.go.golden b/internal/lsp/testdata/semantic/a.go.golden
index 512a83eade9..4622ae4d742 100644
--- a/internal/lsp/testdata/semantic/a.go.golden
+++ b/internal/lsp/testdata/semantic/a.go.golden
@@ -2,7 +2,7 @@
 /*⇒7,keyword,[]*/package /*⇒14,namespace,[]*/semantictokens /*⇒16,comment,[]*///@ semantic("")
 
 /*⇒6,keyword,[]*/import (
-	_ "encoding/utf8"/*⇐4,namespace,[]*/
+	_ "encoding/utf8"
 	/*⇒3,namespace,[]*/utf "encoding/utf8"
 	"fmt"/*⇐3,namespace,[]*/ /*⇒19,comment,[]*///@ semantic("fmt")
 	. "fmt"
@@ -31,15 +31,15 @@
 }
 /*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/B /*⇒9,keyword,[]*/interface {
 	/*⇒1,type,[]*/A
-	/*⇒3,member,[definition]*/sad(/*⇒3,type,[defaultLibrary]*/int) /*⇒4,type,[defaultLibrary]*/bool
+	/*⇒3,method,[definition]*/sad(/*⇒3,type,[defaultLibrary]*/int) /*⇒4,type,[defaultLibrary]*/bool
 }
 
 /*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/F /*⇒3,type,[defaultLibrary]*/int
 
-/*⇒4,keyword,[]*/func (/*⇒1,variable,[]*/a /*⇒1,operator,[]*/*/*⇒1,type,[]*/A) /*⇒1,member,[definition]*/f() /*⇒4,type,[defaultLibrary]*/bool {
+/*⇒4,keyword,[]*/func (/*⇒1,variable,[]*/a /*⇒1,operator,[]*/*/*⇒1,type,[]*/A) /*⇒1,method,[definition]*/f() /*⇒4,type,[defaultLibrary]*/bool {
 	/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/z /*⇒6,type,[defaultLibrary]*/string
 	/*⇒1,variable,[definition]*/x /*⇒2,operator,[]*/:= /*⇒5,string,[]*/"foo"
-	/*⇒1,variable,[]*/a(/*⇒1,variable,[definition]*/x)
+	/*⇒1,variable,[]*/a(/*⇒1,variable,[]*/x)
 	/*⇒1,variable,[definition]*/y /*⇒2,operator,[]*/:= /*⇒5,string,[]*/"bar" /*⇒1,operator,[]*/+ /*⇒1,variable,[]*/x
 	/*⇒6,keyword,[]*/switch /*⇒1,variable,[]*/z {
 	/*⇒4,keyword,[]*/case /*⇒4,string,[]*/"xx":
@@ -52,18 +52,20 @@
 	/*⇒3,keyword,[]*/for /*⇒1,variable,[definition]*/k, /*⇒1,variable,[definition]*/v := /*⇒5,keyword,[]*/range /*⇒1,variable,[]*/m {
 		/*⇒6,keyword,[]*/return (/*⇒1,operator,[]*/!/*⇒1,variable,[]*/k) /*⇒2,operator,[]*/&& /*⇒1,variable,[]*/v[/*⇒1,number,[]*/0] /*⇒2,operator,[]*/== /*⇒3,variable,[readonly defaultLibrary]*/nil
 	}
-	/*⇒2,variable,[]*/c2 /*⇒2,operator,[]*/<- /*⇒1,type,[]*/A./*⇒1,variable,[definition]*/X
+	/*⇒2,variable,[]*/c2 /*⇒2,operator,[]*/<- /*⇒1,type,[]*/A./*⇒1,variable,[]*/X
 	/*⇒1,variable,[definition]*/w /*⇒2,operator,[]*/:= /*⇒1,variable,[]*/b[/*⇒1,number,[]*/4:]
 	/*⇒1,variable,[definition]*/j /*⇒2,operator,[]*/:= /*⇒3,function,[defaultLibrary]*/len(/*⇒1,variable,[]*/x)
 	/*⇒1,variable,[]*/j/*⇒2,operator,[]*/--
+	/*⇒1,variable,[definition]*/q /*⇒2,operator,[]*/:= []/*⇒9,keyword,[]*/interface{}{/*⇒1,variable,[]*/j, /*⇒3,number,[]*/23i, /*⇒1,operator,[]*/&/*⇒1,variable,[]*/y}
+	/*⇒1,function,[]*/g(/*⇒1,variable,[]*/q/*⇒3,operator,[]*/...)
 	/*⇒6,keyword,[]*/return /*⇒4,variable,[readonly]*/true
 }
 
 /*⇒4,keyword,[]*/func /*⇒1,function,[definition]*/g(/*⇒2,parameter,[definition]*/vv /*⇒3,operator,[]*/.../*⇒9,keyword,[]*/interface{}) {
 	/*⇒2,variable,[definition]*/ff /*⇒2,operator,[]*/:= /*⇒4,keyword,[]*/func() {}
 	/*⇒5,keyword,[]*/defer /*⇒2,variable,[]*/ff()
-	/*⇒2,keyword,[]*/go /*⇒3,namespace,[]*/utf./*⇒9,variable,[definition]*/RuneCount(/*⇒2,string,[]*/"")
-	/*⇒2,keyword,[]*/go /*⇒4,namespace,[]*/utf8./*⇒9,function,[]*/RuneCount(/*⇒2,variable,[]*/vv.(/*⇒6,variable,[definition]*/string))
+	/*⇒2,keyword,[]*/go /*⇒3,namespace,[]*/utf./*⇒9,function,[]*/RuneCount(/*⇒2,string,[]*/"")
+	/*⇒2,keyword,[]*/go /*⇒4,namespace,[]*/utf8./*⇒9,function,[]*/RuneCount(/*⇒2,variable,[]*/vv.(/*⇒6,type,[]*/string))
 	/*⇒2,keyword,[]*/if /*⇒4,variable,[readonly]*/true {
 	} /*⇒4,keyword,[]*/else {
 	}
@@ -75,6 +77,7 @@
 	/*⇒2,keyword,[]*/if /*⇒1,operator,[]*/!/*⇒2,variable,[]*/ok {
 		/*⇒6,keyword,[]*/switch /*⇒1,variable,[definition]*/x /*⇒2,operator,[]*/:= /*⇒2,variable,[]*/vv[/*⇒1,number,[]*/0].(/*⇒4,keyword,[]*/type) {
 		}
+		/*⇒4,keyword,[]*/goto Never
 	}
 }
 
diff --git a/internal/lsp/testdata/semantic/b.go.golden b/internal/lsp/testdata/semantic/b.go.golden
index 863a68cc8f2..203f6b18932 100644
--- a/internal/lsp/testdata/semantic/b.go.golden
+++ b/internal/lsp/testdata/semantic/b.go.golden
@@ -31,6 +31,6 @@
 /*⇒4,keyword,[]*/type /*⇒2,type,[definition]*/CC /*⇒6,keyword,[]*/struct {
 	/*⇒2,variable,[definition]*/AA /*⇒3,type,[defaultLibrary]*/int
 }
-/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/D /*⇒4,keyword,[]*/func(/*⇒2,variable,[definition]*/aa /*⇒2,type,[]*/AA) (/*⇒2,variable,[definition]*/BB /*⇒5,type,[]*/error)
+/*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/D /*⇒4,keyword,[]*/func(/*⇒2,parameter,[definition]*/aa /*⇒2,type,[]*/AA) (/*⇒2,parameter,[definition]*/BB /*⇒5,type,[]*/error)
 /*⇒4,keyword,[]*/type /*⇒1,type,[definition]*/E /*⇒4,keyword,[]*/func(/*⇒2,type,[]*/AA) /*⇒2,type,[]*/BB
 
diff --git a/internal/lsp/testdata/signature/signature.go b/internal/lsp/testdata/signature/signature.go
index 05f8da2fe06..4e2b12bc419 100644
--- a/internal/lsp/testdata/signature/signature.go
+++ b/internal/lsp/testdata/signature/signature.go
@@ -47,11 +47,12 @@ func Qux() {
 		return func(int) rune { return 0 }
 	}
 
-	fn("hi", "there")    //@signature("hi", "fn(hi string, there string) func(i int) rune", 0)
+	fn("hi", "there")    //@signature("hi", "", 0)
+	fn("hi", "there")    //@signature(",", "fn(hi string, there string) func(i int) rune", 0)
 	fn("hi", "there")(1) //@signature("1", "func(i int) rune", 0)
 
 	fnPtr := &fn
-	(*fnPtr)("hi", "there") //@signature("hi", "func(hi string, there string) func(i int) rune", 0)
+	(*fnPtr)("hi", "there") //@signature(",", "func(hi string, there string) func(i int) rune", 0)
 
 	var fnIntf interface{} = Foo
 	fnIntf.(func(string, int) bool)("hi", 123) //@signature("123", "func(string, int) bool", 1)
@@ -69,8 +70,8 @@ func Qux() {
 	Foo(myFunc(123), 456) //@signature("myFunc", "Foo(a string, b int) (c bool)", 0)
 	Foo(myFunc(123), 456) //@signature("123", "myFunc(foo int) string", 0)
 
-	panic("oops!")            //@signature("oops", "panic(v interface{})", 0)
-	println("hello", "world") //@signature("world", "println(args ...Type)", 0)
+	panic("oops!")            //@signature(")", "panic(v interface{})", 0)
+	println("hello", "world") //@signature(",", "println(args ...Type)", 0)
 
 	Hello(func() {
 		//@signature("//", "", 0)
diff --git a/internal/lsp/testdata/summary.txt.golden b/internal/lsp/testdata/summary.txt.golden
index edbb4fa5686..7143365912a 100644
--- a/internal/lsp/testdata/summary.txt.golden
+++ b/internal/lsp/testdata/summary.txt.golden
@@ -14,16 +14,17 @@ FormatCount = 6
 ImportCount = 8
 SemanticTokenCount = 3
 SuggestedFixCount = 40
-FunctionExtractionCount = 18
+FunctionExtractionCount = 24
+MethodExtractionCount = 6
 DefinitionsCount = 95
 TypeDefinitionsCount = 18
 HighlightsCount = 69
-ReferencesCount = 25
-RenamesCount = 37
+ReferencesCount = 27
+RenamesCount = 41
 PrepareRenamesCount = 7
 SymbolsCount = 5
 WorkspaceSymbolsCount = 20
-SignaturesCount = 32
+SignaturesCount = 33
 LinksCount = 7
 ImplementationsCount = 14
 
diff --git a/internal/lsp/testdata/summary_generics.txt.golden b/internal/lsp/testdata/summary_go1.18.txt.golden
similarity index 82%
rename from internal/lsp/testdata/summary_generics.txt.golden
rename to internal/lsp/testdata/summary_go1.18.txt.golden
index 152f38d5733..aaeb2622b6c 100644
--- a/internal/lsp/testdata/summary_generics.txt.golden
+++ b/internal/lsp/testdata/summary_go1.18.txt.golden
@@ -14,16 +14,17 @@ FormatCount = 6
 ImportCount = 8
 SemanticTokenCount = 3
 SuggestedFixCount = 40
-FunctionExtractionCount = 18
+FunctionExtractionCount = 24
+MethodExtractionCount = 6
 DefinitionsCount = 99
 TypeDefinitionsCount = 18
 HighlightsCount = 69
-ReferencesCount = 25
-RenamesCount = 33
+ReferencesCount = 27
+RenamesCount = 41
 PrepareRenamesCount = 7
 SymbolsCount = 5
 WorkspaceSymbolsCount = 20
-SignaturesCount = 32
+SignaturesCount = 33
 LinksCount = 7
 ImplementationsCount = 14
 
diff --git a/internal/lsp/testdata/workspacesymbol/query.go.golden b/internal/lsp/testdata/workspacesymbol/query.go.golden
index 857ef3f2359..4c6d470f7be 100644
--- a/internal/lsp/testdata/workspacesymbol/query.go.golden
+++ b/internal/lsp/testdata/workspacesymbol/query.go.golden
@@ -41,7 +41,7 @@ workspacesymbol/main.go:21:2-15 main.myStruct.myStructField Field
 workspacesymbol/main.go:21:2-15 main.myStruct.myStructField Field
 
 -- workspace_symbol-casesensitive-main.myType --
-workspacesymbol/main.go:14:6-12 main.myType String
+workspacesymbol/main.go:14:6-12 main.myType Class
 workspacesymbol/main.go:18:18-26 main.myType.Blahblah Method
 
 -- workspace_symbol-casesensitive-main.myType.Blahblah --
diff --git a/internal/lsp/tests/tests.go b/internal/lsp/tests/tests.go
index f942ced3bc4..13da55994c6 100644
--- a/internal/lsp/tests/tests.go
+++ b/internal/lsp/tests/tests.go
@@ -47,7 +47,7 @@ var summaryFile = "summary.txt"
 
 func init() {
 	if typeparams.Enabled {
-		summaryFile = "summary_generics.txt"
+		summaryFile = "summary_go1.18.txt"
 	}
 }
 
@@ -70,6 +70,7 @@ type Imports []span.Span
 type SemanticTokens []span.Span
 type SuggestedFixes map[span.Span][]string
 type FunctionExtractions map[span.Span]span.Span
+type MethodExtractions map[span.Span]span.Span
 type Definitions map[span.Span]Definition
 type Implementations map[span.Span][]span.Span
 type Highlights map[span.Span][]span.Span
@@ -83,6 +84,7 @@ type WorkspaceSymbols map[WorkspaceSymbolsTestType]map[span.URI][]string
 type Signatures map[span.Span]*protocol.SignatureHelp
 type Links map[span.URI][]Link
 type AddImport map[span.URI]string
+type Hovers map[span.Span]string
 
 type Data struct {
 	Config                   packages.Config
@@ -104,6 +106,7 @@ type Data struct {
 	SemanticTokens           SemanticTokens
 	SuggestedFixes           SuggestedFixes
 	FunctionExtractions      FunctionExtractions
+	MethodExtractions        MethodExtractions
 	Definitions              Definitions
 	Implementations          Implementations
 	Highlights               Highlights
@@ -117,6 +120,7 @@ type Data struct {
 	Signatures               Signatures
 	Links                    Links
 	AddImport                AddImport
+	Hovers                   Hovers
 
 	t         testing.TB
 	fragments map[string]string
@@ -147,6 +151,7 @@ type Tests interface {
 	SemanticTokens(*testing.T, span.Span)
 	SuggestedFix(*testing.T, span.Span, []string, int)
 	FunctionExtraction(*testing.T, span.Span, span.Span)
+	MethodExtraction(*testing.T, span.Span, span.Span)
 	Definition(*testing.T, span.Span, Definition)
 	Implementation(*testing.T, span.Span, []span.Span)
 	Highlight(*testing.T, span.Span, []span.Span)
@@ -158,6 +163,7 @@ type Tests interface {
 	SignatureHelp(*testing.T, span.Span, *protocol.SignatureHelp)
 	Link(*testing.T, span.URI, []Link)
 	AddImport(*testing.T, span.URI, string)
+	Hover(*testing.T, span.Span, string)
 }
 
 type Definition struct {
@@ -298,6 +304,7 @@ func load(t testing.TB, mode string, dir string) *Data {
 		PrepareRenames:           make(PrepareRenames),
 		SuggestedFixes:           make(SuggestedFixes),
 		FunctionExtractions:      make(FunctionExtractions),
+		MethodExtractions:        make(MethodExtractions),
 		Symbols:                  make(Symbols),
 		symbolsChildren:          make(SymbolsChildren),
 		symbolInformation:        make(SymbolInformation),
@@ -305,6 +312,7 @@ func load(t testing.TB, mode string, dir string) *Data {
 		Signatures:               make(Signatures),
 		Links:                    make(Links),
 		AddImport:                make(AddImport),
+		Hovers:                   make(Hovers),
 
 		t:         t,
 		dir:       dir,
@@ -455,7 +463,8 @@ func load(t testing.TB, mode string, dir string) *Data {
 		"godef":           datum.collectDefinitions,
 		"implementations": datum.collectImplementations,
 		"typdef":          datum.collectTypeDefinitions,
-		"hover":           datum.collectHoverDefinitions,
+		"hoverdef":        datum.collectHoverDefinitions,
+		"hover":           datum.collectHovers,
 		"highlight":       datum.collectHighlights,
 		"refs":            datum.collectReferences,
 		"rename":          datum.collectRenames,
@@ -465,6 +474,7 @@ func load(t testing.TB, mode string, dir string) *Data {
 		"link":            datum.collectLinks,
 		"suggestedfix":    datum.collectSuggestedFixes,
 		"extractfunc":     datum.collectFunctionExtractions,
+		"extractmethod":   datum.collectMethodExtractions,
 		"incomingcalls":   datum.collectIncomingCalls,
 		"outgoingcalls":   datum.collectOutgoingCalls,
 		"addimport":       datum.collectAddImports,
@@ -480,7 +490,7 @@ func load(t testing.TB, mode string, dir string) *Data {
 	// Collect names for the entries that require golden files.
 	if err := datum.Exported.Expect(map[string]interface{}{
 		"godef":                        datum.collectDefinitionNames,
-		"hover":                        datum.collectDefinitionNames,
+		"hoverdef":                     datum.collectDefinitionNames,
 		"workspacesymbol":              datum.collectWorkspaceSymbols(WorkspaceSymbolsDefault),
 		"workspacesymbolfuzzy":         datum.collectWorkspaceSymbols(WorkspaceSymbolsFuzzy),
 		"workspacesymbolcasesensitive": datum.collectWorkspaceSymbols(WorkspaceSymbolsCaseSensitive),
@@ -675,6 +685,20 @@ func Run(t *testing.T, tests Tests, data *Data) {
 		}
 	})
 
+	t.Run("MethodExtraction", func(t *testing.T) {
+		t.Helper()
+		for start, end := range data.MethodExtractions {
+			// Skip this span if the -modfile flag is not available.
+			if shouldSkip(data, start.URI()) {
+				continue
+			}
+			t.Run(SpanName(start), func(t *testing.T) {
+				t.Helper()
+				tests.MethodExtraction(t, start, end)
+			})
+		}
+	})
+
 	t.Run("Definition", func(t *testing.T) {
 		t.Helper()
 		for spn, d := range data.Definitions {
@@ -711,6 +735,16 @@ func Run(t *testing.T, tests Tests, data *Data) {
 		}
 	})
 
+	t.Run("Hover", func(t *testing.T) {
+		t.Helper()
+		for pos, info := range data.Hovers {
+			t.Run(SpanName(pos), func(t *testing.T) {
+				t.Helper()
+				tests.Hover(t, pos, info)
+			})
+		}
+	})
+
 	t.Run("References", func(t *testing.T) {
 		t.Helper()
 		for src, itemList := range data.References {
@@ -895,6 +929,7 @@ func checkData(t *testing.T, data *Data) {
 	fmt.Fprintf(buf, "SemanticTokenCount = %v\n", len(data.SemanticTokens))
 	fmt.Fprintf(buf, "SuggestedFixCount = %v\n", len(data.SuggestedFixes))
 	fmt.Fprintf(buf, "FunctionExtractionCount = %v\n", len(data.FunctionExtractions))
+	fmt.Fprintf(buf, "MethodExtractionCount = %v\n", len(data.MethodExtractions))
 	fmt.Fprintf(buf, "DefinitionsCount = %v\n", definitionCount)
 	fmt.Fprintf(buf, "TypeDefinitionsCount = %v\n", typeDefinitionCount)
 	fmt.Fprintf(buf, "HighlightsCount = %v\n", len(data.Highlights))
@@ -1128,6 +1163,12 @@ func (data *Data) collectFunctionExtractions(start span.Span, end span.Span) {
 	}
 }
 
+func (data *Data) collectMethodExtractions(start span.Span, end span.Span) {
+	if _, ok := data.MethodExtractions[start]; !ok {
+		data.MethodExtractions[start] = end
+	}
+}
+
 func (data *Data) collectDefinitions(src, target span.Span) {
 	data.Definitions[src] = Definition{
 		Src: src,
@@ -1196,6 +1237,10 @@ func (data *Data) collectHoverDefinitions(src, target span.Span) {
 	}
 }
 
+func (data *Data) collectHovers(src span.Span, expected string) {
+	data.Hovers[src] = expected
+}
+
 func (data *Data) collectTypeDefinitions(src, target span.Span) {
 	data.Definitions[src] = Definition{
 		Src:    src,
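The renamed "hoverdef" marker keeps the old behavior of resolving the hover like a definition and comparing it against a golden file, while the new "hover" marker carries the expected hover text inline; "extractmethod" mirrors "extractfunc". Each implementation of the Tests interface therefore needs two more methods, MethodExtraction (shaped like FunctionExtraction) and Hover. A minimal sketch of the Hover side, assuming a hypothetical runner type with a hypothetical hoverText helper:

	// assumed imports: "testing", "golang.org/x/tools/internal/span"
	func (r *runner) Hover(t *testing.T, src span.Span, want string) {
		got := r.hoverText(t, src) // hypothetical helper returning the hover contents at src
		if got != want {
			t.Errorf("hover at %v: got %q, want %q", src, got, want)
		}
	}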
diff --git a/internal/lsp/text_synchronization.go b/internal/lsp/text_synchronization.go
index f54ca3735c3..d9a69614074 100644
--- a/internal/lsp/text_synchronization.go
+++ b/internal/lsp/text_synchronization.go
@@ -282,6 +282,7 @@ func (s *Server) processModifications(ctx context.Context, modifications []sourc
 		// produce a better error message. The actual race to the cache should be
 		// guarded by Session.viewMu.
 		s.stateMu.Unlock()
+		close(diagnoseDone)
 		return errors.New("server is shut down")
 	}
 	s.stateMu.Unlock()
@@ -291,6 +292,7 @@ func (s *Server) processModifications(ctx context.Context, modifications []sourc
 
 	snapshots, releases, err := s.session.DidModifyFiles(ctx, modifications)
 	if err != nil {
+		close(diagnoseDone)
 		return err
 	}
 
diff --git a/internal/span/uri.go b/internal/span/uri.go
index 2504921356e..a9777ff8598 100644
--- a/internal/span/uri.go
+++ b/internal/span/uri.go
@@ -45,7 +45,7 @@ func filename(uri URI) (string, error) {
 	if u.Scheme != fileScheme {
 		return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
 	}
-	// If the URI is a Windows URI, we trim the leading "/" and lowercase
+	// If the URI is a Windows URI, we trim the leading "/" and uppercase
 	// the drive letter, which will never be case sensitive.
 	if isWindowsDriveURIPath(u.Path) {
 		u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:]
diff --git a/internal/typeparams/doc.go b/internal/typeparams/common.go
similarity index 59%
rename from internal/typeparams/doc.go
rename to internal/typeparams/common.go
index 5583947e21f..9fc6b4beb88 100644
--- a/internal/typeparams/doc.go
+++ b/internal/typeparams/common.go
@@ -9,3 +9,17 @@
 // This package exists to make it easier for tools to work with generic code,
 // while also compiling against older Go versions.
 package typeparams
+
+import (
+	"go/ast"
+	"go/token"
+)
+
+// An IndexExprData holds data from both ast.IndexExpr and the new
+// ast.IndexListExpr, which was introduced in Go 1.18.
+type IndexExprData struct {
+	X       ast.Expr   // expression
+	Lbrack  token.Pos  // position of "["
+	Indices []ast.Expr // index expressions
+	Rbrack  token.Pos  // position of "]"
+}
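IndexExprData gives callers a single shape for both a[i] and, on go1.18, generic instantiations such as f[int, string], keeping the version-specific AST node types inside this package. A minimal sketch of a caller, using a hypothetical helper name:

	// assumed imports: "go/ast", "golang.org/x/tools/internal/typeparams"
	func indexTargets(n ast.Node) []ast.Expr {
		ix := typeparams.GetIndexExprData(n)
		if ix == nil {
			return nil // n is not an index expression
		}
		// One element for a[i]; possibly several for f[int, string] on go1.18.
		return ix.Indices
	}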
diff --git a/internal/typeparams/common_test.go b/internal/typeparams/common_test.go
new file mode 100644
index 00000000000..e15c297f005
--- /dev/null
+++ b/internal/typeparams/common_test.go
@@ -0,0 +1,36 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams_test
+
+import (
+	"go/ast"
+	"testing"
+
+	"golang.org/x/tools/internal/typeparams"
+)
+
+func TestGetIndexExprData(t *testing.T) {
+	x := &ast.Ident{}
+	i := &ast.Ident{}
+
+	tests := map[ast.Node]bool{
+		&ast.IndexExpr{X: x, Lbrack: 1, Index: i, Rbrack: 2}: true,
+		&ast.Ident{}: false,
+	}
+	want := &typeparams.IndexExprData{X: x, Lbrack: 1, Indices: []ast.Expr{i}, Rbrack: 2}
+
+	for n, isIndexExpr := range tests {
+		ix := typeparams.GetIndexExprData(n)
+		if got := ix != nil; got != isIndexExpr {
+			t.Errorf("GetIndexExprData(%+v) = %+v, want nil: %t", n, ix, !isIndexExpr)
+		}
+		if ix == nil {
+			continue
+		}
+		if ix.X != x || ix.Lbrack != 1 || ix.Indices[0] != i || ix.Rbrack != 2 {
+			t.Errorf("GetIndexExprData(%+v) = %+v, want %+v", n, ix, want)
+		}
+	}
+}
diff --git a/internal/typeparams/enabled_go117.go b/internal/typeparams/enabled_go117.go
new file mode 100644
index 00000000000..18212390e19
--- /dev/null
+++ b/internal/typeparams/enabled_go117.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package typeparams
+
+// Enabled reports whether type parameters are enabled in the current build
+// environment.
+const Enabled = false
diff --git a/internal/typeparams/enabled_go118.go b/internal/typeparams/enabled_go118.go
new file mode 100644
index 00000000000..d67148823c4
--- /dev/null
+++ b/internal/typeparams/enabled_go118.go
@@ -0,0 +1,15 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+// Note: this constant is in a separate file as this is the only acceptable
+// diff between the <1.18 API of this package and the 1.18 API.
+
+// Enabled reports whether type parameters are enabled in the current build
+// environment.
+const Enabled = true
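Because Enabled is a constant chosen by build tags rather than a runtime flag, version-specific branches cost nothing and dead code is eliminated. A minimal sketch of how a test that depends on generics might guard itself, assuming a hypothetical test name:

	// assumed imports: "testing", "golang.org/x/tools/internal/typeparams"
	func TestGenericHover(t *testing.T) {
		if !typeparams.Enabled {
			t.Skip("type parameters are not enabled in this build environment")
		}
		// ... exercise generic code ...
	}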
diff --git a/internal/typeparams/notypeparams.go b/internal/typeparams/notypeparams.go
deleted file mode 100644
index 3a0abc7c18e..00000000000
--- a/internal/typeparams/notypeparams.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build !typeparams || !go1.17
-// +build !typeparams !go1.17
-
-package typeparams
-
-import (
-	"go/ast"
-	"go/types"
-)
-
-// NOTE: doc comments must be kept in sync with typeparams.go.
-
-// Enabled reports whether type parameters are enabled in the current build
-// environment.
-const Enabled = false
-
-// UnpackIndex extracts all index expressions from e. For non-generic code this
-// is always one expression: e.Index, but may be more than one expression for
-// generic type instantiation.
-func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
-	return []ast.Expr{e.Index}
-}
-
-// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
-// introduced to hold type arguments for generic type instantiation.
-func IsListExpr(n ast.Node) bool {
-	return false
-}
-
-// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
-func ForTypeDecl(*ast.TypeSpec) *ast.FieldList {
-	return nil
-}
-
-// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
-func ForFuncDecl(*ast.FuncDecl) *ast.FieldList {
-	return nil
-}
-
-// ForSignature extracts the (possibly empty) type parameter object list from
-// sig.
-func ForSignature(*types.Signature) []*types.TypeName {
-	return nil
-}
-
-// HasTypeSet reports if iface has a type set.
-func HasTypeSet(*types.Interface) bool {
-	return false
-}
-
-// IsComparable reports if iface is the comparable interface.
-func IsComparable(*types.Interface) bool {
-	return false
-}
-
-// IsConstraint reports whether iface may only be used as a type parameter
-// constraint (i.e. has a type set or is the comparable interface).
-func IsConstraint(*types.Interface) bool {
-	return false
-}
-
-// ForNamed extracts the (possibly empty) type parameter object list from
-// named.
-func ForNamed(*types.Named) []*types.TypeName {
-	return nil
-}
-
-// NamedTArgs extracts the (possibly empty) type argument list from named.
-func NamedTArgs(*types.Named) []types.Type {
-	return nil
-}
-
-// InitInferred initializes info to record inferred type information.
-func InitInferred(*types.Info) {
-}
-
-// GetInferred extracts inferred type information from info for e.
-//
-// The expression e may have an inferred type if it is an *ast.IndexExpr
-// representing partial instantiation of a generic function type for which type
-// arguments have been inferred using constraint type inference, or if it is an
-// *ast.CallExpr for which type type arguments have be inferred using both
-// constraint type inference and function argument inference.
-func GetInferred(*types.Info, ast.Expr) ([]types.Type, *types.Signature) {
-	return nil, nil
-}
diff --git a/internal/typeparams/typeparams.go b/internal/typeparams/typeparams.go
deleted file mode 100644
index 6b7958af060..00000000000
--- a/internal/typeparams/typeparams.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build typeparams && go1.17
-// +build typeparams,go1.17
-
-package typeparams
-
-import (
-	"go/ast"
-	"go/types"
-)
-
-// NOTE: doc comments must be kept in sync with notypeparams.go.
-
-// Enabled reports whether type parameters are enabled in the current build
-// environment.
-const Enabled = true
-
-// UnpackIndex extracts all index expressions from e. For non-generic code this
-// is always one expression: e.Index, but may be more than one expression for
-// generic type instantiation.
-func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
-	if x, _ := e.Index.(*ast.ListExpr); x != nil {
-		return x.ElemList
-	}
-	if e.Index != nil {
-		return []ast.Expr{e.Index}
-	}
-	return nil
-}
-
-// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
-// introduced to hold type arguments for generic type instantiation.
-func IsListExpr(n ast.Node) bool {
-	_, ok := n.(*ast.ListExpr)
-	return ok
-}
-
-// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
-func ForTypeDecl(n *ast.TypeSpec) *ast.FieldList {
-	return n.TParams
-}
-
-// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
-func ForFuncDecl(n *ast.FuncDecl) *ast.FieldList {
-	if n.Type != nil {
-		return n.Type.TParams
-	}
-	return nil
-}
-
-// ForSignature extracts the (possibly empty) type parameter object list from
-// sig.
-func ForSignature(sig *types.Signature) []*types.TypeName {
-	return sig.TParams()
-}
-
-// HasTypeSet reports if iface has a type set.
-func HasTypeSet(iface *types.Interface) bool {
-	return iface.HasTypeList()
-}
-
-// IsComparable reports if iface is the comparable interface.
-func IsComparable(iface *types.Interface) bool {
-	return iface.IsComparable()
-}
-
-// IsConstraint reports whether iface may only be used as a type parameter
-// constraint (i.e. has a type set or is the comparable interface).
-func IsConstraint(iface *types.Interface) bool {
-	return iface.IsConstraint()
-}
-
-// ForNamed extracts the (possibly empty) type parameter object list from
-// named.
-func ForNamed(named *types.Named) []*types.TypeName {
-	return named.TParams()
-}
-
-// NamedTArgs extracts the (possibly empty) type argument list from named.
-func NamedTArgs(named *types.Named) []types.Type {
-	return named.TArgs()
-}
-
-// InitInferred initializes info to record inferred type information.
-func InitInferred(info *types.Info) {
-	info.Inferred = make(map[ast.Expr]types.Inferred)
-}
-
-// GetInferred extracts inferred type information from info for e.
-//
-// The expression e may have an inferred type if it is an *ast.IndexExpr
-// representing partial instantiation of a generic function type for which type
-// arguments have been inferred using constraint type inference, or if it is an
-// *ast.CallExpr for which type type arguments have be inferred using both
-// constraint type inference and function argument inference.
-func GetInferred(info *types.Info, e ast.Expr) ([]types.Type, *types.Signature) {
-	if info.Inferred == nil {
-		return nil, nil
-	}
-	inf := info.Inferred[e]
-	return inf.TArgs, inf.Sig
-}
diff --git a/internal/typeparams/typeparams_go117.go b/internal/typeparams/typeparams_go117.go
new file mode 100644
index 00000000000..d015ee1afa7
--- /dev/null
+++ b/internal/typeparams/typeparams_go117.go
@@ -0,0 +1,164 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package typeparams
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+func unsupported() {
+	panic("type parameters are unsupported at this go version")
+}
+
+// GetIndexExprData extracts data from *ast.IndexExpr nodes.
+// For other nodes, GetIndexExprData returns nil.
+func GetIndexExprData(n ast.Node) *IndexExprData {
+	if e, _ := n.(*ast.IndexExpr); e != nil {
+		return &IndexExprData{
+			X:       e.X,
+			Lbrack:  e.Lbrack,
+			Indices: []ast.Expr{e.Index},
+			Rbrack:  e.Rbrack,
+		}
+	}
+	return nil
+}
+
+// ForTypeDecl returns an empty field list, as type parameters are not
+// supported at this Go version.
+func ForTypeDecl(*ast.TypeSpec) *ast.FieldList {
+	return nil
+}
+
+// ForFuncDecl returns an empty field list, as type parameters are not
+// supported at this Go version.
+func ForFuncDecl(*ast.FuncDecl) *ast.FieldList {
+	return nil
+}
+
+// TypeParam is a placeholder type, as type parameters are not supported at
+// this Go version. Its methods panic on use.
+type TypeParam struct{ types.Type }
+
+// TypeParamList is a placeholder for an empty type parameter list.
+type TypeParamList struct{}
+
+func (*TypeParamList) Len() int          { return 0 }
+func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil }
+
+// TypeList is a placeholder for an empty type list.
+type TypeList struct{}
+
+func (*TypeList) Len() int          { return 0 }
+func (*TypeList) At(int) types.Type { unsupported(); return nil }
+
+// NewTypeParam is unsupported at this Go version, and panics.
+func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam {
+	unsupported()
+	return nil
+}
+
+// SetTypeParamConstraint is unsupported at this Go version, and panics.
+func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) {
+	unsupported()
+}
+
+// ForSignature returns an empty type parameter list.
+func ForSignature(*types.Signature) *TypeParamList {
+	return nil
+}
+
+// SetForSignature panics if tparams is non-empty.
+func SetForSignature(_ *types.Signature, tparams []*TypeParam) {
+	if len(tparams) > 0 {
+		unsupported()
+	}
+}
+
+// RecvTypeParams returns an empty type parameter list.
+func RecvTypeParams(sig *types.Signature) *TypeParamList {
+	return nil
+}
+
+// SetRecvTypeParams panics if rparams is non-empty.
+func SetRecvTypeParams(sig *types.Signature, rparams []*TypeParam) {
+	if len(rparams) > 0 {
+		unsupported()
+	}
+}
+
+// IsComparable returns false, as no interfaces are type-restricted at this Go
+// version.
+func IsComparable(*types.Interface) bool {
+	return false
+}
+
+// IsConstraint returns false, as no interfaces are type-restricted at this Go
+// version.
+func IsConstraint(*types.Interface) bool {
+	return false
+}
+
+// ForNamed returns an empty type parameter list, as type parameters are not
+// supported at this Go version.
+func ForNamed(*types.Named) *TypeParamList {
+	return nil
+}
+
+// SetForNamed panics if tparams is non-empty.
+func SetForNamed(_ *types.Named, tparams []*TypeParam) {
+	if len(tparams) > 0 {
+		unsupported()
+	}
+}
+
+// NamedTypeArgs extracts the (possibly empty) type argument list from named.
+func NamedTypeArgs(*types.Named) []types.Type {
+	return nil
+}
+
+// Term is a placeholder type, as type parameters are not supported at this Go
+// version. Its methods panic on use.
+type Term struct{ types.Type }
+
+// NewTerm is unsupported at this Go version, and panics.
+func NewTerm(tilde bool, typ types.Type) *Term {
+	unsupported()
+	return nil
+}
+
+// Union is a placeholder type, as type parameters are not supported at this Go
+// version. Its methods panic on use.
+type Union struct{ types.Type }
+
+// NewUnion is unsupported at this Go version, and panics.
+func NewUnion(terms []*Term) *Union {
+	unsupported()
+	return nil
+}
+
+// InitInferred is a noop at this Go version.
+func InitInferred(*types.Info) {
+}
+
+// GetInferred returns nothing, as type parameters are not supported at this Go
+// version.
+func GetInferred(*types.Info, ast.Expr) ([]types.Type, *types.Signature) {
+	return nil, nil
+}
+
+// Environment is a placeholder type, as type parameters are not supported at
+// this Go version.
+type Environment struct{}
+
+// Instantiate is unsupported at this Go version, and panics.
+func Instantiate(env *Environment, typ types.Type, targs []types.Type, validate bool) (types.Type, error) {
+	unsupported()
+	return nil, nil
+}
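The pre-1.18 file keeps the package API identical across versions: read-only accessors degrade to empty results, while constructive calls panic via unsupported(). Callers that create type parameters are therefore expected to check Enabled first. A minimal sketch, with a hypothetical wrapper function:

	// assumed imports: "go/types", "golang.org/x/tools/internal/typeparams"
	func maybeNewTypeParam(tname *types.TypeName, constraint types.Type) *typeparams.TypeParam {
		if !typeparams.Enabled {
			return nil // NewTypeParam would panic at this Go version
		}
		return typeparams.NewTypeParam(tname, constraint)
	}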
diff --git a/internal/typeparams/typeparams_go118.go b/internal/typeparams/typeparams_go118.go
new file mode 100644
index 00000000000..3e808e774d5
--- /dev/null
+++ b/internal/typeparams/typeparams_go118.go
@@ -0,0 +1,176 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+// GetIndexExprData extracts data from AST nodes that represent index
+// expressions.
+//
+// For an ast.IndexExpr, the resulting IndexExprData will have exactly one
+// index expression. For an ast.IndexListExpr (go1.18+), it may have a
+// variable number of index expressions.
+//
+// For nodes that don't represent index expressions, GetIndexExprData returns
+// nil.
+func GetIndexExprData(n ast.Node) *IndexExprData {
+	switch e := n.(type) {
+	case *ast.IndexExpr:
+		return &IndexExprData{
+			X:       e.X,
+			Lbrack:  e.Lbrack,
+			Indices: []ast.Expr{e.Index},
+			Rbrack:  e.Rbrack,
+		}
+	case *ast.IndexListExpr:
+		return (*IndexExprData)(e)
+	}
+	return nil
+}
+
+// ForTypeDecl returns n.TypeParams.
+func ForTypeDecl(n *ast.TypeSpec) *ast.FieldList {
+	return n.TypeParams
+}
+
+// ForFuncDecl returns n.Type.TypeParams.
+func ForFuncDecl(n *ast.FuncDecl) *ast.FieldList {
+	if n.Type != nil {
+		return n.Type.TypeParams
+	}
+	return nil
+}
+
+// TypeParam is an alias for types.TypeParam
+type TypeParam = types.TypeParam
+
+// TypeParamList is an alias for types.TypeParamList
+type TypeParamList = types.TypeParamList
+
+// TypeList is an alias for types.TypeList
+type TypeList = types.TypeList
+
+// NewTypeParam calls types.NewTypeParam.
+func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam {
+	return types.NewTypeParam(name, constraint)
+}
+
+// SetTypeParamConstraint calls tparam.SetConstraint(constraint).
+func SetTypeParamConstraint(tparam *TypeParam, constraint types.Type) {
+	tparam.SetConstraint(constraint)
+}
+
+// ForSignature returns sig.TypeParams()
+func ForSignature(sig *types.Signature) *TypeParamList {
+	return sig.TypeParams()
+}
+
+// SetForSignature calls sig.SetTypeParams(tparams)
+func SetForSignature(sig *types.Signature, tparams []*TypeParam) {
+	sig.SetTypeParams(tparams)
+}
+
+// RecvTypeParams returns sig.RecvTypeParams().
+func RecvTypeParams(sig *types.Signature) *TypeParamList {
+	return sig.RecvTypeParams()
+}
+
+// SetRecvTypeParams calls sig.SetRecvTypeParams(rparams).
+func SetRecvTypeParams(sig *types.Signature, rparams []*TypeParam) {
+	sig.SetRecvTypeParams(rparams)
+}
+
+// IsComparable calls iface.IsComparable().
+func IsComparable(iface *types.Interface) bool {
+	return iface.IsComparable()
+}
+
+// IsConstraint calls iface.IsConstraint().
+func IsConstraint(iface *types.Interface) bool {
+	return iface.IsConstraint()
+}
+
+// ForNamed extracts the (possibly empty) type parameter object list from
+// named.
+func ForNamed(named *types.Named) *TypeParamList {
+	return named.TypeParams()
+}
+
+// SetForNamed sets the type params tparams on n. Each tparam must be of
+// dynamic type *types.TypeParam.
+func SetForNamed(n *types.Named, tparams []*TypeParam) {
+	n.SetTypeParams(tparams)
+}
+
+// NamedTypeArgs extracts the (possibly empty) type argument list from named.
+func NamedTypeArgs(named *types.Named) []types.Type {
+	targs := named.TypeArgs()
+	numArgs := targs.Len()
+
+	typs := make([]types.Type, numArgs)
+	for i := 0; i < numArgs; i++ {
+		typs[i] = targs.At(i)
+	}
+
+	return typs
+}
+
+// Term is an alias for types.Term.
+type Term = types.Term
+
+// NewTerm calls types.NewTerm.
+func NewTerm(tilde bool, typ types.Type) *Term {
+	return types.NewTerm(tilde, typ)
+}
+
+// Union is an alias for types.Union
+type Union = types.Union
+
+// NewUnion calls types.NewUnion.
+func NewUnion(terms []*Term) *Union {
+	return types.NewUnion(terms)
+}
+
+// InitInferred initializes info to record inferred type information.
+func InitInferred(info *types.Info) {
+	info.Inferred = make(map[ast.Expr]types.Inferred)
+}
+
+// GetInferred extracts inferred type information from info for e.
+//
+// The expression e may have an inferred type if it is an *ast.IndexExpr
+// representing partial instantiation of a generic function type for which type
+// arguments have been inferred using constraint type inference, or if it is an
+// *ast.CallExpr for which type arguments have been inferred using both
+// constraint type inference and function argument inference.
+func GetInferred(info *types.Info, e ast.Expr) ([]types.Type, *types.Signature) {
+	if info.Inferred == nil {
+		return nil, nil
+	}
+	inf := info.Inferred[e]
+
+	length := inf.TArgs.Len()
+
+	typs := make([]types.Type, length)
+	for i := 0; i < length; i++ {
+		typs[i] = inf.TArgs.At(i)
+	}
+
+	return typs, inf.Sig
+}
+
+// Environment is an alias for types.Environment.
+type Environment = types.Environment
+
+// Instantiate calls types.Instantiate.
+func Instantiate(env *Environment, typ types.Type, targs []types.Type, validate bool) (types.Type, error) {
+	return types.Instantiate(env, typ, targs, validate)
+}
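Code written against these wrappers compiles under either build constraint: before go1.18 the lists are always empty, and on go1.18 they reflect the real type parameters. A minimal sketch that works on both sides (it only uses methods promoted from types.Type on the elements, since the pre-1.18 TypeParam placeholder has no others):

	// assumed imports: "go/types", "golang.org/x/tools/internal/typeparams"
	func typeParamStrings(sig *types.Signature) []string {
		var out []string
		tparams := typeparams.ForSignature(sig)
		for i := 0; i < tparams.Len(); i++ { // Len is always 0 before go1.18
			out = append(out, tparams.At(i).String())
		}
		return out
	}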
diff --git a/internal/typeparams/typeparams_test.go b/internal/typeparams/typeparams_test.go
new file mode 100644
index 00000000000..9302ad75c1e
--- /dev/null
+++ b/internal/typeparams/typeparams_test.go
@@ -0,0 +1,54 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package typeparams_test
+
+import (
+	"go/ast"
+	"go/importer"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/internal/apidiff"
+	"golang.org/x/tools/internal/testenv"
+)
+
+func TestAPIConsistency(t *testing.T) {
+	testenv.NeedsGoBuild(t) // This is a lie. We actually need the source code.
+
+	// The packages below exclude enabled_*.go, as typeparams.Enabled is
+	// permitted to change between versions.
+	old := typeCheck(t, []string{"common.go", "typeparams_go117.go"})
+	new := typeCheck(t, []string{"common.go", "typeparams_go118.go"})
+
+	report := apidiff.Changes(old, new)
+	if len(report.Changes) > 0 {
+		t.Errorf("API diff:\n%s", report)
+	}
+}
+
+func typeCheck(t *testing.T, filenames []string) *types.Package {
+	fset := token.NewFileSet()
+	var files []*ast.File
+	for _, name := range filenames {
+		f, err := parser.ParseFile(fset, name, nil, 0)
+		if err != nil {
+			t.Fatal(err)
+		}
+		files = append(files, f)
+	}
+	conf := types.Config{
+		Importer: importer.Default(),
+	}
+	pkg, err := conf.Check("", fset, files, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	return pkg
+}
diff --git a/txtar/archive.go b/txtar/archive.go
index c384f33bdf8..214256617b5 100644
--- a/txtar/archive.go
+++ b/txtar/archive.go
@@ -121,7 +121,7 @@ func isMarker(data []byte) (name string, after []byte) {
 	if i := bytes.IndexByte(data, '\n'); i >= 0 {
 		data, after = data[:i], data[i+1:]
 	}
-	if !bytes.HasSuffix(data, markerEnd) {
+	if !(bytes.HasSuffix(data, markerEnd) && len(data) >= len(marker)+len(markerEnd)) {
 		return "", nil
 	}
 	return strings.TrimSpace(string(data[len(marker) : len(data)-len(markerEnd)])), after
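The added length check rejects lines that are too short to hold both the opening "-- " and the closing " --" without overlapping, such as "-- --", which previously slipped past the prefix and suffix checks even though there is no room for a name. Such a line now stays part of the preceding file's data, which is what the new archive_test.go case below exercises. A minimal sketch of the resulting behavior:

	// assumed imports: "fmt", "golang.org/x/tools/txtar"
	a := txtar.Parse([]byte("-- noNL --\nhello world\n-- --\n"))
	for _, f := range a.Files {
		fmt.Printf("%q: %q\n", f.Name, f.Data)
	}
	// Prints a single file: "noNL": "hello world\n-- --\n"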
diff --git a/txtar/archive_test.go b/txtar/archive_test.go
index 7ac5ee9dd72..6534f530103 100644
--- a/txtar/archive_test.go
+++ b/txtar/archive_test.go
@@ -29,7 +29,10 @@ More file 1 text.
 File 2 text.
 -- empty --
 -- noNL --
-hello world`,
+hello world
+-- empty filename line --
+some content
+-- --`,
 			parsed: &Archive{
 				Comment: []byte("comment1\ncomment2\n"),
 				Files: []File{
@@ -37,6 +40,7 @@ hello world`,
 					{"file 2", []byte("File 2 text.\n")},
 					{"empty", []byte{}},
 					{"noNL", []byte("hello world\n")},
+					{"empty filename line", []byte("some content\n-- --\n")},
 				},
 			},
 		},