
Fix lint warnings (#4586)

go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...
Author: phanium
Date: 2025-11-12 21:05:17 +08:00
Committed by: GitHub
Parent: b9f2bf64ff
Commit: 91fab3b3c2
13 changed files with 34 additions and 35 deletions
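Most of the hunks below are one of two mechanical rewrites applied by the modernize analyzer: counted loops become Go 1.22 range-over-int loops, and hand-rolled membership scans become slices.Contains (in the standard library since Go 1.21); two further hunks preallocate a slice instead of appending inside a loop. As a rough, self-contained illustration of the two loop-related rewrites (placeholder values only, not fzf code):

package main

import (
	"fmt"
	"slices"
)

func main() {
	// Before: for i := 0; i < 3; i++ { ... }
	// After (Go 1.22+): ranging over an integer yields 0, 1, 2.
	for i := range 3 {
		fmt.Println("iteration", i)
	}

	// Before: a manual loop comparing each element against the target.
	// After (Go 1.21+): slices.Contains performs the same linear scan.
	ignores := []string{".git", "node_modules"}
	fmt.Println(slices.Contains(ignores, ".git")) // true
}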

View File

@@ -365,7 +365,7 @@ func asciiFuzzyIndex(input *util.Chars, pattern []rune, caseSensitive bool) (int
 	firstIdx, idx, lastIdx := 0, 0, 0
 	var b byte
-	for pidx := 0; pidx < len(pattern); pidx++ {
+	for pidx := range pattern {
 		b = byte(pattern[pidx])
 		idx = trySkip(input, caseSensitive, b, idx)
 		if idx < 0 {
@@ -726,7 +726,7 @@ func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text *util.C
 	lenRunes := text.Length()
 	lenPattern := len(pattern)
-	for index := 0; index < lenRunes; index++ {
+	for index := range lenRunes {
 		char := text.Get(indexAt(index, lenRunes, forward))
 		// This is considerably faster than blindly applying strings.ToLower to the
 		// whole string

View File

@@ -41,7 +41,7 @@ func testParserReference(t testing.TB, str string) {
 	equal := len(got) == len(exp)
 	if equal {
-		for i := 0; i < len(got); i++ {
+		for i := range got {
 			if got[i] != exp[i] {
 				equal = false
 				break
@@ -167,9 +167,9 @@ func TestNextAnsiEscapeSequence_Fuzz_Random(t *testing.T) {
 	randomString := func(rr *rand.Rand) string {
 		numChars := rand.Intn(50)
 		codePoints := make([]rune, numChars)
-		for i := 0; i < len(codePoints); i++ {
+		for i := range codePoints {
 			var r rune
-			for n := 0; n < 1000; n++ {
+			for range 1000 {
 				r = rune(rr.Intn(utf8.MaxRune))
 				// Allow 10% of runes to be invalid
 				if utf8.ValidRune(r) || rr.Float64() < 0.10 {
@@ -182,7 +182,7 @@ func TestNextAnsiEscapeSequence_Fuzz_Random(t *testing.T) {
 	}
 	rr := rand.New(rand.NewSource(1))
-	for i := 0; i < 100_000; i++ {
+	for range 100_000 {
 		testParserReference(t, randomString(rr))
 	}
 }

View File

@@ -51,7 +51,7 @@ func TestChunkList(t *testing.T) {
 	}
 	// Add more data
-	for i := 0; i < chunkSize*2; i++ {
+	for i := range chunkSize * 2 {
 		cl.Push(fmt.Appendf(nil, "item %d", i))
 	}
@@ -85,7 +85,7 @@ func TestChunkListTail(t *testing.T) {
 		return true
 	})
 	total := chunkSize*2 + chunkSize/2
-	for i := 0; i < total; i++ {
+	for i := range total {
 		cl.Push(fmt.Appendf(nil, "item %d", i))
 	}

View File

@@ -502,7 +502,7 @@ func Run(opts *Options) (int, error) {
 				return item.acceptNth(opts.Ansi, opts.Delimiter, fn)
 			}
 		}
-		for i := 0; i < count; i++ {
+		for i := range count {
 			opts.Printer(transformer(merger.Get(i).item))
 		}
 		if count == 0 {

View File

@@ -38,7 +38,7 @@ func TestHistory(t *testing.T) {
 	if len(h.lines) != maxHistory+1 {
 		t.Errorf("Expected: %d, actual: %d\n", maxHistory+1, len(h.lines))
 	}
-	for i := 0; i < maxHistory; i++ {
+	for i := range maxHistory {
 		if h.lines[i] != "foobar" {
 			t.Error("Expected: foobar, actual: " + h.lines[i])
 		}

View File

@@ -34,11 +34,11 @@ func buildLists(partiallySorted bool) ([][]Result, []Result) {
 	numLists := 4
 	lists := make([][]Result, numLists)
 	cnt := 0
-	for i := 0; i < numLists; i++ {
+	for i := range numLists {
 		numResults := rand.Int() % 20
 		cnt += numResults
 		lists[i] = make([]Result, numResults)
-		for j := 0; j < numResults; j++ {
+		for j := range numResults {
 			item := randResult()
 			lists[i][j] = item
 		}
@@ -60,7 +60,7 @@ func TestMergerUnsorted(t *testing.T) {
 	// Not sorted: same order
 	mg := NewMerger(nil, lists, false, false, revision{}, 0, 0)
 	assert(t, cnt == mg.Length(), "Invalid Length")
-	for i := 0; i < cnt; i++ {
+	for i := range cnt {
 		assert(t, items[i] == mg.Get(i), "Invalid Get")
 	}
 }
@@ -73,7 +73,7 @@ func TestMergerSorted(t *testing.T) {
 	mg := NewMerger(nil, lists, true, false, revision{}, 0, 0)
 	assert(t, cnt == mg.Length(), "Invalid Length")
 	sort.Sort(ByRelevance(items))
-	for i := 0; i < cnt; i++ {
+	for i := range cnt {
 		if items[i] != mg.Get(i) {
 			t.Error("Not sorted", items[i], mg.Get(i))
 		}

View File

@@ -7,6 +7,7 @@ import (
"io/fs" "io/fs"
"os" "os"
"path/filepath" "path/filepath"
"slices"
"strings" "strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
@@ -178,7 +179,7 @@ func (r *Reader) feed(src io.Reader) {
 	for {
 		n := 0
 		scope := slab[:util.Min(len(slab), readerBufferSize)]
-		for i := 0; i < 100; i++ {
+		for range 100 {
 			n, err = src.Read(scope)
 			if n > 0 || err != nil {
 				break
@@ -308,15 +309,11 @@ func (r *Reader) readFiles(roots []string, opts walkerOpts, ignores []string) bo
 		if !opts.hidden && base[0] == '.' && base != ".." {
 			return filepath.SkipDir
 		}
-		for _, ignore := range ignoresBase {
-			if ignore == base {
-				return filepath.SkipDir
-			}
+		if slices.Contains(ignoresBase, base) {
+			return filepath.SkipDir
 		}
-		for _, ignore := range ignoresFull {
-			if ignore == path {
-				return filepath.SkipDir
-			}
+		if slices.Contains(ignoresFull, path) {
+			return filepath.SkipDir
 		}
 		for _, ignore := range ignoresSuffix {
 			if strings.HasSuffix(path, ignore) {

View File

@@ -91,7 +91,7 @@ func buildResult(item *Item, offsets []Offset, score int) Result {
 	case byBegin, byEnd:
 		if validOffsetFound {
 			whitePrefixLen := 0
-			for idx := 0; idx < numChars; idx++ {
+			for idx := range numChars {
 				r := item.text.Get(idx)
 				whitePrefixLen = idx
 				if idx == minBegin || !unicode.IsSpace(r) {

View File

@@ -3526,11 +3526,12 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
 		} else {
 			tokens = Transform(Tokenize(item.text.ToString(), t.delimiter), t.nthCurrent)
 		}
-		for _, token := range tokens {
+		nthOffsets = make([]Offset, len(tokens))
+		for i, token := range tokens {
 			start := token.prefixLength
 			length := token.text.Length() - token.text.TrailingWhitespaces()
 			end := start + int32(length)
-			nthOffsets = append(nthOffsets, Offset{int32(start), int32(end)})
+			nthOffsets[i] = Offset{int32(start), int32(end)}
 		}
 		sort.Sort(ByOrder(nthOffsets))
 	}
@@ -4902,7 +4903,7 @@ func (t *Terminal) buildPlusList(template string, forcePlus bool) (bool, [3][]*I
 	if asterisk {
 		cnt := t.merger.Length()
 		all = make([]*Item, cnt)
-		for i := 0; i < cnt; i++ {
+		for i := range cnt {
 			all[i] = t.merger.Get(i).item
 		}
 	}
@@ -7154,7 +7155,7 @@ func (t *Terminal) constrain() {
 	// May need to try again after adjusting the offset
 	t.offset = util.Constrain(t.offset, 0, count)
-	for tries := 0; tries < maxLines; tries++ {
+	for range maxLines {
 		numItems := maxLines
 		// How many items can be fit on screen including the current item?
 		if t.canSpanMultiLines() && t.merger.Length() > 0 {
@@ -7208,7 +7209,7 @@ func (t *Terminal) constrain() {
 	scrollOff := util.Min(maxLines/2, t.scrollOff)
 	newOffset := t.offset
 	// 2-phase adjustment to avoid infinite loop of alternating between moving up and down
-	for phase := 0; phase < 2; phase++ {
+	for phase := range 2 {
 		for {
 			prevOffset := newOffset
 			numItems := t.merger.Length()

View File

@@ -206,8 +206,9 @@ func Tokenize(text string, delimiter Delimiter) []Token {
 	if delimiter.regex != nil {
 		locs := delimiter.regex.FindAllStringIndex(text, -1)
 		begin := 0
-		for _, loc := range locs {
-			tokens = append(tokens, text[begin:loc[1]])
+		tokens = make([]string, len(locs))
+		for i, loc := range locs {
+			tokens[i] = text[begin:loc[1]]
 			begin = loc[1]
 		}
 		if begin < len(text) {

View File

@@ -98,7 +98,7 @@ func (r *LightRenderer) findOffset() (row int, col int) {
 	r.flush()
 	var err error
 	bytes := []byte{}
-	for tries := 0; tries < offsetPollTries; tries++ {
+	for tries := range offsetPollTries {
 		bytes, err = r.getBytesInternal(bytes, tries > 0)
 		if err != nil {
 			return -1, -1

View File

@@ -8,7 +8,7 @@ import (
 func TestAtExit(t *testing.T) {
 	want := []int{3, 2, 1, 0}
 	var called []int
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		n := i
 		AtExit(func() { called = append(called, n) })
 	}

View File

@@ -52,7 +52,7 @@ func ToChars(bytes []byte) Chars {
 	}
 	runes := make([]rune, bytesUntil, len(bytes))
-	for i := 0; i < bytesUntil; i++ {
+	for i := range bytesUntil {
 		runes[i] = rune(bytes[i])
 	}
 	for i := bytesUntil; i < len(bytes); {
@@ -259,7 +259,7 @@ func (chars *Chars) Lines(multiLine bool, maxLines int, wrapCols int, wrapSignWi
 		lines = append(lines, text)
 	} else {
 		from := 0
-		for off := 0; off < len(text); off++ {
+		for off := range text {
 			if text[off] == '\n' {
 				lines = append(lines, text[from:off+1]) // Include '\n'
 				from = off + 1