Mirror of https://github.com/junegunn/fzf.git (synced 2025-11-15 14:53:47 -05:00)
Fix lint warnings (#4586)
go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...
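Most of the hunks below are the same mechanical rewrite: a counter-based for loop becomes Go 1.22's range-over-int (or range-over-slice) form. A minimal, standalone sketch of the pattern follows; the names in it are illustrative and not taken from the fzf sources.

package main

import "fmt"

func main() {
	items := []string{"a", "b", "c"}

	// Old style: explicit counter loop over a slice.
	for i := 0; i < len(items); i++ {
		fmt.Println(i, items[i])
	}

	// Modernized: ranging over the slice yields the same indices.
	for i := range items {
		fmt.Println(i, items[i])
	}

	// Go 1.22+: ranging over an int iterates 0, 1, 2.
	for i := range 3 {
		fmt.Println("pass", i)
	}

	// When the loop variable is unused, drop it entirely.
	for range 3 {
		fmt.Println("tick")
	}
}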
@@ -365,7 +365,7 @@ func asciiFuzzyIndex(input *util.Chars, pattern []rune, caseSensitive bool) (int

 	firstIdx, idx, lastIdx := 0, 0, 0
 	var b byte
-	for pidx := 0; pidx < len(pattern); pidx++ {
+	for pidx := range pattern {
 		b = byte(pattern[pidx])
 		idx = trySkip(input, caseSensitive, b, idx)
 		if idx < 0 {
@@ -726,7 +726,7 @@ func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text *util.C
 	lenRunes := text.Length()
 	lenPattern := len(pattern)

-	for index := 0; index < lenRunes; index++ {
+	for index := range lenRunes {
 		char := text.Get(indexAt(index, lenRunes, forward))
 		// This is considerably faster than blindly applying strings.ToLower to the
 		// whole string

@@ -41,7 +41,7 @@ func testParserReference(t testing.TB, str string) {

 	equal := len(got) == len(exp)
 	if equal {
-		for i := 0; i < len(got); i++ {
+		for i := range got {
 			if got[i] != exp[i] {
 				equal = false
 				break
@@ -167,9 +167,9 @@ func TestNextAnsiEscapeSequence_Fuzz_Random(t *testing.T) {
 	randomString := func(rr *rand.Rand) string {
 		numChars := rand.Intn(50)
 		codePoints := make([]rune, numChars)
-		for i := 0; i < len(codePoints); i++ {
+		for i := range codePoints {
 			var r rune
-			for n := 0; n < 1000; n++ {
+			for range 1000 {
 				r = rune(rr.Intn(utf8.MaxRune))
 				// Allow 10% of runes to be invalid
 				if utf8.ValidRune(r) || rr.Float64() < 0.10 {
@@ -182,7 +182,7 @@ func TestNextAnsiEscapeSequence_Fuzz_Random(t *testing.T) {
 	}

 	rr := rand.New(rand.NewSource(1))
-	for i := 0; i < 100_000; i++ {
+	for range 100_000 {
 		testParserReference(t, randomString(rr))
 	}
 }

@@ -51,7 +51,7 @@ func TestChunkList(t *testing.T) {
 	}

 	// Add more data
-	for i := 0; i < chunkSize*2; i++ {
+	for i := range chunkSize * 2 {
 		cl.Push(fmt.Appendf(nil, "item %d", i))
 	}

@@ -85,7 +85,7 @@ func TestChunkListTail(t *testing.T) {
 		return true
 	})
 	total := chunkSize*2 + chunkSize/2
-	for i := 0; i < total; i++ {
+	for i := range total {
 		cl.Push(fmt.Appendf(nil, "item %d", i))
 	}

@@ -502,7 +502,7 @@ func Run(opts *Options) (int, error) {
 				return item.acceptNth(opts.Ansi, opts.Delimiter, fn)
 			}
 		}
-		for i := 0; i < count; i++ {
+		for i := range count {
 			opts.Printer(transformer(merger.Get(i).item))
 		}
 		if count == 0 {

@@ -38,7 +38,7 @@ func TestHistory(t *testing.T) {
 	if len(h.lines) != maxHistory+1 {
 		t.Errorf("Expected: %d, actual: %d\n", maxHistory+1, len(h.lines))
 	}
-	for i := 0; i < maxHistory; i++ {
+	for i := range maxHistory {
 		if h.lines[i] != "foobar" {
 			t.Error("Expected: foobar, actual: " + h.lines[i])
 		}

@@ -34,11 +34,11 @@ func buildLists(partiallySorted bool) ([][]Result, []Result) {
 	numLists := 4
 	lists := make([][]Result, numLists)
 	cnt := 0
-	for i := 0; i < numLists; i++ {
+	for i := range numLists {
 		numResults := rand.Int() % 20
 		cnt += numResults
 		lists[i] = make([]Result, numResults)
-		for j := 0; j < numResults; j++ {
+		for j := range numResults {
 			item := randResult()
 			lists[i][j] = item
 		}
@@ -60,7 +60,7 @@ func TestMergerUnsorted(t *testing.T) {
 	// Not sorted: same order
 	mg := NewMerger(nil, lists, false, false, revision{}, 0, 0)
 	assert(t, cnt == mg.Length(), "Invalid Length")
-	for i := 0; i < cnt; i++ {
+	for i := range cnt {
 		assert(t, items[i] == mg.Get(i), "Invalid Get")
 	}
 }
@@ -73,7 +73,7 @@ func TestMergerSorted(t *testing.T) {
 	mg := NewMerger(nil, lists, true, false, revision{}, 0, 0)
 	assert(t, cnt == mg.Length(), "Invalid Length")
 	sort.Sort(ByRelevance(items))
-	for i := 0; i < cnt; i++ {
+	for i := range cnt {
 		if items[i] != mg.Get(i) {
 			t.Error("Not sorted", items[i], mg.Get(i))
 		}

@@ -7,6 +7,7 @@ import (
 	"io/fs"
 	"os"
 	"path/filepath"
+	"slices"
 	"strings"
 	"sync"
 	"sync/atomic"
@@ -178,7 +179,7 @@ func (r *Reader) feed(src io.Reader) {
 	for {
 		n := 0
 		scope := slab[:util.Min(len(slab), readerBufferSize)]
-		for i := 0; i < 100; i++ {
+		for range 100 {
 			n, err = src.Read(scope)
 			if n > 0 || err != nil {
 				break
@@ -308,15 +309,11 @@ func (r *Reader) readFiles(roots []string, opts walkerOpts, ignores []string) bo
 			if !opts.hidden && base[0] == '.' && base != ".." {
 				return filepath.SkipDir
 			}
-			for _, ignore := range ignoresBase {
-				if ignore == base {
-					return filepath.SkipDir
-				}
+			if slices.Contains(ignoresBase, base) {
+				return filepath.SkipDir
 			}
-			for _, ignore := range ignoresFull {
-				if ignore == path {
-					return filepath.SkipDir
-				}
+			if slices.Contains(ignoresFull, path) {
+				return filepath.SkipDir
 			}
 			for _, ignore := range ignoresSuffix {
 				if strings.HasSuffix(path, ignore) {

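The readFiles hunk above also folds hand-written membership loops into slices.Contains from the standard slices package (Go 1.21+), which lines up with the earlier hunk that adds the "slices" import. A minimal sketch of the equivalent rewrite, using a made-up ignore list rather than the reader's actual fields:

package main

import (
	"fmt"
	"slices"
)

func main() {
	// Hypothetical ignore list; readFiles keeps its own ignoresBase/ignoresFull.
	ignores := []string{".git", "node_modules", "target"}
	base := "node_modules"

	// Old style: manual linear search with an early exit.
	found := false
	for _, ignore := range ignores {
		if ignore == base {
			found = true
			break
		}
	}

	// Modernized: slices.Contains performs the same linear search.
	fmt.Println(found, slices.Contains(ignores, base)) // true true
}
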
@@ -91,7 +91,7 @@ func buildResult(item *Item, offsets []Offset, score int) Result {
 		case byBegin, byEnd:
 			if validOffsetFound {
 				whitePrefixLen := 0
-				for idx := 0; idx < numChars; idx++ {
+				for idx := range numChars {
 					r := item.text.Get(idx)
 					whitePrefixLen = idx
 					if idx == minBegin || !unicode.IsSpace(r) {

@@ -3526,11 +3526,12 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
 		} else {
 			tokens = Transform(Tokenize(item.text.ToString(), t.delimiter), t.nthCurrent)
 		}
-		for _, token := range tokens {
+		nthOffsets = make([]Offset, len(tokens))
+		for i, token := range tokens {
 			start := token.prefixLength
 			length := token.text.Length() - token.text.TrailingWhitespaces()
 			end := start + int32(length)
-			nthOffsets = append(nthOffsets, Offset{int32(start), int32(end)})
+			nthOffsets[i] = Offset{int32(start), int32(end)}
 		}
 		sort.Sort(ByOrder(nthOffsets))
 	}
@@ -4902,7 +4903,7 @@ func (t *Terminal) buildPlusList(template string, forcePlus bool) (bool, [3][]*I
 	if asterisk {
 		cnt := t.merger.Length()
 		all = make([]*Item, cnt)
-		for i := 0; i < cnt; i++ {
+		for i := range cnt {
 			all[i] = t.merger.Get(i).item
 		}
 	}
@@ -7154,7 +7155,7 @@ func (t *Terminal) constrain() {

 	// May need to try again after adjusting the offset
 	t.offset = util.Constrain(t.offset, 0, count)
-	for tries := 0; tries < maxLines; tries++ {
+	for range maxLines {
 		numItems := maxLines
 		// How many items can be fit on screen including the current item?
 		if t.canSpanMultiLines() && t.merger.Length() > 0 {
@@ -7208,7 +7209,7 @@ func (t *Terminal) constrain() {
 	scrollOff := util.Min(maxLines/2, t.scrollOff)
 	newOffset := t.offset
 	// 2-phase adjustment to avoid infinite loop of alternating between moving up and down
-	for phase := 0; phase < 2; phase++ {
+	for phase := range 2 {
 		for {
 			prevOffset := newOffset
 			numItems := t.merger.Length()

@@ -206,8 +206,9 @@ func Tokenize(text string, delimiter Delimiter) []Token {
 	if delimiter.regex != nil {
 		locs := delimiter.regex.FindAllStringIndex(text, -1)
 		begin := 0
-		for _, loc := range locs {
-			tokens = append(tokens, text[begin:loc[1]])
+		tokens = make([]string, len(locs))
+		for i, loc := range locs {
+			tokens[i] = text[begin:loc[1]]
 			begin = loc[1]
 		}
 		if begin < len(text) {

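The Tokenize and printHighlighted hunks above swap append-in-a-loop for a slice that is allocated once at its final length and filled by index. A minimal sketch of the same pattern, with hypothetical names rather than anything from fzf:

package main

import "fmt"

func main() {
	words := []string{"alpha", "beta", "gamma"}

	// Old style: start from an empty slice and append, reallocating as it grows.
	lens := []int{}
	for _, w := range words {
		lens = append(lens, len(w))
	}

	// Modernized: the final length is known up front, so allocate once
	// and assign each element by index.
	lens2 := make([]int, len(words))
	for i, w := range words {
		lens2[i] = len(w)
	}

	fmt.Println(lens, lens2) // [5 4 5] [5 4 5]
}
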
@@ -98,7 +98,7 @@ func (r *LightRenderer) findOffset() (row int, col int) {
 	r.flush()
 	var err error
 	bytes := []byte{}
-	for tries := 0; tries < offsetPollTries; tries++ {
+	for tries := range offsetPollTries {
 		bytes, err = r.getBytesInternal(bytes, tries > 0)
 		if err != nil {
 			return -1, -1

@@ -8,7 +8,7 @@ import (
 func TestAtExit(t *testing.T) {
 	want := []int{3, 2, 1, 0}
 	var called []int
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		n := i
 		AtExit(func() { called = append(called, n) })
 	}

@@ -52,7 +52,7 @@ func ToChars(bytes []byte) Chars {
 	}

 	runes := make([]rune, bytesUntil, len(bytes))
-	for i := 0; i < bytesUntil; i++ {
+	for i := range bytesUntil {
 		runes[i] = rune(bytes[i])
 	}
 	for i := bytesUntil; i < len(bytes); {
@@ -259,7 +259,7 @@ func (chars *Chars) Lines(multiLine bool, maxLines int, wrapCols int, wrapSignWi
 		lines = append(lines, text)
 	} else {
 		from := 0
-		for off := 0; off < len(text); off++ {
+		for off := range text {
 			if text[off] == '\n' {
 				lines = append(lines, text[from:off+1]) // Include '\n'
 				from = off + 1