From 01d9d9c8c895de728e7abcad1723db019280452d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexei=20=C8=98er=C8=99un?=
Date: Mon, 17 Feb 2025 13:50:15 +0200
Subject: [PATCH] Normalize char before pattern lookup (#4252)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

There is an edge case in FuzzyMatchV1 during the backward scan, related to
normalization: if the input string contains a denormalized character (e.g. an
accented Unicode symbol), the backward scan does not match it against the
pattern and proceeds to the next char. However, when the score is computed,
the string is normalized first and only then scanned against the pattern, so
the pattern index is incremented past its last element, which results in an
out-of-bounds access and a panic.

To illustrate, here is the sequence of operations when a search is performed:

1. During the backward scan with the "minim" pattern:

```
xxxxx Minímal example
      ^^^^^^^^^^^^
      ||||||||||||
      miniiiiiiiim <- compute score for this substring
```

2. During the score computation with the "minim" pattern:

```
Minímal exam
minimal exam <- normalize chars before computing the score
^^^^^^
||||||
minim        <- at this point the pattern is already fully scanned and the index is out of bounds
```

In this commit, the char is normalized during the backward scan so that the
boundaries of the pattern match are detected properly.
---
 src/algo/algo.go      | 3 +++
 src/algo/algo_test.go | 9 +++++++++
 2 files changed, 12 insertions(+)

diff --git a/src/algo/algo.go b/src/algo/algo.go
index c0022475..d6a9a663 100644
--- a/src/algo/algo.go
+++ b/src/algo/algo.go
@@ -767,6 +767,9 @@ func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text *util.C
 					char = unicode.To(unicode.LowerCase, char)
 				}
 			}
+			if normalize {
+				char = normalizeRune(char)
+			}
 
 			pidx_ := indexAt(pidx, lenPattern, forward)
 			pchar := pattern[pidx_]
diff --git a/src/algo/algo_test.go b/src/algo/algo_test.go
index b5ed0e77..aab03b0a 100644
--- a/src/algo/algo_test.go
+++ b/src/algo/algo_test.go
@@ -200,3 +200,12 @@ func TestLongString(t *testing.T) {
 	bytes[math.MaxUint16] = 'z'
 	assertMatch(t, FuzzyMatchV2, true, true, string(bytes), "zx", math.MaxUint16, math.MaxUint16+2, scoreMatch*2+bonusConsecutive)
 }
+
+func TestLongStringWithNormalize(t *testing.T) {
+	bytes := make([]byte, 30000)
+	for i := range bytes {
+		bytes[i] = 'x'
+	}
+	unicodeString := string(bytes) + " Minímal example"
+	assertMatch2(t, FuzzyMatchV1, false, true, false, unicodeString, "minim", 30001, 30006, 140)
+}
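
The standalone sketch below illustrates the mismatch described in the commit
message. The `simplifiedNormalize` map and this `normalizeRune` body are
hypothetical, simplified stand-ins for fzf's actual normalization table (the
real lookup in src/algo/normalize.go covers far more code points); the only
point is that, without normalization, the accented 'í' in "Minímal" never
equals the pattern rune 'i', so the backward scan widens the matched
substring and the later, normalized score computation walks past the end of
the pattern.

```go
package main

import "fmt"

// simplifiedNormalize is a toy stand-in for fzf's normalization table
// (hypothetical; the real table covers far more code points). It maps a few
// accented Latin runes to their ASCII equivalents.
var simplifiedNormalize = map[rune]rune{
	'á': 'a', 'é': 'e', 'í': 'i', 'ó': 'o', 'ú': 'u',
}

// normalizeRune returns the ASCII form of r if it is in the table,
// otherwise r unchanged.
func normalizeRune(r rune) rune {
	if n, ok := simplifiedNormalize[r]; ok {
		return n
	}
	return r
}

func main() {
	patternChar := 'i'
	textChar := 'í' // the accented char from "Minímal"

	// Without normalization the backward scan sees a mismatch here and keeps
	// walking backward, so the candidate substring grows wider than "minim".
	fmt.Println(textChar == patternChar) // false

	// With normalization (as added in this patch) the boundary is detected
	// correctly, matching what the score computation later does to the text.
	fmt.Println(normalizeRune(textChar) == patternChar) // true
}
```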