Mirror of https://github.com/junegunn/fzf.git (synced 2025-11-08 11:23:47 -05:00)

Compare commits: 0.15.6 ... leverage-b (1795 Commits)
| Author | SHA1 | Date |
|---|---|---|
| | `38040d43e4` … `7ce427ff47` *(commit list truncated; only the SHA1 column was preserved)* | |

1 .github/FUNDING.yml vendored Normal file
@@ -0,0 +1 @@
github: junegunn

29 .github/ISSUE_TEMPLATE.md vendored
@@ -1,29 +0,0 @@
<!-- Check all that apply [x] -->
- Category
    - [ ] fzf binary
    - [ ] fzf-tmux script
    - [ ] Key bindings
    - [ ] Completion
    - [ ] Vim
    - [ ] Neovim
    - [ ] Etc.
- OS
    - [ ] Linux
    - [ ] Mac OS X
    - [ ] Windows
    - [ ] Etc.
- Shell
    - [ ] bash
    - [ ] zsh
    - [ ] fish

<!--
### Before submitting

- Make sure that you have the latest version of fzf
- If you use tmux, make sure $TERM is set to screen or screen-256color
- For more Vim stuff, check out https://github.com/junegunn/fzf.vim

Describe your problem or suggestion from here ...
-->

49 .github/ISSUE_TEMPLATE/issue_template.yml vendored Normal file
@@ -0,0 +1,49 @@
---
name: Issue Template
description: Report a problem or bug related to fzf to help us improve

body:
  - type: markdown
    attributes:
      value: ISSUES NOT FOLLOWING THIS TEMPLATE WILL BE CLOSED AND DELETED

  - type: checkboxes
    attributes:
      label: Checklist
      options:
        - label: I have read through the manual page (`man fzf`)
          required: true
        - label: I have searched through the existing issues
          required: true
        - label: For bug reports, I have checked if the bug is reproducible in the latest version of fzf
          required: false

  - type: input
    attributes:
      label: Output of `fzf --version`
      placeholder: e.g. 0.48.1 (d579e33)
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: OS
      options:
        - label: Linux
        - label: macOS
        - label: Windows
        - label: Etc.

  - type: checkboxes
    attributes:
      label: Shell
      options:
        - label: bash
        - label: zsh
        - label: fish

  - type: textarea
    attributes:
      label: Problem / Steps to reproduce
    validations:
      required: true

10 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,10 @@
version: 2
updates:
  - package-ecosystem: "gomod"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

44 .github/workflows/codeql-analysis.yml vendored Normal file
@@ -0,0 +1,44 @@
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning
name: CodeQL

on:
  push:
    branches: [ master, devel ]
  pull_request:
    branches: [ master ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  analyze:
    permissions:
      actions: read  # for github/codeql-action/init to get workflow details
      contents: read  # for actions/checkout to fetch code
      security-events: write  # for github/codeql-action/autobuild to send a status report
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: ['go']

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3

14 .github/workflows/depsreview.yaml vendored Normal file
@@ -0,0 +1,14 @@
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v4
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v4

48 .github/workflows/linux.yml vendored Normal file
@@ -0,0 +1,48 @@
---
name: Test fzf on Linux

on:
  push:
    branches: [ master, devel ]
  pull_request:
    branches: [ master ]
  workflow_dispatch:

permissions:
  contents: read

env:
  LANG: C.UTF-8

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: "1.20"

      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.1.0

      - name: Install packages
        run: sudo apt-get install --yes zsh fish tmux

      - name: Install Ruby gems
        run: sudo gem install --no-document minitest:5.25.1 rubocop:1.65.0 rubocop-minitest:0.35.1 rubocop-performance:1.21.1

      - name: Rubocop
        run: rubocop --require rubocop-minitest --require rubocop-performance

      - name: Unit test
        run: make test

      - name: Integration test
        run: make install && ./install --all && tmux new-session -d && ruby test/test_go.rb --verbose

45 .github/workflows/macos.yml vendored Normal file
@@ -0,0 +1,45 @@
---
name: Test fzf on macOS

on:
  push:
    branches: [ master, devel ]
  pull_request:
    branches: [ master ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  build:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: "1.20"

      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.0.0

      - name: Install packages
        run: HOMEBREW_NO_INSTALL_CLEANUP=1 brew install fish zsh tmux

      - name: Install Ruby gems
        run: gem install --no-document minitest:5.14.2 rubocop:1.0.0 rubocop-minitest:0.10.1 rubocop-performance:1.8.1

      - name: Rubocop
        run: rubocop --require rubocop-minitest --require rubocop-performance

      - name: Unit test
        run: make test

      - name: Integration test
        run: make install && ./install --all && LC_ALL=C tmux new-session -d && ruby test/test_go.rb --verbose

24 .github/workflows/sponsors.yml vendored Normal file
@@ -0,0 +1,24 @@
---
name: Generate Sponsors README
on:
  workflow_dispatch:
  schedule:
    - cron: 0 0 * * 0
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v4

      - name: Generate Sponsors 💖
        uses: JamesIves/github-sponsors-readme-action@v1
        with:
          token: ${{ secrets.SPONSORS_TOKEN }}
          file: 'README.md'

      - name: Deploy to GitHub Pages 🚀
        uses: JamesIves/github-pages-deploy-action@v4
        with:
          branch: master
          folder: '.'

10 .github/workflows/typos.yml vendored Normal file
@@ -0,0 +1,10 @@
name: "Spell Check"
on: [pull_request]

jobs:
  typos:
    name: Spell Check with Typos
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: crate-ci/typos@v1.24.1

14 .github/workflows/winget.yml vendored Normal file
@@ -0,0 +1,14 @@
name: Publish to Winget
on:
  release:
    types: [released]

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@v2
        with:
          identifier: junegunn.fzf
          installers-regex: '-windows_(armv7|arm64|amd64)\.zip$'
          token: ${{ secrets.WINGET_TOKEN }}

14 .gitignore vendored
@@ -1,6 +1,14 @@
bin
src/fzf/fzf-*
gopath
bin/fzf
bin/fzf.exe
dist
target
pkg
Gemfile.lock
.DS_Store
doc/tags
vendor
gopath
*.zwc
fzf
tmp
*.patch

104 .goreleaser.yml Normal file
@@ -0,0 +1,104 @@
---
version: 2
project_name: fzf

before:
  hooks:
    - go mod download

builds:
  - id: fzf
    goos:
      - darwin
      - linux
      - windows
      - freebsd
      - openbsd
    goarch:
      - amd64
      - arm
      - arm64
      - loong64
      - ppc64le
      - s390x
    goarm:
      - 5
      - 6
      - 7
    flags:
      - -trimpath
    ldflags:
      - "-s -w -X main.version={{ .Version }} -X main.revision={{ .ShortCommit }}"
    ignore:
      - goos: freebsd
        goarch: arm
      - goos: openbsd
        goarch: arm
      - goos: freebsd
        goarch: arm64
      - goos: openbsd
        goarch: arm64

# .goreleaser.yaml
notarize:
  macos:
    - # Whether this configuration is enabled or not.
      #
      # Default: false.
      # Templates: allowed.
      enabled: "{{ not .IsSnapshot }}"

      # Before notarizing, we need to sign the binary.
      # This blocks defines the configuration for doing so.
      sign:
        # The .p12 certificate file path or its base64'd contents.
        certificate: "{{.Env.MACOS_SIGN_P12}}"

        # The password to be used to open the certificate.
        password: "{{.Env.MACOS_SIGN_PASSWORD}}"

      # Then, we notarize the binaries.
      notarize:
        # The issuer ID.
        # Its the UUID you see when creating the App Store Connect key.
        issuer_id: "{{.Env.MACOS_NOTARY_ISSUER_ID}}"

        # Key ID.
        # You can see it in the list of App Store Connect Keys.
        # It will also be in the ApiKey filename.
        key_id: "{{.Env.MACOS_NOTARY_KEY_ID}}"

        # The .p8 key file path or its base64'd contents.
        key: "{{.Env.MACOS_NOTARY_KEY}}"

        # Whether to wait for the notarization to finish.
        # Not recommended, as it could take a really long time.
        wait: true

archives:
  - name_template: "{{ .ProjectName }}-{{ .Version }}-{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}"
    builds:
      - fzf
    format: tar.gz
    format_overrides:
      - goos: windows
        format: zip
    files:
      - non-existent*

release:
  github:
    owner: junegunn
    name: fzf
  prerelease: auto
  name_template: '{{ .Version }}'

snapshot:
  name_template: "{{ .Version }}-devel"

changelog:
  sort: asc
  filters:
    exclude:
      - README
      - test

32 .rubocop.yml Normal file
@@ -0,0 +1,32 @@
Layout/LineLength:
  Enabled: false
Metrics:
  Enabled: false
Lint/ShadowingOuterLocalVariable:
  Enabled: false
Style/MethodCallWithArgsParentheses:
  Enabled: true
  AllowedMethods:
    - assert
    - exit
    - paste
    - puts
    - raise
    - refute
    - require
    - send_keys
  AllowedPatterns:
    - ^assert_
    - ^refute_
Style/NumericPredicate:
  Enabled: false
Style/StringConcatenation:
  Enabled: false
Style/OptionalBooleanParameter:
  Enabled: false
Style/WordArray:
  MinSize: 1
Minitest/AssertEqual:
  Enabled: false
Naming/VariableNumber:
  Enabled: false

1 .tool-versions Normal file
@@ -0,0 +1 @@
golang 1.20.13

28 .travis.yml
@@ -1,28 +0,0 @@
language: ruby
matrix:
  include:
    - env: TAGS=
      rvm: 2.2.0
    # - env: TAGS=tcell
    #   rvm: 2.2.0

install:
  - sudo apt-get update
  - sudo apt-get install -y libncurses-dev lib32ncurses5-dev libgpm-dev
  - sudo add-apt-repository -y ppa:pi-rho/dev
  - sudo apt-add-repository -y ppa:fish-shell/release-2
  - sudo apt-get update
  - sudo apt-get install -y tmux=1.9a-1~ppa1~p
  - sudo apt-get install -y zsh fish

script: |
  export GOPATH=~/go
  export FZF_BASE=$GOPATH/src/github.com/junegunn/fzf

  mkdir -p $GOPATH/src/github.com/junegunn
  ln -s $(pwd) $FZF_BASE

  cd $FZF_BASE/src && make test fzf/fzf-linux_amd64 install &&
  cd $FZF_BASE/bin && ln -sf fzf-linux_amd64 fzf-$(./fzf --version)-linux_amd64 &&
  cd $FZF_BASE && yes | ./install && rm -f fzf &&
  tmux new "ruby test/test_go.rb > out && touch ok" && cat out && [ -e ok ]

676 ADVANCED.md Normal file
@@ -0,0 +1,676 @@

Advanced fzf examples
======================

* *Last update: 2024/06/24*
* *Requires fzf 0.54.0 or later*

---

<!-- vim-markdown-toc GFM -->

* [Introduction](#introduction)
* [Display modes](#display-modes)
    * [`--height`](#--height)
    * [`--tmux`](#--tmux)
* [Dynamic reloading of the list](#dynamic-reloading-of-the-list)
    * [Updating the list of processes by pressing CTRL-R](#updating-the-list-of-processes-by-pressing-ctrl-r)
    * [Toggling between data sources](#toggling-between-data-sources)
    * [Toggling with a single key binding](#toggling-with-a-single-key-binding)
* [Ripgrep integration](#ripgrep-integration)
    * [Using fzf as the secondary filter](#using-fzf-as-the-secondary-filter)
    * [Using fzf as interactive Ripgrep launcher](#using-fzf-as-interactive-ripgrep-launcher)
    * [Switching to fzf-only search mode](#switching-to-fzf-only-search-mode)
    * [Switching between Ripgrep mode and fzf mode](#switching-between-ripgrep-mode-and-fzf-mode)
    * [Switching between Ripgrep mode and fzf mode using a single key binding](#switching-between-ripgrep-mode-and-fzf-mode-using-a-single-key-binding)
* [Log tailing](#log-tailing)
* [Key bindings for git objects](#key-bindings-for-git-objects)
    * [Files listed in `git status`](#files-listed-in-git-status)
    * [Branches](#branches)
    * [Commit hashes](#commit-hashes)
* [Color themes](#color-themes)
    * [fzf Theme Playground](#fzf-theme-playground)
    * [Generating fzf color theme from Vim color schemes](#generating-fzf-color-theme-from-vim-color-schemes)

<!-- vim-markdown-toc -->

Introduction
------------

fzf is an interactive [Unix filter][filter] program that is designed to be
used with other Unix tools. It reads a list of items from the standard input,
allows you to select a subset of the items, and prints the selected ones to
the standard output. You can think of it as an interactive version of *grep*,
and it's already useful even if you don't know any of its options.

```sh
# 1. ps: Feed the list of processes to fzf
# 2. fzf: Interactively select a process using fuzzy matching algorithm
# 3. awk: Take the PID from the selected line
# 4. kill: Kill the process with the PID
ps -ef | fzf | awk '{print $2}' | xargs kill -9
```

[filter]: https://en.wikipedia.org/wiki/Filter_(software)
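
If you run this pipeline often, you can wrap it in a shell function. Below is a minimal sketch of that idea (not part of fzf itself; the function name `fkill` is an arbitrary choice for this example):

```sh
# Hypothetical helper: interactively pick a process and kill it.
# This is just the pipeline above wrapped in a function for your shell rc file.
fkill() {
  ps -ef | fzf | awk '{print $2}' | xargs kill -9
}
```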

While the above example succinctly summarizes the fundamental concept of fzf,
you can build much more sophisticated interactive workflows using fzf once you
learn its wide variety of features.

- To see the full list of options and features, see `man fzf`
- To see the latest additions, see [CHANGELOG.md](CHANGELOG.md)

This document will guide you through some examples that will familiarize you
with the advanced features of fzf.

Display modes
-------------

### `--height`

fzf by default opens in fullscreen mode, but it's not always desirable.
Oftentimes, you want to see the current context of the terminal while using
fzf. `--height` is an option for opening fzf below the cursor in
non-fullscreen mode so you can still see the previous commands and their
results above it.

```sh
fzf --height=40%
```



You might also want to experiment with other layout options such as
`--layout=reverse`, `--info=inline`, `--border`, `--margin`, etc.

```sh
fzf --height=40% --layout=reverse
fzf --height=40% --layout=reverse --info=inline
fzf --height=40% --layout=reverse --info=inline --border
fzf --height=40% --layout=reverse --info=inline --border --margin=1
fzf --height=40% --layout=reverse --info=inline --border --margin=1 --padding=1
```



*(See `Layout` section of the man page to see the full list of options)*

But you definitely don't want to repeat `--height=40% --layout=reverse
--info=inline --border --margin=1 --padding=1` every time you use fzf. You
could write a wrapper script or shell alias, but there is an easier option.
Define `$FZF_DEFAULT_OPTS` like so:

```sh
export FZF_DEFAULT_OPTS="--height=40% --layout=reverse --info=inline --border --margin=1 --padding=1"
```
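
To make the setting permanent, you can append the same `export` line to your shell's startup file. A sketch, assuming bash reads `~/.bashrc` on your system (use `~/.zshrc` for zsh):

```sh
# Persist the default options across sessions (the rc file path is an assumption; adjust for your shell).
echo 'export FZF_DEFAULT_OPTS="--height=40% --layout=reverse --info=inline --border --margin=1 --padding=1"' >> ~/.bashrc
```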
|
||||
|
||||
### `--tmux`
|
||||
|
||||
(Requires tmux 3.3 or later)
|
||||
|
||||
If you're using tmux, you can open fzf in a tmux popup using `--tmux` option.
|
||||
|
||||
```sh
|
||||
# Open fzf in a tmux popup at the center of the screen with 70% width and height
|
||||
fzf --tmux 70%
|
||||
```
|
||||
|
||||

|
||||
|
||||
`--tmux` option is silently ignored if you're not on tmux. So if you're trying
|
||||
to avoid opening fzf in fullscreen, try specifying both `--height` and `--tmux`.
|
||||
|
||||
```sh
|
||||
# --tmux is specified later so it takes precedence over --height when on tmux.
|
||||
# If you're not on tmux, --tmux is ignored and --height is used instead.
|
||||
fzf --height 70% --tmux 70%
|
||||
```
|
||||
|
||||
You can also specify the position, width, and height of the popup window in
|
||||
the following format:
|
||||
|
||||
* `[center|top|bottom|left|right][,SIZE[%]][,SIZE[%]]`
|
||||
|
||||
```sh
|
||||
# 100% width and 60% height
|
||||
fzf --tmux 100%,60% --border horizontal
|
||||
```
|
||||
|
||||

|
||||
|
||||
```sh
|
||||
# On the right (50% width)
|
||||
fzf --tmux right
|
||||
```
|
||||
|
||||

|
||||
|
||||
```sh
|
||||
# On the left (40% width and 70% height)
|
||||
fzf --tmux left,40%,70%
|
||||
```
|
||||
|
||||

|
||||
|
||||
> [!TIP]
|
||||
> You might also want to check out my tmux plugins which support this popup
|
||||
> window layout.
|
||||
>
|
||||
> - https://github.com/junegunn/tmux-fzf-url
|
||||
> - https://github.com/junegunn/tmux-fzf-maccy
|
||||
|
||||
Dynamic reloading of the list
|
||||
-----------------------------
|
||||
|
||||
fzf can dynamically update the candidate list using an arbitrary program with
|
||||
`reload` bindings (The design document for `reload` can be found
|
||||
[here][reload]).
|
||||
|
||||
[reload]: https://github.com/junegunn/fzf/issues/1750
|
||||
|
||||
### Updating the list of processes by pressing CTRL-R
|
||||
|
||||
This example shows how you can set up a binding for dynamically updating the
|
||||
list without restarting fzf.
|
||||
|
||||
```sh
|
||||
(date; ps -ef) |
|
||||
fzf --bind='ctrl-r:reload(date; ps -ef)' \
|
||||
--header=$'Press CTRL-R to reload\n\n' --header-lines=2 \
|
||||
--preview='echo {}' --preview-window=down,3,wrap \
|
||||
--layout=reverse --height=80% | awk '{print $2}' | xargs kill -9
|
||||
```
|
||||
|
||||

|
||||
|
||||
- The initial command is `(date; ps -ef)`. It prints the current date and
|
||||
time, and the list of the processes.
|
||||
- With `--header` option, you can show any message as the fixed header.
|
||||
- To disallow selecting the first two lines (`date` and `ps` header), we use
|
||||
`--header-lines=2` option.
|
||||
- `--bind='ctrl-r:reload(date; ps -ef)'` binds CTRL-R to `reload` action that
|
||||
runs `date; ps -ef`, so we can update the list of the processes by pressing
|
||||
CTRL-R.
|
||||
- We use a simple `echo {}` preview command, so we can see the entire line in the
  preview window below even if it's too long.
|
||||
|
||||
### Toggling between data sources
|
||||
|
||||
You're not limited to just one reload binding. Set up multiple bindings so
|
||||
you can switch between data sources.
|
||||
|
||||
```sh
|
||||
find * | fzf --prompt 'All> ' \
|
||||
--header 'CTRL-D: Directories / CTRL-F: Files' \
|
||||
--bind 'ctrl-d:change-prompt(Directories> )+reload(find * -type d)' \
|
||||
--bind 'ctrl-f:change-prompt(Files> )+reload(find * -type f)'
|
||||
```
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
### Toggling with a single key binding
|
||||
|
||||
The above example uses two different key bindings to toggle between two modes,
|
||||
but can we just use a single key binding?
|
||||
|
||||
To make a key binding behave differently each time it is pressed, we need:
|
||||
|
||||
1. a way to store the current state, i.e. "which mode are we in?"
|
||||
2. and a way to dynamically perform different actions depending on the state.
|
||||
|
||||
The following example shows how to 1. store the current mode in the prompt
string, and 2. use this information (`$FZF_PROMPT`) to determine which
actions to perform using the `transform` action.
|
||||
|
||||
```sh
|
||||
fd --type file |
|
||||
fzf --prompt 'Files> ' \
|
||||
--header 'CTRL-T: Switch between Files/Directories' \
|
||||
--bind 'ctrl-t:transform:[[ ! $FZF_PROMPT =~ Files ]] &&
|
||||
echo "change-prompt(Files> )+reload(fd --type file)" ||
|
||||
echo "change-prompt(Directories> )+reload(fd --type directory)"' \
|
||||
--preview '[[ $FZF_PROMPT =~ Files ]] && bat --color=always {} || tree -C {}'
|
||||
```
|
||||
|
||||
Ripgrep integration
|
||||
-------------------
|
||||
|
||||
### Using fzf as the secondary filter
|
||||
|
||||
* Requires [bat][bat]
|
||||
* Requires [Ripgrep][rg]
|
||||
|
||||
[bat]: https://github.com/sharkdp/bat
|
||||
[rg]: https://github.com/BurntSushi/ripgrep
|
||||
|
||||
fzf is so fast at filtering a list that you will rarely have to think
about its performance. But it is not the right tool for searching for text
inside many large files; in that case, you should definitely use something
like [Ripgrep][rg].
|
||||
|
||||
In the next example, Ripgrep is the primary filter that searches for the given
|
||||
text in files, and fzf is used as the secondary fuzzy filter that adds
|
||||
interactivity to the workflow. And we use [bat][bat] to show the matching line in
|
||||
the preview window.
|
||||
|
||||
This is a bash script, so it will not run as expected on other, non-compliant
shells. To avoid compatibility issues, let's save this snippet as a script
file called `rfv`.
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# 1. Search for text in files using Ripgrep
|
||||
# 2. Interactively narrow down the list using fzf
|
||||
# 3. Open the file in Vim
|
||||
rg --color=always --line-number --no-heading --smart-case "${*:-}" |
|
||||
fzf --ansi \
|
||||
--color "hl:-1:underline,hl+:-1:underline:reverse" \
|
||||
--delimiter : \
|
||||
--preview 'bat --color=always {1} --highlight-line {2}' \
|
||||
--preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
|
||||
--bind 'enter:become(vim {1} +{2})'
|
||||
```
|
||||
|
||||
And run it with an initial query string.
|
||||
|
||||
```sh
|
||||
# Make the script executable
|
||||
chmod +x rfv
|
||||
|
||||
# Run it with the initial query "algo"
|
||||
./rfv algo
|
||||
```
|
||||
|
||||
> Ripgrep will perform the initial search and list all the lines that contain
|
||||
`algo`. Then we further narrow down the list on fzf.
|
||||
|
||||

|
||||
|
||||
I know it's a lot to digest, so let's break down the code.
|
||||
|
||||
- Ripgrep prints the matching lines in the following format:
|
||||
```
|
||||
man/man1/fzf.1:54:.BI "--algo=" TYPE
|
||||
man/man1/fzf.1:55:Fuzzy matching algorithm (default: v2)
|
||||
man/man1/fzf.1:58:.BR v2 " Optimal scoring algorithm (quality)"
|
||||
src/pattern_test.go:7: "github.com/junegunn/fzf/src/algo"
|
||||
```
|
||||
The first token delimited by `:` is the file path, and the second token is
|
||||
the line number of the matching line. They respectively correspond to `{1}`
|
||||
and `{2}` in the preview command.
|
||||
- `--preview 'bat --color=always {1} --highlight-line {2}'`
|
||||
- As we run `rg` with `--color=always` option, we should tell fzf to parse
|
||||
ANSI color codes in the input by setting `--ansi`.
|
||||
- We customize how fzf colors various text elements using `--color` option.
|
||||
`-1` tells fzf to keep the original color from the input. See `man fzf` for
|
||||
available color options.
|
||||
- The value of `--preview-window` option consists of 5 components delimited
|
||||
by `,`
|
||||
1. `up` — Position of the preview window
|
||||
1. `60%` — Size of the preview window
|
||||
1. `border-bottom` — Preview window border only on the bottom side
|
||||
1. `+{2}+3/3` — Scroll offset of the preview contents
|
||||
1. `~3` — Fixed header
|
||||
- Let's break down the latter two. We want to display the bat output in the
|
||||
preview window with a certain scroll offset so that the matching line is
|
||||
positioned near the center of the preview window.
|
||||
- `+{2}` — The base offset is extracted from the second token
|
||||
- `+3` — We add 3 lines to the base offset to compensate for the header
|
||||
part of `bat` output
|
||||
- ```
|
||||
───────┬──────────────────────────────────────────────────────────
|
||||
│ File: CHANGELOG.md
|
||||
───────┼──────────────────────────────────────────────────────────
|
||||
1 │ CHANGELOG
|
||||
2 │ =========
|
||||
3 │
|
||||
4 │ 0.26.0
|
||||
5 │ ------
|
||||
```
|
||||
- `/3` adjusts the offset so that the matching line is shown at a third
|
||||
position in the window
|
||||
- `~3` makes the top three lines fixed header so that they are always
|
||||
visible regardless of the scroll offset
|
||||
- Instead of using shell script to process the final output of fzf, we use
|
||||
`become(...)` action which was added in [fzf 0.38.0][0.38.0] to turn fzf
|
||||
  into a new process that opens the file with `vim` (`vim {1}`) and moves the
  cursor to the line (`+{2}`).
|
||||
|
||||
[0.38.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0380
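
For comparison, here is a minimal sketch of the shell-processing approach that
`become` replaces; it captures the selected line, splits it on `:`, and then
starts `vim` from the script itself (it assumes the file path contains no `:`):

```bash
selected=$(rg --color=always --line-number --no-heading --smart-case "${*:-}" |
  fzf --ansi --delimiter :) || exit 0
file=${selected%%:*}       # first token: file path
rest=${selected#*:}
line=${rest%%:*}           # second token: line number
vim "$file" "+$line"
```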
|
||||
|
||||
### Using fzf as interactive Ripgrep launcher
|
||||
|
||||
We have learned that we can bind the `reload` action to a key (e.g.
`--bind=ctrl-r:reload(ps -ef)`). In the next example, we are going to **bind
|
||||
`reload` action to `change` event** so that whenever the user *changes* the
|
||||
query string on fzf, `reload` action is triggered.
|
||||
|
||||
Here is a variation of the above `rfv` script. fzf will restart Ripgrep every
|
||||
time the user updates the query string on fzf. Searching and filtering are
done entirely by Ripgrep, and fzf merely provides the interactive interface.
|
||||
So we lose the "fuzziness", but the performance will be better on larger
|
||||
projects, and it will free up memory as you narrow down the results.
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# 1. Search for text in files using Ripgrep
|
||||
# 2. Interactively restart Ripgrep with reload action
|
||||
# 3. Open the file in Vim
|
||||
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
|
||||
INITIAL_QUERY="${*:-}"
|
||||
fzf --ansi --disabled --query "$INITIAL_QUERY" \
|
||||
--bind "start:reload:$RG_PREFIX {q}" \
|
||||
--bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
|
||||
--delimiter : \
|
||||
--preview 'bat --color=always {1} --highlight-line {2}' \
|
||||
--preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
|
||||
--bind 'enter:become(vim {1} +{2})'
|
||||
```
|
||||
|
||||

|
||||
|
||||
- Instead of starting fzf in the usual `rg ... | fzf` form, we make it start
|
||||
the initial Ripgrep process immediately via `start:reload` binding for the
|
||||
consistency of the code.
|
||||
- Filtering is no longer a responsibility of fzf; hence `--disabled`
|
||||
- `{q}` in the reload command evaluates to the query string on fzf prompt.
|
||||
- `sleep 0.1` in the reload command is for "debouncing". This small delay will
|
||||
reduce the number of intermediate Ripgrep processes while we're typing in
|
||||
a query.
|
||||
|
||||
### Switching to fzf-only search mode
|
||||
|
||||
In the previous example, we lost fuzzy matching capability as we completely
|
||||
delegated search functionality to Ripgrep. But we can dynamically switch to
|
||||
fzf-only search mode by *"unbinding"* `reload` action from `change` event.
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Two-phase filtering with Ripgrep and fzf
|
||||
#
|
||||
# 1. Search for text in files using Ripgrep
|
||||
# 2. Interactively restart Ripgrep with reload action
|
||||
# * Press alt-enter to switch to fzf-only filtering
|
||||
# 3. Open the file in Vim
|
||||
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
|
||||
INITIAL_QUERY="${*:-}"
|
||||
fzf --ansi --disabled --query "$INITIAL_QUERY" \
|
||||
--bind "start:reload:$RG_PREFIX {q}" \
|
||||
--bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
|
||||
--bind "alt-enter:unbind(change,alt-enter)+change-prompt(2. fzf> )+enable-search+clear-query" \
|
||||
--color "hl:-1:underline,hl+:-1:underline:reverse" \
|
||||
--prompt '1. ripgrep> ' \
|
||||
--delimiter : \
|
||||
--preview 'bat --color=always {1} --highlight-line {2}' \
|
||||
--preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
|
||||
--bind 'enter:become(vim {1} +{2})'
|
||||
```
|
||||
|
||||
* Phase 1. Filtering with Ripgrep
|
||||

|
||||
* Phase 2. Filtering with fzf
|
||||

|
||||
|
||||
- We added `--prompt` option to show that fzf is initially running in "Ripgrep
|
||||
launcher mode".
|
||||
- We added `alt-enter` binding that
|
||||
1. unbinds `change` event, so Ripgrep is no longer restarted on key press
|
||||
2. changes the prompt to `2. fzf>`
|
||||
3. enables search functionality of fzf
|
||||
4. clears the current query string that was used to start Ripgrep process
|
||||
5. and unbinds `alt-enter` itself as this is a one-off event
|
||||
- We brought back the `--color` option to customize how the matching chunks are
  displayed in the second phase
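
To see the "one-off" unbinding behavior in isolation, here is a minimal,
self-contained sketch: the first CTRL-X press clears the query, and `unbind`
then removes the binding so further presses no longer trigger the action.

```sh
seq 100 | fzf --bind 'ctrl-x:clear-query+unbind(ctrl-x)'
```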
|
||||
|
||||
### Switching between Ripgrep mode and fzf mode
|
||||
|
||||
[fzf 0.30.0][0.30.0] added `rebind` action so we can "rebind" the bindings
|
||||
that were previously "unbound" via `unbind`.
|
||||
|
||||
This is an improved version of the previous example that allows us to switch
|
||||
between Ripgrep launcher mode and fzf-only filtering mode via CTRL-R and
|
||||
CTRL-F.
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Switch between Ripgrep launcher mode (CTRL-R) and fzf filtering mode (CTRL-F)
|
||||
rm -f /tmp/rg-fzf-{r,f}
|
||||
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
|
||||
INITIAL_QUERY="${*:-}"
|
||||
fzf --ansi --disabled --query "$INITIAL_QUERY" \
|
||||
--bind "start:reload($RG_PREFIX {q})+unbind(ctrl-r)" \
|
||||
--bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
|
||||
--bind "ctrl-f:unbind(change,ctrl-f)+change-prompt(2. fzf> )+enable-search+rebind(ctrl-r)+transform-query(echo {q} > /tmp/rg-fzf-r; cat /tmp/rg-fzf-f)" \
|
||||
--bind "ctrl-r:unbind(ctrl-r)+change-prompt(1. ripgrep> )+disable-search+reload($RG_PREFIX {q} || true)+rebind(change,ctrl-f)+transform-query(echo {q} > /tmp/rg-fzf-f; cat /tmp/rg-fzf-r)" \
|
||||
--color "hl:-1:underline,hl+:-1:underline:reverse" \
|
||||
--prompt '1. ripgrep> ' \
|
||||
--delimiter : \
|
||||
--header '╱ CTRL-R (ripgrep mode) ╱ CTRL-F (fzf mode) ╱' \
|
||||
--preview 'bat --color=always {1} --highlight-line {2}' \
|
||||
--preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
|
||||
--bind 'enter:become(vim {1} +{2})'
|
||||
```
|
||||
|
||||
- To restore the query string when switching between modes, we store the
|
||||
current query in `/tmp/rg-fzf-{r,f}` files and restore the query using
|
||||
`transform-query` action which was added in [fzf 0.36.0][0.36.0].
|
||||
- Also note that we unbind the `ctrl-r` binding on the `start` event, which is
  triggered once when fzf starts.
|
||||
|
||||
[0.30.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0300
|
||||
[0.36.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0360
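
If the query-swapping part looks dense, here is the same idea in isolation,
a minimal sketch using an arbitrary scratch file under `/tmp`: the output of
the `transform-query` command becomes the new query, so we print the
previously saved query while writing the current one to the file.

```sh
rm -f /tmp/fzf-saved-query
seq 100 | fzf --bind 'ctrl-s:transform-query(cur={q}; cat /tmp/fzf-saved-query 2> /dev/null; echo "$cur" > /tmp/fzf-saved-query)'
```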
|
||||
|
||||
### Switching between Ripgrep mode and fzf mode using a single key binding
|
||||
|
||||
In contrast to the previous version, we use just one hotkey to toggle between
Ripgrep mode and fzf mode. This is achieved by using `$FZF_PROMPT` as the state
within the `transform` action, a feature introduced in [fzf 0.45.0][0.45.0]. A
|
||||
more detailed explanation of this feature can be found in a previous section -
|
||||
[Toggling with a single keybinding](#toggling-with-a-single-key-binding).
|
||||
|
||||
[0.45.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0450
|
||||
|
||||
When using the `transform` action, the query placeholder should be written in
its escaped form (`\{q}`) to prevent it from being evaluated immediately.
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Switch between Ripgrep mode and fzf filtering mode (CTRL-T)
|
||||
rm -f /tmp/rg-fzf-{r,f}
|
||||
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
|
||||
INITIAL_QUERY="${*:-}"
|
||||
fzf --ansi --disabled --query "$INITIAL_QUERY" \
|
||||
--bind "start:reload:$RG_PREFIX {q}" \
|
||||
--bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
|
||||
--bind 'ctrl-t:transform:[[ ! $FZF_PROMPT =~ ripgrep ]] &&
|
||||
echo "rebind(change)+change-prompt(1. ripgrep> )+disable-search+transform-query:echo \{q} > /tmp/rg-fzf-f; cat /tmp/rg-fzf-r" ||
|
||||
echo "unbind(change)+change-prompt(2. fzf> )+enable-search+transform-query:echo \{q} > /tmp/rg-fzf-r; cat /tmp/rg-fzf-f"' \
|
||||
--color "hl:-1:underline,hl+:-1:underline:reverse" \
|
||||
--prompt '1. ripgrep> ' \
|
||||
--delimiter : \
|
||||
--header 'CTRL-T: Switch between ripgrep/fzf' \
|
||||
--preview 'bat --color=always {1} --highlight-line {2}' \
|
||||
--preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
|
||||
--bind 'enter:become(vim {1} +{2})'
|
||||
```
|
||||
|
||||
Log tailing
|
||||
-----------
|
||||
|
||||
fzf can run long-running preview commands and render partial results before
completion. And when you specify the `follow` flag in the `--preview-window`
option, fzf will "`tail -f`" the result, automatically scrolling to the bottom.
|
||||
|
||||
```bash
|
||||
# With "follow", preview window will automatically scroll to the bottom.
|
||||
# "\033[2J" is an ANSI escape sequence for clearing the screen.
|
||||
# When fzf reads this code it clears the previous preview contents.
|
||||
fzf --preview-window follow --preview 'for i in $(seq 100000); do
|
||||
echo "$i"
|
||||
sleep 0.01
|
||||
(( i % 300 == 0 )) && printf "\033[2J"
|
||||
done'
|
||||
```
|
||||
|
||||

|
||||
|
||||
Admittedly, that was a silly example. Here's a practical one for browsing
|
||||
Kubernetes pods.
|
||||
|
||||
```bash
|
||||
pods() {
|
||||
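  # "command" is passed as an environment variable of the fzf process, so the
  # reload bindings below (kept in single quotes) can expand $command in their
  # own shell when they run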
command='kubectl get pods --all-namespaces' fzf \
|
||||
--info=inline --layout=reverse --header-lines=1 \
|
||||
--prompt "$(kubectl config current-context | sed 's/-context$//')> " \
|
||||
--header $'╱ Enter (kubectl exec) ╱ CTRL-O (open log in editor) ╱ CTRL-R (reload) ╱\n\n' \
|
||||
--bind 'start:reload:$command' \
|
||||
--bind 'ctrl-r:reload:$command' \
|
||||
--bind 'ctrl-/:change-preview-window(80%,border-bottom|hidden|)' \
|
||||
--bind 'enter:execute:kubectl exec -it --namespace {1} {2} -- bash' \
|
||||
--bind 'ctrl-o:execute:${EDITOR:-vim} <(kubectl logs --all-containers --namespace {1} {2})' \
|
||||
--preview-window up:follow \
|
||||
--preview 'kubectl logs --follow --all-containers --tail=10000 --namespace {1} {2}' "$@"
|
||||
}
|
||||
```
|
||||
|
||||

|
||||
|
||||
- The preview window will *"log tail"* the pod
|
||||
- Holding on to a large amount of log output will consume a lot of memory. So we
  limited the initial log amount with `--tail=10000`.
|
||||
- `execute` bindings allow you to run any command without leaving fzf
|
||||
- Press enter key on a pod to `kubectl exec` into it
|
||||
- Press CTRL-O to open the log in your editor
|
||||
- Press CTRL-R to reload the pod list
|
||||
- Press CTRL-/ repeatedly to rotate through different sets of preview
  window options (see the sketch after this list)
|
||||
1. `80%,border-bottom`
|
||||
1. `hidden`
|
||||
1. Empty string after `|` translates to the default options from `--preview-window`
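
Here is a minimal sketch of that rotation on its own, outside the Kubernetes
example: each CTRL-/ press advances to the next `|`-separated set of options,
and the empty last entry goes back to the default `--preview-window` settings.

```sh
ls | fzf --preview 'file {}' --preview-window 'right,50%' \
         --bind 'ctrl-/:change-preview-window(80%,border-bottom|hidden|)'
```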
|
||||
|
||||
Key bindings for git objects
|
||||
----------------------------
|
||||
|
||||
Oftentimes, you want to put the identifiers of various Git objects on the
command line. For example, it is common to write commands like these:
|
||||
|
||||
```sh
|
||||
git checkout [SOME_COMMIT_HASH or BRANCH or TAG]
|
||||
git diff [SOME_COMMIT_HASH or BRANCH or TAG] [SOME_COMMIT_HASH or BRANCH or TAG]
|
||||
```
|
||||
|
||||
[fzf-git.sh](https://github.com/junegunn/fzf-git.sh) project defines a set of
|
||||
fzf-based key bindings for Git objects. I strongly recommend that you check
|
||||
them out because they are seriously useful.
|
||||
|
||||
### Files listed in `git status`
|
||||
|
||||
<kbd>CTRL-G</kbd><kbd>CTRL-F</kbd>
|
||||
|
||||

|
||||
|
||||
### Branches
|
||||
|
||||
<kbd>CTRL-G</kbd><kbd>CTRL-B</kbd>
|
||||
|
||||

|
||||
|
||||
### Commit hashes
|
||||
|
||||
<kbd>CTRL-G</kbd><kbd>CTRL-H</kbd>
|
||||
|
||||

|
||||
|
||||
Color themes
|
||||
------------
|
||||
|
||||
You can customize how fzf colors the text elements with `--color` option. Here
|
||||
are a few color themes. Note that you need a terminal emulator that can
|
||||
display 24-bit colors.
|
||||
|
||||
```sh
|
||||
# junegunn/seoul256.vim (dark)
|
||||
export FZF_DEFAULT_OPTS='--color=bg+:#3F3F3F,bg:#4B4B4B,border:#6B6B6B,spinner:#98BC99,hl:#719872,fg:#D9D9D9,header:#719872,info:#BDBB72,pointer:#E12672,marker:#E17899,fg+:#D9D9D9,preview-bg:#3F3F3F,prompt:#98BEDE,hl+:#98BC99'
|
||||
```
|
||||
|
||||

|
||||
|
||||
```sh
|
||||
# junegunn/seoul256.vim (light)
|
||||
export FZF_DEFAULT_OPTS='--color=bg+:#D9D9D9,bg:#E1E1E1,border:#C8C8C8,spinner:#719899,hl:#719872,fg:#616161,header:#719872,info:#727100,pointer:#E12672,marker:#E17899,fg+:#616161,preview-bg:#D9D9D9,prompt:#0099BD,hl+:#719899'
|
||||
```
|
||||
|
||||

|
||||
|
||||
```sh
|
||||
# morhetz/gruvbox
|
||||
export FZF_DEFAULT_OPTS='--color=bg+:#3c3836,bg:#32302f,spinner:#fb4934,hl:#928374,fg:#ebdbb2,header:#928374,info:#8ec07c,pointer:#fb4934,marker:#fb4934,fg+:#ebdbb2,prompt:#fb4934,hl+:#fb4934'
|
||||
```
|
||||
|
||||

|
||||
|
||||
```sh
|
||||
# arcticicestudio/nord-vim
|
||||
export FZF_DEFAULT_OPTS='--color=bg+:#3B4252,bg:#2E3440,spinner:#81A1C1,hl:#616E88,fg:#D8DEE9,header:#616E88,info:#81A1C1,pointer:#81A1C1,marker:#81A1C1,fg+:#D8DEE9,prompt:#81A1C1,hl+:#81A1C1'
|
||||
```
|
||||
|
||||

|
||||
|
||||
```sh
|
||||
# tomasr/molokai
|
||||
export FZF_DEFAULT_OPTS='--color=bg+:#293739,bg:#1B1D1E,border:#808080,spinner:#E6DB74,hl:#7E8E91,fg:#F8F8F2,header:#7E8E91,info:#A6E22E,pointer:#A6E22E,marker:#F92672,fg+:#F8F8F2,prompt:#F92672,hl+:#F92672'
|
||||
```
|
||||
|
||||

|
||||
|
||||
### fzf Theme Playground
|
||||
|
||||
[fzf Theme Playground](https://vitormv.github.io/fzf-themes/) created by
|
||||
[Vitor Mello](https://github.com/vitormv) is a webpage where you can
|
||||
interactively create fzf themes.
|
||||
|
||||
### Generating fzf color theme from Vim color schemes
|
||||
|
||||
The Vim plugin of fzf can generate `--color` option from the current color
|
||||
scheme according to `g:fzf_colors` variable. You can find the detailed
|
||||
explanation [here](https://github.com/junegunn/fzf/blob/master/README-VIM.md#explanation-of-gfzf_colors).
|
||||
|
||||
Here is an example. Add this to your Vim configuration file.
|
||||
|
||||
```vim
|
||||
let g:fzf_colors =
|
||||
\ { 'fg': ['fg', 'Normal'],
|
||||
\ 'bg': ['bg', 'Normal'],
|
||||
\ 'preview-bg': ['bg', 'NormalFloat'],
|
||||
\ 'hl': ['fg', 'Comment'],
|
||||
\ 'fg+': ['fg', 'CursorLine', 'CursorColumn', 'Normal'],
|
||||
\ 'bg+': ['bg', 'CursorLine', 'CursorColumn'],
|
||||
\ 'hl+': ['fg', 'Statement'],
|
||||
\ 'info': ['fg', 'PreProc'],
|
||||
\ 'border': ['fg', 'Ignore'],
|
||||
\ 'prompt': ['fg', 'Conditional'],
|
||||
\ 'pointer': ['fg', 'Exception'],
|
||||
\ 'marker': ['fg', 'Keyword'],
|
||||
\ 'spinner': ['fg', 'Label'],
|
||||
\ 'header': ['fg', 'Comment'] }
|
||||
```
|
||||
|
||||
Then you can see how the `--color` option is generated by printing the result
|
||||
of `fzf#wrap()`.
|
||||
|
||||
```vim
|
||||
:echo fzf#wrap()
|
||||
```
|
||||
|
||||
Use this command to append `export FZF_DEFAULT_OPTS="..."` line to the end of
|
||||
the current file.
|
||||
|
||||
```vim
|
||||
:call append('$', printf('export FZF_DEFAULT_OPTS="%s"', matchstr(fzf#wrap().options, "--color[^']*")))
|
||||
```
|
||||
BUILD.md (new file, 61 lines)
|
||||
Building fzf
|
||||
============
|
||||
|
||||
Build instructions
|
||||
------------------
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Go 1.20 or above
|
||||
|
||||
### Using Makefile
|
||||
|
||||
```sh
|
||||
# Build fzf binary for your platform in target
|
||||
make
|
||||
|
||||
# Build fzf binary and copy it to bin directory
|
||||
make install
|
||||
|
||||
# Build fzf binaries and archives for all platforms using goreleaser
|
||||
make build
|
||||
|
||||
# Publish GitHub release
|
||||
make release
|
||||
```
|
||||
|
||||
> [!WARNING]
|
||||
> Makefile uses git commands to determine the version and the revision
|
||||
> information for `fzf --version`. So if you're building fzf from an
|
||||
> environment where its git information is not available, you have to manually
|
||||
> set `$FZF_VERSION` and `$FZF_REVISION`.
|
||||
>
|
||||
> e.g. `FZF_VERSION=0.24.0 FZF_REVISION=tarball make`
|
||||
|
||||
> [!TIP]
|
||||
> To build fzf with profiling options enabled, set `TAGS=pprof`
|
||||
>
|
||||
> ```sh
|
||||
> TAGS=pprof make clean install
|
||||
> fzf --profile-cpu /tmp/cpu.pprof --profile-mem /tmp/mem.pprof \
|
||||
> --profile-block /tmp/block.pprof --profile-mutex /tmp/mutex.pprof
|
||||
> ```
|
||||
|
||||
Third-party libraries used
|
||||
--------------------------
|
||||
|
||||
- [rivo/uniseg](https://github.com/rivo/uniseg)
|
||||
- Licensed under [MIT](https://raw.githubusercontent.com/rivo/uniseg/master/LICENSE.txt)
|
||||
- [mattn/go-shellwords](https://github.com/mattn/go-shellwords)
|
||||
- Licensed under [MIT](http://mattn.mit-license.org)
|
||||
- [mattn/go-isatty](https://github.com/mattn/go-isatty)
|
||||
- Licensed under [MIT](http://mattn.mit-license.org)
|
||||
- [tcell](https://github.com/gdamore/tcell)
|
||||
- Licensed under [Apache License 2.0](https://github.com/gdamore/tcell/blob/master/LICENSE)
|
||||
- [fastwalk](https://github.com/charlievieth/fastwalk)
|
||||
- Licensed under [MIT](https://raw.githubusercontent.com/charlievieth/fastwalk/master/LICENSE)
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
[MIT](LICENSE)
|
||||
CHANGELOG.md (1786 lines; diff suppressed because it is too large)
Dockerfile (new file, 12 lines)
|
||||
FROM ubuntu:24.04
|
||||
RUN apt-get update -y && apt install -y git make golang zsh fish ruby tmux
|
||||
RUN gem install --no-document -v 5.22.3 minitest
|
||||
RUN echo '. /usr/share/bash-completion/completions/git' >> ~/.bashrc
|
||||
RUN echo '. ~/.bashrc' >> ~/.bash_profile
|
||||
|
||||
# Do not set default PS1
|
||||
RUN rm -f /etc/bash.bashrc
|
||||
COPY . /fzf
|
||||
RUN cd /fzf && make install && ./install --all
|
||||
ENV LANG C.UTF-8
|
||||
CMD tmux new 'set -o pipefail; ruby /fzf/test/test_go.rb | tee out && touch ok' && cat out && [ -e ok ]
|
||||
LICENSE (2 lines changed)
@@ -1,6 +1,6 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Junegunn Choi
|
||||
Copyright (c) 2013-2024 Junegunn Choi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
Makefile (new file, 190 lines)
|
||||
SHELL := bash
|
||||
GO ?= go
|
||||
GOOS ?= $(shell $(GO) env GOOS)
|
||||
|
||||
MAKEFILE := $(realpath $(lastword $(MAKEFILE_LIST)))
|
||||
ROOT_DIR := $(shell dirname $(MAKEFILE))
|
||||
SOURCES := $(wildcard *.go src/*.go src/*/*.go shell/*sh man/man1/*.1) $(MAKEFILE)
|
||||
|
||||
ifdef FZF_VERSION
|
||||
VERSION := $(FZF_VERSION)
|
||||
else
|
||||
VERSION := $(shell git describe --abbrev=0 2> /dev/null | sed "s/^v//")
|
||||
endif
|
||||
ifeq ($(VERSION),)
|
||||
$(error Not on git repository; cannot determine $$FZF_VERSION)
|
||||
endif
|
||||
VERSION_TRIM := $(shell sed "s/^v//; s/-.*//" <<< $(VERSION))
|
||||
VERSION_REGEX := $(subst .,\.,$(VERSION_TRIM))
|
||||
|
||||
ifdef FZF_REVISION
|
||||
REVISION := $(FZF_REVISION)
|
||||
else
|
||||
REVISION := $(shell git log -n 1 --pretty=format:%h --abbrev=8 -- $(SOURCES) 2> /dev/null)
|
||||
endif
|
||||
ifeq ($(REVISION),)
|
||||
$(error Not on git repository; cannot determine $$FZF_REVISION)
|
||||
endif
|
||||
BUILD_FLAGS := -a -ldflags "-s -w -X main.version=$(VERSION) -X main.revision=$(REVISION)" -tags "$(TAGS)" -trimpath
|
||||
|
||||
BINARY32 := fzf-$(GOOS)_386
|
||||
BINARY64 := fzf-$(GOOS)_amd64
|
||||
BINARYS390 := fzf-$(GOOS)_s390x
|
||||
BINARYARM5 := fzf-$(GOOS)_arm5
|
||||
BINARYARM6 := fzf-$(GOOS)_arm6
|
||||
BINARYARM7 := fzf-$(GOOS)_arm7
|
||||
BINARYARM8 := fzf-$(GOOS)_arm8
|
||||
BINARYPPC64LE := fzf-$(GOOS)_ppc64le
|
||||
BINARYRISCV64 := fzf-$(GOOS)_riscv64
|
||||
BINARYLOONG64 := fzf-$(GOOS)_loong64
|
||||
|
||||
# https://en.wikipedia.org/wiki/Uname
|
||||
UNAME_M := $(shell uname -m)
|
||||
ifeq ($(UNAME_M),x86_64)
|
||||
BINARY := $(BINARY64)
|
||||
else ifeq ($(UNAME_M),amd64)
|
||||
BINARY := $(BINARY64)
|
||||
else ifeq ($(UNAME_M),s390x)
|
||||
BINARY := $(BINARYS390)
|
||||
else ifeq ($(UNAME_M),i686)
|
||||
BINARY := $(BINARY32)
|
||||
else ifeq ($(UNAME_M),i386)
|
||||
BINARY := $(BINARY32)
|
||||
else ifeq ($(UNAME_M),armv5l)
|
||||
BINARY := $(BINARYARM5)
|
||||
else ifeq ($(UNAME_M),armv6l)
|
||||
BINARY := $(BINARYARM6)
|
||||
else ifeq ($(UNAME_M),armv7l)
|
||||
BINARY := $(BINARYARM7)
|
||||
else ifeq ($(UNAME_M),armv8l)
|
||||
# armv8l is always 32-bit and should implement the armv7 ISA, so
|
||||
# just use the same filename as for armv7.
|
||||
BINARY := $(BINARYARM7)
|
||||
else ifeq ($(UNAME_M),arm64)
|
||||
BINARY := $(BINARYARM8)
|
||||
else ifeq ($(UNAME_M),aarch64)
|
||||
BINARY := $(BINARYARM8)
|
||||
else ifeq ($(UNAME_M),ppc64le)
|
||||
BINARY := $(BINARYPPC64LE)
|
||||
else ifeq ($(UNAME_M),riscv64)
|
||||
BINARY := $(BINARYRISCV64)
|
||||
else ifeq ($(UNAME_M),loongarch64)
|
||||
BINARY := $(BINARYLOONG64)
|
||||
else
|
||||
$(error Build on $(UNAME_M) is not supported, yet.)
|
||||
endif
|
||||
|
||||
all: target/$(BINARY)
|
||||
|
||||
test: $(SOURCES)
|
||||
SHELL=/bin/sh GOOS= $(GO) test -v -tags "$(TAGS)" \
|
||||
github.com/junegunn/fzf/src \
|
||||
github.com/junegunn/fzf/src/algo \
|
||||
github.com/junegunn/fzf/src/tui \
|
||||
github.com/junegunn/fzf/src/util
|
||||
|
||||
bench:
|
||||
cd src && SHELL=/bin/sh GOOS= $(GO) test -v -tags "$(TAGS)" -run=Bench -bench=. -benchmem
|
||||
|
||||
lint: $(SOURCES) test/test_go.rb
|
||||
[ -z "$$(gofmt -s -d src)" ] || (gofmt -s -d src; exit 1)
|
||||
rubocop --require rubocop-minitest --require rubocop-performance
|
||||
|
||||
install: bin/fzf
|
||||
|
||||
generate:
|
||||
PATH=$(PATH):$(GOPATH)/bin $(GO) generate ./...
|
||||
|
||||
build:
|
||||
goreleaser build --clean --snapshot --skip=post-hooks
|
||||
|
||||
release:
|
||||
# Make sure that the tests pass and the build works
|
||||
TAGS=tcell make test
|
||||
make test build clean
|
||||
|
||||
ifndef GITHUB_TOKEN
|
||||
$(error GITHUB_TOKEN is not defined)
|
||||
endif
|
||||
|
||||
# Check if we are on master branch
|
||||
ifneq ($(shell git symbolic-ref --short HEAD),master)
|
||||
$(error Not on master branch)
|
||||
endif
|
||||
|
||||
# Check if version numbers are properly updated
|
||||
grep -q ^$(VERSION_REGEX)$$ CHANGELOG.md
|
||||
grep -qF '"fzf $(VERSION_TRIM)"' man/man1/fzf.1
|
||||
grep -qF '"fzf $(VERSION_TRIM)"' man/man1/fzf-tmux.1
|
||||
grep -qF $(VERSION) install
|
||||
grep -qF $(VERSION) install.ps1
|
||||
|
||||
# Make release note out of CHANGELOG.md
|
||||
mkdir -p tmp
|
||||
sed -n '/^$(VERSION_REGEX)$$/,/^[0-9]/p' CHANGELOG.md | tail -r | \
|
||||
sed '1,/^ *$$/d' | tail -r | sed 1,2d | tee tmp/release-note
|
||||
|
||||
# Push to temp branch first so that install scripts always works on master branch
|
||||
git checkout -B temp master
|
||||
git push origin temp --follow-tags --force
|
||||
|
||||
# Make a GitHub release
|
||||
goreleaser --clean --release-notes tmp/release-note
|
||||
|
||||
# Push to master
|
||||
git checkout master
|
||||
git push origin master
|
||||
|
||||
# Delete temp branch
|
||||
git push origin --delete temp
|
||||
|
||||
clean:
|
||||
$(RM) -r dist target
|
||||
|
||||
target/$(BINARY32): $(SOURCES)
|
||||
GOARCH=386 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARY64): $(SOURCES)
|
||||
GOARCH=amd64 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYS390): $(SOURCES)
|
||||
GOARCH=s390x $(GO) build $(BUILD_FLAGS) -o $@
|
||||
# https://github.com/golang/go/wiki/GoArm
|
||||
target/$(BINARYARM5): $(SOURCES)
|
||||
GOARCH=arm GOARM=5 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYARM6): $(SOURCES)
|
||||
GOARCH=arm GOARM=6 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYARM7): $(SOURCES)
|
||||
GOARCH=arm GOARM=7 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYARM8): $(SOURCES)
|
||||
GOARCH=arm64 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYPPC64LE): $(SOURCES)
|
||||
GOARCH=ppc64le $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYRISCV64): $(SOURCES)
|
||||
GOARCH=riscv64 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
target/$(BINARYLOONG64): $(SOURCES)
|
||||
GOARCH=loong64 $(GO) build $(BUILD_FLAGS) -o $@
|
||||
|
||||
bin/fzf: target/$(BINARY) | bin
|
||||
-rm -f bin/fzf
|
||||
cp -f target/$(BINARY) bin/fzf
|
||||
|
||||
docker:
|
||||
docker build -t fzf-ubuntu .
|
||||
docker run -it fzf-ubuntu tmux
|
||||
|
||||
docker-test:
|
||||
docker build -t fzf-ubuntu .
|
||||
docker run -it fzf-ubuntu
|
||||
|
||||
update:
|
||||
$(GO) get -u
|
||||
$(GO) mod tidy
|
||||
|
||||
.PHONY: all generate build release test bench lint install clean docker docker-test update
|
||||
README-VIM.md (new file, 495 lines)
|
||||
FZF Vim integration
|
||||
===================
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Once you have fzf installed, you can enable it inside Vim simply by adding the
|
||||
directory to `&runtimepath` in your Vim configuration file. The path may
|
||||
differ depending on the package manager.
|
||||
|
||||
```vim
|
||||
" If installed using Homebrew
|
||||
set rtp+=/usr/local/opt/fzf
|
||||
|
||||
" If installed using Homebrew on Apple Silicon
|
||||
set rtp+=/opt/homebrew/opt/fzf
|
||||
|
||||
" If you have cloned fzf on ~/.fzf directory
|
||||
set rtp+=~/.fzf
|
||||
```
|
||||
|
||||
If you use [vim-plug](https://github.com/junegunn/vim-plug), the same can be
|
||||
written as:
|
||||
|
||||
```vim
|
||||
" If installed using Homebrew
|
||||
Plug '/usr/local/opt/fzf'
|
||||
|
||||
" If installed using Homebrew on Apple Silicon
|
||||
Plug '/opt/homebrew/opt/fzf'
|
||||
|
||||
" If you have cloned fzf on ~/.fzf directory
|
||||
Plug '~/.fzf'
|
||||
```
|
||||
|
||||
But if you want the latest Vim plugin file from GitHub rather than the one
|
||||
included in the package, write:
|
||||
|
||||
```vim
|
||||
Plug 'junegunn/fzf'
|
||||
```
|
||||
|
||||
The Vim plugin will pick up fzf binary available on the system. If fzf is not
|
||||
found on `$PATH`, it will ask you if it should download the latest binary for
|
||||
you.
|
||||
|
||||
To make sure that you have the latest version of the binary, set up
|
||||
post-update hook like so:
|
||||
|
||||
```vim
|
||||
Plug 'junegunn/fzf', { 'do': { -> fzf#install() } }
|
||||
```
|
||||
|
||||
Summary
|
||||
-------
|
||||
|
||||
The Vim plugin of fzf provides two core functions, and the `:FZF` command,
which is the basic file selector command built on top of them.
|
||||
|
||||
1. **`fzf#run([spec dict])`**
|
||||
- Starts fzf inside Vim with the given spec
|
||||
- `:call fzf#run({'source': 'ls'})`
|
||||
2. **`fzf#wrap([spec dict]) -> (dict)`**
|
||||
- Takes a spec for `fzf#run` and returns an extended version of it with
|
||||
additional options for addressing global preferences (`g:fzf_xxx`)
|
||||
- `:echo fzf#wrap({'source': 'ls'})`
|
||||
- We usually *wrap* a spec with `fzf#wrap` before passing it to `fzf#run`
|
||||
- `:call fzf#run(fzf#wrap({'source': 'ls'}))`
|
||||
3. **`:FZF [fzf_options string] [path string]`**
|
||||
- Basic fuzzy file selector
|
||||
- A reference implementation for those who don't want to write VimScript
|
||||
to implement custom commands
|
||||
- If you're looking for more such commands, check out [fzf.vim](https://github.com/junegunn/fzf.vim) project.
|
||||
|
||||
The most important of all is `fzf#run`, but it is easier to understand the
whole picture if we start off with the `:FZF` command.
|
||||
|
||||
`:FZF[!]`
|
||||
---------
|
||||
|
||||
```vim
|
||||
" Look for files under current directory
|
||||
:FZF
|
||||
|
||||
" Look for files under your home directory
|
||||
:FZF ~
|
||||
|
||||
" With fzf command-line options
|
||||
:FZF --reverse --info=inline /tmp
|
||||
|
||||
" Bang version starts fzf in fullscreen mode
|
||||
:FZF!
|
||||
```
|
||||
|
||||
Similarly to [ctrlp.vim](https://github.com/kien/ctrlp.vim), use enter key,
|
||||
`CTRL-T`, `CTRL-X` or `CTRL-V` to open selected files in the current window,
|
||||
in new tabs, in horizontal splits, or in vertical splits respectively.
|
||||
|
||||
Note that the environment variables `FZF_DEFAULT_COMMAND` and
|
||||
`FZF_DEFAULT_OPTS` also apply here.
|
||||
|
||||
### Configuration
|
||||
|
||||
- `g:fzf_action`
|
||||
- Customizable extra key bindings for opening selected files in different ways
|
||||
- `g:fzf_layout`
|
||||
- Determines the size and position of fzf window
|
||||
- `g:fzf_colors`
|
||||
- Customizes fzf colors to match the current color scheme
|
||||
- `g:fzf_history_dir`
|
||||
- Enables history feature
|
||||
|
||||
#### Examples
|
||||
|
||||
```vim
|
||||
" This is the default extra key bindings
|
||||
let g:fzf_action = {
|
||||
\ 'ctrl-t': 'tab split',
|
||||
\ 'ctrl-x': 'split',
|
||||
\ 'ctrl-v': 'vsplit' }
|
||||
|
||||
" An action can be a reference to a function that processes selected lines
|
||||
function! s:build_quickfix_list(lines)
|
||||
call setqflist(map(copy(a:lines), '{ "filename": v:val, "lnum": 1 }'))
|
||||
copen
|
||||
cc
|
||||
endfunction
|
||||
|
||||
let g:fzf_action = {
|
||||
\ 'ctrl-q': function('s:build_quickfix_list'),
|
||||
\ 'ctrl-t': 'tab split',
|
||||
\ 'ctrl-x': 'split',
|
||||
\ 'ctrl-v': 'vsplit' }
|
||||
|
||||
" Default fzf layout
|
||||
" - Popup window (center of the screen)
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||
|
||||
" - Popup window (center of the current window)
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true } }
|
||||
|
||||
" - Popup window (anchored to the bottom of the current window)
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true, 'yoffset': 1.0 } }
|
||||
|
||||
" - down / up / left / right
|
||||
let g:fzf_layout = { 'down': '40%' }
|
||||
|
||||
" - Window using a Vim command
|
||||
let g:fzf_layout = { 'window': 'enew' }
|
||||
let g:fzf_layout = { 'window': '-tabnew' }
|
||||
let g:fzf_layout = { 'window': '10new' }
|
||||
|
||||
" Customize fzf colors to match your color scheme
|
||||
" - fzf#wrap translates this to a set of `--color` options
|
||||
let g:fzf_colors =
|
||||
\ { 'fg': ['fg', 'Normal'],
|
||||
\ 'bg': ['bg', 'Normal'],
|
||||
\ 'hl': ['fg', 'Comment'],
|
||||
\ 'fg+': ['fg', 'CursorLine', 'CursorColumn', 'Normal'],
|
||||
\ 'bg+': ['bg', 'CursorLine', 'CursorColumn'],
|
||||
\ 'hl+': ['fg', 'Statement'],
|
||||
\ 'info': ['fg', 'PreProc'],
|
||||
\ 'border': ['fg', 'Ignore'],
|
||||
\ 'prompt': ['fg', 'Conditional'],
|
||||
\ 'pointer': ['fg', 'Exception'],
|
||||
\ 'marker': ['fg', 'Keyword'],
|
||||
\ 'spinner': ['fg', 'Label'],
|
||||
\ 'header': ['fg', 'Comment'] }
|
||||
|
||||
" Enable per-command history
|
||||
" - History files will be stored in the specified directory
|
||||
" - When set, CTRL-N and CTRL-P will be bound to 'next-history' and
|
||||
" 'previous-history' instead of 'down' and 'up'.
|
||||
let g:fzf_history_dir = '~/.local/share/fzf-history'
|
||||
```
|
||||
|
||||
##### Explanation of `g:fzf_colors`
|
||||
|
||||
`g:fzf_colors` is a dictionary mapping fzf elements to a color specification
|
||||
list:
|
||||
|
||||
element: [ component, group1 [, group2, ...] ]
|
||||
|
||||
- `element` is an fzf element to apply a color to:
|
||||
|
||||
| Element | Description |
|
||||
| --- | --- |
|
||||
| `fg` / `bg` / `hl` | Item (foreground / background / highlight) |
|
||||
| `fg+` / `bg+` / `hl+` | Current item (foreground / background / highlight) |
|
||||
| `preview-fg` / `preview-bg` | Preview window text and background |
|
||||
| `hl` / `hl+` | Highlighted substrings (normal / current) |
|
||||
| `gutter` | Background of the gutter on the left |
|
||||
| `pointer` | Pointer to the current line (`>`) |
|
||||
| `marker` | Multi-select marker (`>`) |
|
||||
| `border` | Border around the window (`--border` and `--preview`) |
|
||||
| `header` | Header (`--header` or `--header-lines`) |
|
||||
| `info` | Info line (match counters) |
|
||||
| `spinner` | Streaming input indicator |
|
||||
| `query` | Query string |
|
||||
| `disabled` | Query string when search is disabled |
|
||||
| `prompt` | Prompt before query (`> `) |
|
||||
|
||||
|
||||
- `component` specifies the component (`fg` / `bg`) from which to extract the
|
||||
color when considering each of the following highlight groups
|
||||
|
||||
- `group1 [, group2, ...]` is a list of highlight groups that are searched (in
|
||||
order) for a matching color definition
|
||||
|
||||
For example, consider the following specification:
|
||||
|
||||
```vim
|
||||
'prompt': ['fg', 'Conditional', 'Comment'],
|
||||
```
|
||||
|
||||
This means we color the **prompt**
|
||||
- using the `fg` attribute of the `Conditional` if it exists,
|
||||
- otherwise use the `fg` attribute of the `Comment` highlight group if it exists,
|
||||
- otherwise fall back to the default color settings for the **prompt**.
|
||||
|
||||
You can examine the color option generated according to the setting by printing
the result of the `fzf#wrap()` function like so:
|
||||
|
||||
```vim
|
||||
:echo fzf#wrap()
|
||||
```
|
||||
|
||||
`fzf#run`
|
||||
---------
|
||||
|
||||
The `fzf#run()` function is the core of the Vim integration. It takes a single
dictionary argument, *a spec*, and starts the fzf process accordingly. At the
very least, specify the `sink` option to tell it what to do with the selected
entry.
|
||||
|
||||
```vim
|
||||
call fzf#run({'sink': 'e'})
|
||||
```
|
||||
|
||||
We haven't specified the `source`, so this is equivalent to starting fzf on
|
||||
command line without standard input pipe; fzf will traverse the file system
|
||||
under the current directory to get the list of files. (If
|
||||
`$FZF_DEFAULT_COMMAND` is set, fzf will use the output of the command
|
||||
instead.) When you select one, it will open it with the sink, `:e` command. If
|
||||
you want to open it in a new tab, you can pass `:tabedit` command instead as
|
||||
the sink.
|
||||
|
||||
```vim
|
||||
call fzf#run({'sink': 'tabedit'})
|
||||
```
|
||||
|
||||
You can use any shell command as the source to generate the list. The
|
||||
following example will list the files managed by git. It's equivalent to
|
||||
running `git ls-files | fzf` on shell.
|
||||
|
||||
```vim
|
||||
call fzf#run({'source': 'git ls-files', 'sink': 'e'})
|
||||
```
|
||||
|
||||
fzf options can be specified as `options` entry in spec dictionary.
|
||||
|
||||
```vim
|
||||
call fzf#run({'sink': 'tabedit', 'options': '--multi --reverse'})
|
||||
```
|
||||
|
||||
You can also pass a layout option if you don't want the fzf window to take up
the entire screen.
|
||||
|
||||
```vim
|
||||
" up / down / left / right / window are allowed
|
||||
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'left': '40%'})
|
||||
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'window': '30vnew'})
|
||||
```
|
||||
|
||||
`source` doesn't have to be an external shell command; you can pass a Vim
list as the source. In the next example, we pass the names of color
schemes as the source to implement a color scheme selector.
|
||||
|
||||
```vim
|
||||
call fzf#run({'source': map(split(globpath(&rtp, 'colors/*.vim')),
|
||||
\ 'fnamemodify(v:val, ":t:r")'),
|
||||
\ 'sink': 'colo', 'left': '25%'})
|
||||
```
|
||||
|
||||
The following table summarizes the available options.
|
||||
|
||||
| Option name | Type | Description |
|
||||
| -------------------------- | ------------- | ---------------------------------------------------------------- |
|
||||
| `source` | string | External command to generate input to fzf (e.g. `find .`) |
|
||||
| `source` | list | Vim list as input to fzf |
|
||||
| `sink` | string | Vim command to handle the selected item (e.g. `e`, `tabe`) |
|
||||
| `sink` | funcref | Function to be called with each selected item |
|
||||
| `sinklist` (or `sink*`) | funcref | Similar to `sink`, but takes the list of output lines at once |
|
||||
| `exit` | funcref | Function to be called with the exit status of fzf (e.g. 0, 1, 2, 130) |
|
||||
| `options` | string/list | Options to fzf |
|
||||
| `dir` | string | Working directory |
|
||||
| `up`/`down`/`left`/`right` | number/string | (Layout) Window position and size (e.g. `20`, `50%`) |
|
||||
| `tmux` | string | (Layout) `--tmux` options (e.g. `90%,70%`) |
|
||||
| `window` (Vim 8 / Neovim) | string | (Layout) Command to open fzf window (e.g. `vertical aboveleft 30new`) |
|
||||
| `window` (Vim 8 / Neovim) | dict | (Layout) Popup window settings (e.g. `{'width': 0.9, 'height': 0.6}`) |
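
For example, the funcref forms let you post-process the selection in
VimScript. Below is a minimal sketch (the function name is arbitrary), similar
to the `g:fzf_action` example above but wired directly through the spec, that
uses `sinklist` to load all the selected files into the quickfix list at once:

```vim
" A sketch of the 'sinklist' option: the function receives all selected lines at once
function! s:files_to_quickfix(lines)
  call setqflist(map(copy(a:lines), '{ "filename": v:val, "lnum": 1 }'))
  copen
endfunction

call fzf#run(fzf#wrap({
  \ 'source': 'git ls-files',
  \ 'sinklist': function('s:files_to_quickfix'),
  \ 'options': '--multi'}))
```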
|
||||
|
||||
The `options` entry can be either a string or a list. For simple cases, a string
should suffice, but prefer the list type to avoid escaping issues.
|
||||
|
||||
```vim
|
||||
call fzf#run({'options': '--reverse --prompt "C:\\Program Files\\"'})
|
||||
call fzf#run({'options': ['--reverse', '--prompt', 'C:\Program Files\']})
|
||||
```
|
||||
|
||||
When `window` entry is a dictionary, fzf will start in a popup window. The
|
||||
following options are allowed:
|
||||
|
||||
- Required:
|
||||
- `width` [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||
- `height` [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||
- Optional:
|
||||
- `yoffset` [float default 0.5 range [0 ~ 1]]
|
||||
- `xoffset` [float default 0.5 range [0 ~ 1]]
|
||||
- `relative` [boolean default v:false]
|
||||
- `border` [string default `rounded` (`sharp` on Windows)]: Border style
|
||||
- `rounded` / `sharp` / `horizontal` / `vertical` / `top` / `bottom` / `left` / `right` / `no[ne]`
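
For instance, a sketch combining a few of the optional keys (the values here
are arbitrary):

```vim
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'yoffset': 0.1, 'border': 'sharp' } }
```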
|
||||
|
||||
`fzf#wrap`
|
||||
----------
|
||||
|
||||
We have seen that several aspects of `:FZF` command can be configured with
|
||||
a set of global option variables; different ways to open files
|
||||
(`g:fzf_action`), window position and size (`g:fzf_layout`), color palette
|
||||
(`g:fzf_colors`), etc.
|
||||
|
||||
So how can we make our custom `fzf#run` calls also respect those variables?
|
||||
Simply by *"wrapping"* the spec dictionary with `fzf#wrap` before passing it
|
||||
to `fzf#run`.
|
||||
|
||||
- **`fzf#wrap([name string], [spec dict], [fullscreen bool]) -> (dict)`**
|
||||
- All arguments are optional. Usually we only need to pass a spec dictionary.
|
||||
- `name` is for managing history files. It is ignored if
|
||||
`g:fzf_history_dir` is not defined.
|
||||
- `fullscreen` can be either `0` or `1` (default: 0).
|
||||
|
||||
`fzf#wrap` takes a spec and returns an extended version of it (also
|
||||
a dictionary) with additional options for addressing global preferences. You
|
||||
can examine the return value of it like so:
|
||||
|
||||
```vim
|
||||
echo fzf#wrap({'source': 'ls'})
|
||||
```
|
||||
|
||||
After we *"wrap"* our spec, we pass it to `fzf#run`.
|
||||
|
||||
```vim
|
||||
call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||
```
|
||||
|
||||
Now it supports `CTRL-T`, `CTRL-V`, and `CTRL-X` key bindings (configurable
|
||||
via `g:fzf_action`) and it opens fzf window according to `g:fzf_layout`
|
||||
setting.
|
||||
|
||||
To make it easier to use, let's define `LS` command.
|
||||
|
||||
```vim
|
||||
command! LS call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||
```
|
||||
|
||||
Type `:LS` and see how it works.
|
||||
|
||||
We would like to make `:LS!` (bang version) open fzf in fullscreen, just like
|
||||
`:FZF!`. Add `-bang` to command definition, and use `<bang>` value to set
|
||||
the last `fullscreen` argument of `fzf#wrap` (see `:help <bang>`).
|
||||
|
||||
```vim
|
||||
" On :LS!, <bang> evaluates to '!', and '!0' becomes 1
|
||||
command! -bang LS call fzf#run(fzf#wrap({'source': 'ls'}, <bang>0))
|
||||
```
|
||||
|
||||
Our `:LS` command will be much more useful if we can pass a directory argument
|
||||
to it, so that something like `:LS /tmp` is possible.
|
||||
|
||||
```vim
|
||||
command! -bang -complete=dir -nargs=? LS
|
||||
\ call fzf#run(fzf#wrap({'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||
```
|
||||
|
||||
Lastly, if you have enabled `g:fzf_history_dir`, you might want to assign
|
||||
a unique name to our command and pass it as the first argument to `fzf#wrap`.
|
||||
|
||||
```vim
|
||||
" The query history for this command will be stored as 'ls' inside g:fzf_history_dir.
|
||||
" The name is ignored if g:fzf_history_dir is not defined.
|
||||
command! -bang -complete=dir -nargs=? LS
|
||||
\ call fzf#run(fzf#wrap('ls', {'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||
```
|
||||
|
||||
### Global options supported by `fzf#wrap`
|
||||
|
||||
- `g:fzf_layout`
|
||||
- `g:fzf_action`
|
||||
- **Works only when no custom `sink` (or `sinklist`) is provided**
|
||||
- Having custom sink usually means that each entry is not an ordinary
|
||||
file path (e.g. name of color scheme), so we can't blindly apply the
|
||||
same strategy (i.e. `tabedit some-color-scheme` doesn't make sense)
|
||||
- `g:fzf_colors`
|
||||
- `g:fzf_history_dir`
|
||||
|
||||
Tips
|
||||
----
|
||||
|
||||
### fzf inside terminal buffer
|
||||
|
||||
On the latest versions of Vim and Neovim, fzf will start in a terminal buffer.
|
||||
If you find the default ANSI colors to be different, consider configuring the
|
||||
colors using `g:terminal_ansi_colors` in regular Vim or `g:terminal_color_x`
|
||||
in Neovim.
|
||||
|
||||
```vim
|
||||
" Terminal colors for seoul256 color scheme
|
||||
if has('nvim')
|
||||
let g:terminal_color_0 = '#4e4e4e'
|
||||
let g:terminal_color_1 = '#d68787'
|
||||
let g:terminal_color_2 = '#5f865f'
|
||||
let g:terminal_color_3 = '#d8af5f'
|
||||
let g:terminal_color_4 = '#85add4'
|
||||
let g:terminal_color_5 = '#d7afaf'
|
||||
let g:terminal_color_6 = '#87afaf'
|
||||
let g:terminal_color_7 = '#d0d0d0'
|
||||
let g:terminal_color_8 = '#626262'
|
||||
let g:terminal_color_9 = '#d75f87'
|
||||
let g:terminal_color_10 = '#87af87'
|
||||
let g:terminal_color_11 = '#ffd787'
|
||||
let g:terminal_color_12 = '#add4fb'
|
||||
let g:terminal_color_13 = '#ffafaf'
|
||||
let g:terminal_color_14 = '#87d7d7'
|
||||
let g:terminal_color_15 = '#e4e4e4'
|
||||
else
|
||||
let g:terminal_ansi_colors = [
|
||||
\ '#4e4e4e', '#d68787', '#5f865f', '#d8af5f',
|
||||
\ '#85add4', '#d7afaf', '#87afaf', '#d0d0d0',
|
||||
\ '#626262', '#d75f87', '#87af87', '#ffd787',
|
||||
\ '#add4fb', '#ffafaf', '#87d7d7', '#e4e4e4'
|
||||
\ ]
|
||||
endif
|
||||
```
|
||||
|
||||
### Starting fzf in a popup window
|
||||
|
||||
```vim
|
||||
" Required:
|
||||
" - width [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||
" - height [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||
"
|
||||
" Optional:
|
||||
" - xoffset [float default 0.5 range [0 ~ 1]]
|
||||
" - yoffset [float default 0.5 range [0 ~ 1]]
|
||||
" - relative [boolean default v:false]
|
||||
" - border [string default 'rounded']: Border style
|
||||
" - 'rounded' / 'sharp' / 'horizontal' / 'vertical' / 'top' / 'bottom' / 'left' / 'right'
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||
```
|
||||
|
||||
Alternatively, you can make fzf open in a tmux popup window (requires tmux 3.2
or above) by putting the `--tmux` option value in the `tmux` key.
|
||||
|
||||
```vim
|
||||
" See `--tmux` option in `man fzf` for available options
|
||||
" [center|top|bottom|left|right][,SIZE[%]][,SIZE[%]]
|
||||
if exists('$TMUX')
|
||||
let g:fzf_layout = { 'tmux': '90%,70%' }
|
||||
else
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||
endif
|
||||
```
|
||||
|
||||
### Hide statusline
|
||||
|
||||
When fzf starts in a terminal buffer, the file type of the buffer is set to
|
||||
`fzf`. So you can set up `FileType fzf` autocmd to customize the settings of
|
||||
the window.
|
||||
|
||||
For example, if you open fzf at the bottom of the screen (e.g. `{'down':
'40%'}`), you might want to temporarily disable the statusline for a cleaner
look.
|
||||
|
||||
```vim
|
||||
let g:fzf_layout = { 'down': '30%' }
|
||||
autocmd! FileType fzf
|
||||
autocmd FileType fzf set laststatus=0 noshowmode noruler
|
||||
\| autocmd BufLeave <buffer> set laststatus=2 showmode ruler
|
||||
```
|
||||
|
||||
[License](LICENSE)
|
||||
------------------
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2013-2024 Junegunn Choi
|
||||
bin/fzf-preview.sh (new executable file, 74 lines)
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# The purpose of this script is to demonstrate how to preview a file or an
|
||||
# image in the preview window of fzf.
|
||||
#
|
||||
# Dependencies:
|
||||
# - https://github.com/sharkdp/bat
|
||||
# - https://github.com/hpjansson/chafa
|
||||
# - https://iterm2.com/utilities/imgcat
|
||||
|
||||
if [[ $# -ne 1 ]]; then
|
||||
>&2 echo "usage: $0 FILENAME"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
file=${1/#\~\//$HOME/}
|
||||
type=$(file --dereference --mime -- "$file")
|
||||
|
||||
if [[ ! $type =~ image/ ]]; then
|
||||
if [[ $type =~ =binary ]]; then
|
||||
file "$1"
|
||||
exit
|
||||
fi
|
||||
|
||||
# Sometimes bat is installed as batcat.
|
||||
if command -v batcat > /dev/null; then
|
||||
batname="batcat"
|
||||
elif command -v bat > /dev/null; then
|
||||
batname="bat"
|
||||
else
|
||||
cat "$1"
|
||||
exit
|
||||
fi
|
||||
|
||||
${batname} --style="${BAT_STYLE:-numbers}" --color=always --pager=never -- "$file"
|
||||
exit
|
||||
fi
|
||||
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [[ $dim = x ]]; then
|
||||
dim=$(stty size < /dev/tty | awk '{print $2 "x" $1}')
|
||||
elif ! [[ $KITTY_WINDOW_ID ]] && (( FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size < /dev/tty | awk '{print $1}') )); then
|
||||
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
|
||||
# * https://github.com/junegunn/fzf/issues/2544
|
||||
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
|
||||
fi
|
||||
|
||||
# 1. Use kitty icat on kitty terminal
|
||||
if [[ $KITTY_WINDOW_ID ]]; then
|
||||
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
|
||||
# you have to use 'stream'.
|
||||
#
|
||||
# 2. The last line of the output is the ANSI reset code without newline.
|
||||
# This confuses fzf and makes it render scroll offset indicator.
|
||||
# So we remove the last line and append the reset code to its previous line.
|
||||
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
|
||||
|
||||
# 2. Use chafa with Sixel output
|
||||
elif command -v chafa > /dev/null; then
|
||||
chafa -s "$dim" "$file"
|
||||
# Add a new line character so that fzf can display multiple images in the preview window
|
||||
echo
|
||||
|
||||
# 3. If chafa is not found but imgcat is available, use it on iTerm2
|
||||
elif command -v imgcat > /dev/null; then
|
||||
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
|
||||
# user is running iTerm2. But for the sake of simplicity, we just assume
|
||||
# that's the case here.
|
||||
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
|
||||
|
||||
# 4. Cannot find any suitable method to preview the image
|
||||
else
|
||||
file "$file"
|
||||
fi
|
||||
bin/fzf-tmux (178 lines changed)
@@ -1,13 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
# fzf-tmux: starts fzf in a tmux pane
|
||||
# usage: fzf-tmux [-u|-d [HEIGHT[%]]] [-l|-r [WIDTH[%]]] [--] [FZF OPTIONS]
|
||||
# usage: fzf-tmux [LAYOUT OPTIONS] [--] [FZF OPTIONS]
|
||||
|
||||
fail() {
|
||||
>&2 echo "$1"
|
||||
exit 2
|
||||
}
|
||||
|
||||
fzf="$(command -v fzf 2> /dev/null)" || fzf="$(dirname "$0")/fzf"
|
||||
fzf="$(command which fzf)" || fzf="$(dirname "$0")/fzf"
|
||||
[[ -x "$fzf" ]] || fail 'fzf executable not found'
|
||||
|
||||
args=()
|
||||
@@ -16,19 +16,30 @@ skip=""
|
||||
swap=""
|
||||
close=""
|
||||
term=""
|
||||
[[ -n "$LINES" ]] && lines=$LINES || lines=$(tput lines)
|
||||
[[ -n "$COLUMNS" ]] && columns=$COLUMNS || columns=$(tput cols)
|
||||
[[ -n "$LINES" ]] && lines=$LINES || lines=$(tput lines) || lines=$(tmux display-message -p "#{pane_height}")
|
||||
[[ -n "$COLUMNS" ]] && columns=$COLUMNS || columns=$(tput cols) || columns=$(tmux display-message -p "#{pane_width}")
|
||||
|
||||
tmux_version=$(tmux -V | sed 's/[^0-9.]//g')
|
||||
tmux_32=$(awk '{print ($1 >= 3.2)}' <<< "$tmux_version" 2> /dev/null || bc -l <<< "$tmux_version >= 3.2")
|
||||
|
||||
help() {
|
||||
>&2 echo 'usage: fzf-tmux [-u|-d [HEIGHT[%]]] [-l|-r [WIDTH[%]]] [--] [FZF OPTIONS]
|
||||
>&2 echo 'usage: fzf-tmux [LAYOUT OPTIONS] [--] [FZF OPTIONS]
|
||||
|
||||
Layout
|
||||
-u [HEIGHT[%]] Split above (up)
|
||||
-d [HEIGHT[%]] Split below (down)
|
||||
-l [WIDTH[%]] Split left
|
||||
-r [WIDTH[%]] Split right
|
||||
LAYOUT OPTIONS:
|
||||
(default layout: -d 50%)
|
||||
|
||||
(default: -d 50%)
|
||||
Popup window (requires tmux 3.2 or above):
|
||||
-p [WIDTH[%][,HEIGHT[%]]] (default: 50%)
|
||||
-w WIDTH[%]
|
||||
-h HEIGHT[%]
|
||||
-x COL
|
||||
-y ROW
|
||||
|
||||
Split pane:
|
||||
-u [HEIGHT[%]] Split above (up)
|
||||
-d [HEIGHT[%]] Split below (down)
|
||||
-l [WIDTH[%]] Split left
|
||||
-r [WIDTH[%]] Split right
|
||||
'
|
||||
exit
|
||||
}
|
||||
@@ -47,8 +58,10 @@ while [[ $# -gt 0 ]]; do
|
||||
echo "fzf-tmux (with fzf $("$fzf" --version))"
|
||||
exit
|
||||
;;
|
||||
-w*|-h*|-d*|-u*|-r*|-l*)
|
||||
if [[ "$arg" =~ ^.[lrw] ]]; then
|
||||
-p*|-w*|-h*|-x*|-y*|-d*|-u*|-r*|-l*)
|
||||
if [[ "$arg" =~ ^-[pwhxy] ]]; then
|
||||
[[ "$opt" =~ "-E" ]] || opt="-E"
|
||||
elif [[ "$arg" =~ ^.[lr] ]]; then
|
||||
opt="-h"
|
||||
if [[ "$arg" =~ ^.l ]]; then
|
||||
opt="$opt -d"
|
||||
@@ -66,7 +79,7 @@ while [[ $# -gt 0 ]]; do
|
||||
if [[ ${#arg} -gt 2 ]]; then
|
||||
size="${arg:2}"
|
||||
else
|
||||
if [[ "$1" =~ ^[0-9]+%?$ ]]; then
|
||||
if [[ "$1" =~ ^[0-9%,]+$ ]] || [[ "$1" =~ ^[A-Z]$ ]]; then
|
||||
size="$1"
|
||||
shift
|
||||
else
|
||||
@@ -74,12 +87,28 @@ while [[ $# -gt 0 ]]; do
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$size" =~ %$ ]]; then
|
||||
if [[ "$arg" =~ ^-p ]]; then
|
||||
if [[ -n "$size" ]]; then
|
||||
w=${size%%,*}
|
||||
h=${size##*,}
|
||||
opt="$opt -w$w -h$h"
|
||||
fi
|
||||
elif [[ "$arg" =~ ^-[whxy] ]]; then
|
||||
opt="$opt ${arg:0:2}$size"
|
||||
elif [[ "$size" =~ %$ ]]; then
|
||||
size=${size:0:((${#size}-1))}
|
||||
if [[ -n "$swap" ]]; then
|
||||
opt="$opt -p $(( 100 - size ))"
|
||||
if [[ $tmux_32 = 1 ]]; then
|
||||
if [[ -n "$swap" ]]; then
|
||||
opt="$opt -l $(( 100 - size ))%"
|
||||
else
|
||||
opt="$opt -l $size%"
|
||||
fi
|
||||
else
|
||||
opt="$opt -p $size"
|
||||
if [[ -n "$swap" ]]; then
|
||||
opt="$opt -p $(( 100 - size ))"
|
||||
else
|
||||
opt="$opt -p $size"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
if [[ -n "$swap" ]]; then
|
||||
@@ -109,16 +138,21 @@ while [[ $# -gt 0 ]]; do
|
||||
[[ -n "$skip" ]] && args+=("$arg")
|
||||
done
|
||||
|
||||
if [[ -z "$TMUX" || "$opt" =~ ^-h && "$columns" -le 40 || ! "$opt" =~ ^-h && "$lines" -le 15 ]]; then
|
||||
if [[ -z "$TMUX" ]]; then
|
||||
"$fzf" "${args[@]}"
|
||||
exit $?
|
||||
fi
|
||||
|
||||
# Handle zoomed tmux pane by moving it to a temp window
|
||||
if tmux list-panes -F '#F' | grep -q Z; then
|
||||
zoomed=1
|
||||
# * --height option is not allowed
|
||||
# * CTRL-Z is also disabled
|
||||
# * fzf-tmux script is not compatible with --tmux option in fzf 0.53.0 or later
|
||||
args=("${args[@]}" "--no-height" "--bind=ctrl-z:ignore" "--no-tmux")
|
||||
|
||||
# Handle zoomed tmux pane without popup options by moving it to a temp window
|
||||
if [[ ! "$opt" =~ "-E" ]] && tmux list-panes -F '#F' | grep -q Z; then
|
||||
zoomed_without_popup=1
|
||||
original_window=$(tmux display-message -p "#{window_id}")
|
||||
tmp_window=$(tmux new-window -d -P -F "#{window_id}" "bash -c 'while :; do for c in \\| / - \\\\; do sleep 0.2; printf \"\\r\$c fzf-tmux is running\\r\"; done; done'")
|
||||
tmp_window=$(tmux new-window -d -P -F "#{window_id}" "bash -c 'while :; do for c in \\| / - '\\;' do sleep 0.2; printf \"\\r\$c fzf-tmux is running\\r\"; done; done'")
|
||||
tmux swap-pane -t $tmp_window \; select-window -t $tmp_window
|
||||
fi
|
||||
|
||||
@@ -130,51 +164,91 @@ argsf="${TMPDIR:-/tmp}/fzf-args-$id"
|
||||
fifo1="${TMPDIR:-/tmp}/fzf-fifo1-$id"
|
||||
fifo2="${TMPDIR:-/tmp}/fzf-fifo2-$id"
|
||||
fifo3="${TMPDIR:-/tmp}/fzf-fifo3-$id"
|
||||
if tmux_win_opts=$(tmux show-options -p remain-on-exit \; show-options -p synchronize-panes 2> /dev/null); then
|
||||
tmux_win_opts=( $(sed '/ off/d; s/synchronize-panes/set-option -p synchronize-panes/; s/remain-on-exit/set-option -p remain-on-exit/; s/$/ \\;/' <<< "$tmux_win_opts") )
|
||||
tmux_off_opts='; set-option -p synchronize-panes off ; set-option -p remain-on-exit off'
|
||||
else
|
||||
tmux_win_opts=( $(tmux show-window-options remain-on-exit \; show-window-options synchronize-panes | sed '/ off/d; s/^/set-window-option /; s/$/ \\;/') )
|
||||
tmux_off_opts='; set-window-option synchronize-panes off ; set-window-option remain-on-exit off'
|
||||
fi
|
||||
cleanup() {
|
||||
rm -f $argsf $fifo1 $fifo2 $fifo3
|
||||
\rm -f $argsf $fifo1 $fifo2 $fifo3
|
||||
|
||||
# Remove temp window if we were zoomed
|
||||
if [[ -n "$zoomed" ]]; then
|
||||
# Restore tmux window options
|
||||
if [[ "${#tmux_win_opts[@]}" -gt 1 ]]; then
|
||||
eval "tmux ${tmux_win_opts[*]}"
|
||||
fi
|
||||
|
||||
# Remove temp window if we were zoomed without popup options
|
||||
if [[ -n "$zoomed_without_popup" ]]; then
|
||||
tmux display-message -p "#{window_id}" > /dev/null
|
||||
tmux swap-pane -t $original_window \; \
|
||||
select-window -t $original_window \; \
|
||||
kill-window -t $tmp_window \; \
|
||||
resize-pane -Z
|
||||
fi
|
||||
|
||||
if [[ $# -gt 0 ]]; then
|
||||
trap - EXIT
|
||||
exit 130
|
||||
fi
|
||||
}
|
||||
trap cleanup EXIT SIGINT SIGTERM
|
||||
trap 'cleanup 1' SIGUSR1
|
||||
trap 'cleanup' EXIT
|
||||
|
||||
envs="env TERM=$TERM "
|
||||
[[ -n "$FZF_DEFAULT_OPTS" ]] && envs="$envs FZF_DEFAULT_OPTS=$(printf %q "$FZF_DEFAULT_OPTS")"
|
||||
[[ -n "$FZF_DEFAULT_COMMAND" ]] && envs="$envs FZF_DEFAULT_COMMAND=$(printf %q "$FZF_DEFAULT_COMMAND")"
|
||||
|
||||
mkfifo -m o+w $fifo2
|
||||
mkfifo -m o+w $fifo3
|
||||
envs="export TERM=$TERM "
|
||||
if [[ "$opt" =~ "-E" ]]; then
|
||||
if [[ $tmux_version = 3.2 ]]; then
|
||||
FZF_DEFAULT_OPTS="--margin 0,1 $FZF_DEFAULT_OPTS"
|
||||
elif [[ $tmux_32 = 1 ]]; then
|
||||
FZF_DEFAULT_OPTS="--border $FZF_DEFAULT_OPTS"
|
||||
opt="-B $opt"
|
||||
else
|
||||
echo "fzf-tmux: tmux 3.2 or above is required for popup mode" >&2
|
||||
exit 2
|
||||
fi
|
||||
fi
|
||||
envs="$envs FZF_DEFAULT_COMMAND=$(printf %q "$FZF_DEFAULT_COMMAND")"
|
||||
envs="$envs FZF_DEFAULT_OPTS=$(printf %q "$FZF_DEFAULT_OPTS")"
|
||||
envs="$envs FZF_DEFAULT_OPTS_FILE=$(printf %q "$FZF_DEFAULT_OPTS_FILE")"
|
||||
[[ -n "$RUNEWIDTH_EASTASIAN" ]] && envs="$envs RUNEWIDTH_EASTASIAN=$(printf %q "$RUNEWIDTH_EASTASIAN")"
|
||||
[[ -n "$BAT_THEME" ]] && envs="$envs BAT_THEME=$(printf %q "$BAT_THEME")"
|
||||
echo "$envs;" > "$argsf"
|
||||
|
||||
# Build arguments to fzf
|
||||
opts=""
|
||||
for arg in "${args[@]}"; do
|
||||
arg="${arg//\\/\\\\}"
|
||||
arg="${arg//\"/\\\"}"
|
||||
arg="${arg//\`/\\\`}"
|
||||
arg="${arg//$/\\$}"
|
||||
opts="$opts \"$arg\""
|
||||
done
|
||||
opts=$(printf "%q " "${args[@]}")
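
Note on the technique (illustrative, not part of the script): printf %q shell-quotes each argument so the resulting string can later be re-evaluated by bash without word splitting or unwanted expansion, e.g.

    printf '%q ' --query 'hello world' '$HOME'
    # prints: --query hello\ world \$HOME
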
|
||||
|
||||
pppid=$$
|
||||
echo -n "trap 'kill -SIGUSR1 -$pppid' EXIT SIGINT SIGTERM;" >> $argsf
|
||||
close="; trap - EXIT SIGINT SIGTERM $close"
|
||||
|
||||
export TMUX=$(cut -d , -f 1,2 <<< "$TMUX")
|
||||
mkfifo -m o+w $fifo2
|
||||
if [[ "$opt" =~ "-E" ]]; then
|
||||
cat $fifo2 &
|
||||
if [[ -n "$term" ]] || [[ -t 0 ]]; then
|
||||
cat <<< "\"$fzf\" $opts > $fifo2; out=\$? $close; exit \$out" >> $argsf
|
||||
else
|
||||
mkfifo $fifo1
|
||||
cat <<< "\"$fzf\" $opts < $fifo1 > $fifo2; out=\$? $close; exit \$out" >> $argsf
|
||||
cat <&0 > $fifo1 &
|
||||
fi
|
||||
|
||||
tmux popup -d "$PWD" $opt "bash $argsf" > /dev/null 2>&1
|
||||
exit $?
|
||||
fi
|
||||
|
||||
mkfifo -m o+w $fifo3
|
||||
if [[ -n "$term" ]] || [[ -t 0 ]]; then
|
||||
cat <<< "\"$fzf\" $opts > $fifo2; echo \$? > $fifo3 $close" > $argsf
|
||||
TMUX=$(echo $TMUX | cut -d , -f 1,2) tmux set-window-option synchronize-panes off \;\
|
||||
set-window-option remain-on-exit off \;\
|
||||
split-window $opt "cd $(printf %q "$PWD");$envs bash $argsf" $swap \
|
||||
> /dev/null 2>&1
|
||||
cat <<< "\"$fzf\" $opts > $fifo2; echo \$? > $fifo3 $close" >> $argsf
|
||||
else
|
||||
mkfifo $fifo1
|
||||
cat <<< "\"$fzf\" $opts < $fifo1 > $fifo2; echo \$? > $fifo3 $close" > $argsf
|
||||
TMUX=$(echo $TMUX | cut -d , -f 1,2) tmux set-window-option synchronize-panes off \;\
|
||||
set-window-option remain-on-exit off \;\
|
||||
split-window $opt "$envs bash $argsf" $swap \
|
||||
> /dev/null 2>&1
|
||||
cat <<< "\"$fzf\" $opts < $fifo1 > $fifo2; echo \$? > $fifo3 $close" >> $argsf
|
||||
cat <&0 > $fifo1 &
|
||||
fi
|
||||
tmux \
|
||||
split-window -c "$PWD" $opt "bash -c 'exec -a fzf bash $argsf'" $swap \
|
||||
$tmux_off_opts \
|
||||
> /dev/null 2>&1 || { "$fzf" "${args[@]}"; exit $?; }
|
||||
cat $fifo2
|
||||
exit "$(cat $fifo3)"
|
||||
|
||||
|
||||
509
doc/fzf.txt
Normal file
@@ -0,0 +1,509 @@
fzf.txt fzf Last change: February 15 2024

FZF - TABLE OF CONTENTS *fzf* *fzf-toc*
==============================================================================

FZF Vim integration |fzf-vim-integration|
Installation |fzf-installation|
Summary |fzf-summary|
:FZF[!] |:FZF|
Configuration |fzf-configuration|
Examples |fzf-examples|
Explanation of g:fzf_colors |fzf-explanation-of-gfzfcolors|
fzf#run |fzf#run|
fzf#wrap |fzf#wrap|
Global options supported by fzf#wrap |fzf-global-options-supported-by-fzf#wrap|
Tips |fzf-tips|
fzf inside terminal buffer |fzf-inside-terminal-buffer|
Starting fzf in a popup window |fzf-starting-fzf-in-a-popup-window|
Hide statusline |fzf-hide-statusline|
License |fzf-license|

FZF VIM INTEGRATION *fzf-vim-integration*
|
||||
==============================================================================
|
||||
|
||||
|
||||
INSTALLATION *fzf-installation*
|
||||
==============================================================================
|
||||
|
||||
Once you have fzf installed, you can enable it inside Vim simply by adding the
|
||||
directory to 'runtimepath' in your Vim configuration file. The path may differ
|
||||
depending on the package manager.
|
||||
>
|
||||
" If installed using Homebrew
|
||||
set rtp+=/usr/local/opt/fzf
|
||||
|
||||
" If installed using Homebrew on Apple Silicon
|
||||
set rtp+=/opt/homebrew/opt/fzf
|
||||
|
||||
" If you have cloned fzf on ~/.fzf directory
|
||||
set rtp+=~/.fzf
|
||||
<
|
||||
If you use {vim-plug}{1}, the same can be written as:
|
||||
>
|
||||
" If installed using Homebrew
|
||||
Plug '/usr/local/opt/fzf'
|
||||
|
||||
" If installed using Homebrew on Apple Silicon
|
||||
Plug '/opt/homebrew/opt/fzf'
|
||||
|
||||
" If you have cloned fzf on ~/.fzf directory
|
||||
Plug '~/.fzf'
|
||||
<
|
||||
But if you want the latest Vim plugin file from GitHub rather than the one
|
||||
included in the package, write:
|
||||
>
|
||||
Plug 'junegunn/fzf'
|
||||
<
|
||||
The Vim plugin will pick up fzf binary available on the system. If fzf is not
|
||||
found on `$PATH`, it will ask you if it should download the latest binary for
|
||||
you.
|
||||
|
||||
To make sure that you have the latest version of the binary, set up
|
||||
post-update hook like so:
|
||||
|
||||
*fzf#install*
|
||||
>
|
||||
Plug 'junegunn/fzf', { 'do': { -> fzf#install() } }
|
||||
<
|
||||
{1} https://github.com/junegunn/vim-plug
|
||||
|
||||
|
||||
SUMMARY *fzf-summary*
|
||||
==============================================================================
|
||||
|
||||
The Vim plugin of fzf provides two core functions, and `:FZF` command which is
|
||||
the basic file selector command built on top of them.
|
||||
|
||||
1. `fzf#run([spec dict])`
|
||||
- Starts fzf inside Vim with the given spec
|
||||
- `:call fzf#run({'source': 'ls'})`
|
||||
2. `fzf#wrap([spec dict]) -> (dict)`
|
||||
- Takes a spec for `fzf#run` and returns an extended version of it with
|
||||
additional options for addressing global preferences (`g:fzf_xxx`)
|
||||
- `:echo fzf#wrap({'source': 'ls'})`
|
||||
- We usually wrap a spec with `fzf#wrap` before passing it to `fzf#run`
|
||||
- `:call fzf#run(fzf#wrap({'source': 'ls'}))`
|
||||
3. `:FZF [fzf_options string] [path string]`
|
||||
- Basic fuzzy file selector
|
||||
- A reference implementation for those who don't want to write VimScript to
|
||||
implement custom commands
|
||||
- If you're looking for more such commands, check out {fzf.vim}{2} project.
|
||||
|
||||
The most important of all is `fzf#run`, but it would be easier to understand
|
||||
the whole if we start off with `:FZF` command.
|
||||
|
||||
{2} https://github.com/junegunn/fzf.vim
|
||||
|
||||
|
||||
:FZF[!]
|
||||
==============================================================================
|
||||
|
||||
*:FZF*
|
||||
>
|
||||
" Look for files under current directory
|
||||
:FZF
|
||||
|
||||
" Look for files under your home directory
|
||||
:FZF ~
|
||||
|
||||
" With fzf command-line options
|
||||
:FZF --reverse --info=inline /tmp
|
||||
|
||||
" Bang version starts fzf in fullscreen mode
|
||||
:FZF!
|
||||
<
|
||||
Similarly to {ctrlp.vim}{3}, use enter key, CTRL-T, CTRL-X or CTRL-V to open
|
||||
selected files in the current window, in new tabs, in horizontal splits, or in
|
||||
vertical splits respectively.
|
||||
|
||||
Note that the environment variables `FZF_DEFAULT_COMMAND` and
|
||||
`FZF_DEFAULT_OPTS` also apply here.
|
||||
|
||||
{3} https://github.com/kien/ctrlp.vim
|
||||
|
||||
|
||||
< Configuration >_____________________________________________________________~
|
||||
*fzf-configuration*
|
||||
|
||||
*g:fzf_action* *g:fzf_layout* *g:fzf_colors* *g:fzf_history_dir*
|
||||
|
||||
- `g:fzf_action`
|
||||
- Customizable extra key bindings for opening selected files in different
|
||||
ways
|
||||
- `g:fzf_layout`
|
||||
- Determines the size and position of fzf window
|
||||
- `g:fzf_colors`
|
||||
- Customizes fzf colors to match the current color scheme
|
||||
- `g:fzf_history_dir`
|
||||
- Enables history feature
|
||||
|
||||
|
||||
Examples~
|
||||
*fzf-examples*
|
||||
>
|
||||
" This is the default extra key bindings
|
||||
let g:fzf_action = {
|
||||
\ 'ctrl-t': 'tab split',
|
||||
\ 'ctrl-x': 'split',
|
||||
\ 'ctrl-v': 'vsplit' }
|
||||
|
||||
" An action can be a reference to a function that processes selected lines
|
||||
function! s:build_quickfix_list(lines)
|
||||
call setqflist(map(copy(a:lines), '{ "filename": v:val, "lnum": 1 }'))
|
||||
copen
|
||||
cc
|
||||
endfunction
|
||||
|
||||
let g:fzf_action = {
|
||||
\ 'ctrl-q': function('s:build_quickfix_list'),
|
||||
\ 'ctrl-t': 'tab split',
|
||||
\ 'ctrl-x': 'split',
|
||||
\ 'ctrl-v': 'vsplit' }
|
||||
|
||||
" Default fzf layout
|
||||
" - Popup window (center of the screen)
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||
|
||||
" - Popup window (center of the current window)
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true } }
|
||||
|
||||
" - Popup window (anchored to the bottom of the current window)
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true, 'yoffset': 1.0 } }
|
||||
|
||||
" - down / up / left / right
|
||||
let g:fzf_layout = { 'down': '40%' }
|
||||
|
||||
" - Window using a Vim command
|
||||
let g:fzf_layout = { 'window': 'enew' }
|
||||
let g:fzf_layout = { 'window': '-tabnew' }
|
||||
let g:fzf_layout = { 'window': '10new' }
|
||||
|
||||
" Customize fzf colors to match your color scheme
|
||||
" - fzf#wrap translates this to a set of `--color` options
|
||||
let g:fzf_colors =
|
||||
\ { 'fg': ['fg', 'Normal'],
|
||||
\ 'bg': ['bg', 'Normal'],
|
||||
\ 'hl': ['fg', 'Comment'],
|
||||
\ 'fg+': ['fg', 'CursorLine', 'CursorColumn', 'Normal'],
|
||||
\ 'bg+': ['bg', 'CursorLine', 'CursorColumn'],
|
||||
\ 'hl+': ['fg', 'Statement'],
|
||||
\ 'info': ['fg', 'PreProc'],
|
||||
\ 'border': ['fg', 'Ignore'],
|
||||
\ 'prompt': ['fg', 'Conditional'],
|
||||
\ 'pointer': ['fg', 'Exception'],
|
||||
\ 'marker': ['fg', 'Keyword'],
|
||||
\ 'spinner': ['fg', 'Label'],
|
||||
\ 'header': ['fg', 'Comment'] }
|
||||
|
||||
" Enable per-command history
|
||||
" - History files will be stored in the specified directory
|
||||
" - When set, CTRL-N and CTRL-P will be bound to 'next-history' and
|
||||
" 'previous-history' instead of 'down' and 'up'.
|
||||
let g:fzf_history_dir = '~/.local/share/fzf-history'
|
||||
<
|
||||
|
||||
Explanation of g:fzf_colors~
|
||||
*fzf-explanation-of-gfzfcolors*
|
||||
|
||||
`g:fzf_colors` is a dictionary mapping fzf elements to a color specification
|
||||
list:
|
||||
>
|
||||
element: [ component, group1 [, group2, ...] ]
|
||||
<
|
||||
- `element` is an fzf element to apply a color to:
|
||||
|
||||
----------------------------+------------------------------------------------------
|
||||
Element | Description ~
|
||||
----------------------------+------------------------------------------------------
|
||||
`fg` / `bg` / `hl` | Item (foreground / background / highlight)
|
||||
`fg+` / `bg+` / `hl+` | Current item (foreground / background / highlight)
|
||||
`preview-fg` / `preview-bg` | Preview window text and background
|
||||
`hl` / `hl+` | Highlighted substrings (normal / current)
|
||||
`gutter` | Background of the gutter on the left
|
||||
`pointer` | Pointer to the current line ( `>` )
|
||||
`marker` | Multi-select marker ( `>` )
|
||||
`border` | Border around the window ( `--border` and `--preview` )
|
||||
`header` | Header ( `--header` or `--header-lines` )
|
||||
`info` | Info line (match counters)
|
||||
`spinner` | Streaming input indicator
|
||||
`query` | Query string
|
||||
`disabled` | Query string when search is disabled
|
||||
`prompt` | Prompt before query ( `> ` )
|
||||
`pointer` | Pointer to the current line ( `>` )
|
||||
----------------------------+------------------------------------------------------
|
||||
- `component` specifies the component (`fg` / `bg`) from which to extract the
|
||||
color when considering each of the following highlight groups
|
||||
- `group1 [, group2, ...]` is a list of highlight groups that are searched (in
|
||||
order) for a matching color definition
|
||||
|
||||
For example, consider the following specification:
|
||||
>
|
||||
'prompt': ['fg', 'Conditional', 'Comment'],
|
||||
<
|
||||
This means we color the prompt
  - using the `fg` attribute of the `Conditional` if it exists,
  - otherwise use the `fg` attribute of the `Comment` highlight group if it exists,
  - otherwise fall back to the default color settings for the prompt.
|
||||
|
||||
You can examine the color option generated according to the setting by printing
the result of the `fzf#wrap()` function like so:
|
||||
>
|
||||
:echo fzf#wrap()
|
||||
<
|
||||
|
||||
FZF#RUN
|
||||
==============================================================================
|
||||
|
||||
*fzf#run*
|
||||
|
||||
The `fzf#run()` function is the core of the Vim integration. It takes a single
dictionary argument, a spec, and starts an fzf process accordingly. At the very
least, specify the `sink` option to tell it what to do with the selected
entry.
|
||||
>
|
||||
call fzf#run({'sink': 'e'})
|
||||
<
|
||||
We haven't specified the `source`, so this is equivalent to starting fzf on
|
||||
command line without standard input pipe; fzf will traverse the file system
|
||||
under the current directory to get the list of files. (If
|
||||
`$FZF_DEFAULT_COMMAND` is set, fzf will use the output of the command
|
||||
instead.) When you select one, it will open it with the sink, `:e` command. If
|
||||
you want to open it in a new tab, you can pass `:tabedit` command instead as
|
||||
the sink.
|
||||
>
|
||||
call fzf#run({'sink': 'tabedit'})
|
||||
<
|
||||
You can use any shell command as the source to generate the list. The
|
||||
following example will list the files managed by git. It's equivalent to
|
||||
running `git ls-files | fzf` on shell.
|
||||
>
|
||||
call fzf#run({'source': 'git ls-files', 'sink': 'e'})
|
||||
<
|
||||
fzf options can be specified as `options` entry in spec dictionary.
|
||||
>
|
||||
call fzf#run({'sink': 'tabedit', 'options': '--multi --reverse'})
|
||||
<
|
||||
You can also pass a layout option if you don't want fzf window to take up the
|
||||
entire screen.
|
||||
>
|
||||
" up / down / left / right / window are allowed
|
||||
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'left': '40%'})
|
||||
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'window': '30vnew'})
|
||||
<
|
||||
`source` doesn't have to be an external shell command, you can pass a Vim
|
||||
array as the source. In the next example, we pass the names of color schemes
|
||||
as the source to implement a color scheme selector.
|
||||
>
|
||||
call fzf#run({'source': map(split(globpath(&rtp, 'colors/*.vim')),
|
||||
\ 'fnamemodify(v:val, ":t:r")'),
|
||||
\ 'sink': 'colo', 'left': '25%'})
|
||||
<
|
||||
The following table summarizes the available options.
|
||||
|
||||
---------------------------+---------------+----------------------------------------------------------------------
|
||||
Option name | Type | Description ~
|
||||
---------------------------+---------------+----------------------------------------------------------------------
|
||||
`source` | string | External command to generate input to fzf (e.g. `find .` )
|
||||
`source` | list | Vim list as input to fzf
|
||||
`sink` | string | Vim command to handle the selected item (e.g. `e` , `tabe` )
|
||||
`sink` | funcref | Function to be called with each selected item
|
||||
`sinklist` (or `sink*` ) | funcref | Similar to `sink` , but takes the list of output lines at once
|
||||
`exit` | funcref | Function to be called with the exit status of fzf (e.g. 0, 1, 2, 130)
|
||||
`options` | string/list | Options to fzf
|
||||
`dir` | string | Working directory
|
||||
`up` / `down` / `left` / `right` | number/string | (Layout) Window position and size (e.g. `20` , `50%` )
|
||||
`tmux` | string | (Layout) `--tmux` options (e.g. `90%,70%` )
|
||||
`window` (Vim 8 / Neovim) | string | (Layout) Command to open fzf window (e.g. `vertical aboveleft 30new` )
|
||||
`window` (Vim 8 / Neovim) | dict | (Layout) Popup window settings (e.g. `{'width': 0.9, 'height': 0.6}` )
|
||||
---------------------------+---------------+----------------------------------------------------------------------
|
||||
|
||||
`options` entry can be either a string or a list. For simple cases, string
|
||||
should suffice, but prefer to use list type to avoid escaping issues.
|
||||
>
|
||||
call fzf#run({'options': '--reverse --prompt "C:\\Program Files\\"'})
|
||||
call fzf#run({'options': ['--reverse', '--prompt', 'C:\Program Files\']})
|
||||
<
|
||||
When `window` entry is a dictionary, fzf will start in a popup window. The
|
||||
following options are allowed:
|
||||
|
||||
- Required:
|
||||
- `width` [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||
- `height` [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||
- Optional:
|
||||
- `yoffset` [float default 0.5 range [0 ~ 1]]
|
||||
- `xoffset` [float default 0.5 range [0 ~ 1]]
|
||||
- `relative` [boolean default v:false]
|
||||
- `border` [string default `rounded` (`sharp` on Windows)]: Border style
|
||||
- `rounded` / `sharp` / `horizontal` / `vertical` / `top` / `bottom` / `left` / `right` / `no[ne]`
|
||||
|
||||
|
||||
FZF#WRAP
|
||||
==============================================================================
|
||||
|
||||
*fzf#wrap*
|
||||
|
||||
We have seen that several aspects of `:FZF` command can be configured with a
|
||||
set of global option variables; different ways to open files (`g:fzf_action`),
|
||||
window position and size (`g:fzf_layout`), color palette (`g:fzf_colors`),
|
||||
etc.
|
||||
|
||||
So how can we make our custom `fzf#run` calls also respect those variables?
|
||||
Simply by "wrapping" the spec dictionary with `fzf#wrap` before passing it to
|
||||
`fzf#run`.
|
||||
|
||||
- `fzf#wrap([name string], [spec dict], [fullscreen bool]) -> (dict)`
|
||||
- All arguments are optional. Usually we only need to pass a spec
|
||||
dictionary.
|
||||
- `name` is for managing history files. It is ignored if `g:fzf_history_dir`
|
||||
is not defined.
|
||||
- `fullscreen` can be either `0` or `1` (default: 0).
|
||||
|
||||
`fzf#wrap` takes a spec and returns an extended version of it (also a
|
||||
dictionary) with additional options for addressing global preferences. You can
|
||||
examine the return value of it like so:
|
||||
>
|
||||
echo fzf#wrap({'source': 'ls'})
|
||||
<
|
||||
After we "wrap" our spec, we pass it to `fzf#run`.
|
||||
>
|
||||
call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||
<
|
||||
Now it supports CTRL-T, CTRL-V, and CTRL-X key bindings (configurable via
|
||||
`g:fzf_action`) and it opens fzf window according to `g:fzf_layout` setting.
|
||||
|
||||
To make it easier to use, let's define `LS` command.
|
||||
>
|
||||
command! LS call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||
<
|
||||
Type `:LS` and see how it works.
|
||||
|
||||
We would like to make `:LS!` (bang version) open fzf in fullscreen, just like
|
||||
`:FZF!`. Add `-bang` to command definition, and use <bang> value to set the
|
||||
last `fullscreen` argument of `fzf#wrap` (see :help <bang>).
|
||||
>
|
||||
" On :LS!, <bang> evaluates to '!', and '!0' becomes 1
|
||||
command! -bang LS call fzf#run(fzf#wrap({'source': 'ls'}, <bang>0))
|
||||
<
|
||||
Our `:LS` command will be much more useful if we can pass a directory argument
|
||||
to it, so that something like `:LS /tmp` is possible.
|
||||
>
|
||||
command! -bang -complete=dir -nargs=? LS
|
||||
\ call fzf#run(fzf#wrap({'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||
<
|
||||
Lastly, if you have enabled `g:fzf_history_dir`, you might want to assign a
|
||||
unique name to our command and pass it as the first argument to `fzf#wrap`.
|
||||
>
|
||||
" The query history for this command will be stored as 'ls' inside g:fzf_history_dir.
|
||||
" The name is ignored if g:fzf_history_dir is not defined.
|
||||
command! -bang -complete=dir -nargs=? LS
|
||||
\ call fzf#run(fzf#wrap('ls', {'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||
<
|
||||
|
||||
< Global options supported by fzf#wrap >______________________________________~
|
||||
*fzf-global-options-supported-by-fzf#wrap*
|
||||
|
||||
- `g:fzf_layout`
|
||||
- `g:fzf_action`
|
||||
- Works only when no custom `sink` (or `sinklist`) is provided
|
||||
- Having custom sink usually means that each entry is not an ordinary
|
||||
file path (e.g. name of color scheme), so we can't blindly apply the
|
||||
same strategy (i.e. `tabedit some-color-scheme` doesn't make sense)
|
||||
- `g:fzf_colors`
|
||||
- `g:fzf_history_dir`
|
||||
|
||||
|
||||
TIPS *fzf-tips*
|
||||
==============================================================================
|
||||
|
||||
|
||||
< fzf inside terminal buffer >________________________________________________~
|
||||
*fzf-inside-terminal-buffer*
|
||||
|
||||
|
||||
On the latest versions of Vim and Neovim, fzf will start in a terminal buffer.
|
||||
If you find the default ANSI colors to be different, consider configuring the
|
||||
colors using `g:terminal_ansi_colors` in regular Vim or `g:terminal_color_x`
|
||||
in Neovim.
|
||||
|
||||
>
|
||||
" Terminal colors for seoul256 color scheme
|
||||
if has('nvim')
|
||||
let g:terminal_color_0 = '#4e4e4e'
|
||||
let g:terminal_color_1 = '#d68787'
|
||||
let g:terminal_color_2 = '#5f865f'
|
||||
let g:terminal_color_3 = '#d8af5f'
|
||||
let g:terminal_color_4 = '#85add4'
|
||||
let g:terminal_color_5 = '#d7afaf'
|
||||
let g:terminal_color_6 = '#87afaf'
|
||||
let g:terminal_color_7 = '#d0d0d0'
|
||||
let g:terminal_color_8 = '#626262'
|
||||
let g:terminal_color_9 = '#d75f87'
|
||||
let g:terminal_color_10 = '#87af87'
|
||||
let g:terminal_color_11 = '#ffd787'
|
||||
let g:terminal_color_12 = '#add4fb'
|
||||
let g:terminal_color_13 = '#ffafaf'
|
||||
let g:terminal_color_14 = '#87d7d7'
|
||||
let g:terminal_color_15 = '#e4e4e4'
|
||||
else
|
||||
let g:terminal_ansi_colors = [
|
||||
\ '#4e4e4e', '#d68787', '#5f865f', '#d8af5f',
|
||||
\ '#85add4', '#d7afaf', '#87afaf', '#d0d0d0',
|
||||
\ '#626262', '#d75f87', '#87af87', '#ffd787',
|
||||
\ '#add4fb', '#ffafaf', '#87d7d7', '#e4e4e4'
|
||||
\ ]
|
||||
endif
|
||||
<
|
||||
|
||||
< Starting fzf in a popup window >____________________________________________~
|
||||
*fzf-starting-fzf-in-a-popup-window*
|
||||
>
|
||||
" Required:
|
||||
" - width [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||
" - height [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||
"
|
||||
" Optional:
|
||||
" - xoffset [float default 0.5 range [0 ~ 1]]
|
||||
" - yoffset [float default 0.5 range [0 ~ 1]]
|
||||
" - relative [boolean default v:false]
|
||||
" - border [string default 'rounded']: Border style
|
||||
" - 'rounded' / 'sharp' / 'horizontal' / 'vertical' / 'top' / 'bottom' / 'left' / 'right'
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||
<
|
||||
Alternatively, you can make fzf open in a tmux popup window (requires tmux 3.2
|
||||
or above) by putting `--tmux` options in `tmux` key.
|
||||
>
|
||||
" See `--tmux` option in `man fzf` for available options
|
||||
" [center|top|bottom|left|right][,SIZE[%]][,SIZE[%]]
|
||||
if exists('$TMUX')
|
||||
let g:fzf_layout = { 'tmux': '90%,70%' }
|
||||
else
|
||||
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||
endif
|
||||
<
|
||||
|
||||
< Hide statusline >___________________________________________________________~
|
||||
*fzf-hide-statusline*
|
||||
|
||||
When fzf starts in a terminal buffer, the file type of the buffer is set to
|
||||
`fzf`. So you can set up `FileType fzf` autocmd to customize the settings of
|
||||
the window.
|
||||
|
||||
For example, if you open fzf on the bottom on the screen (e.g. `{'down':
|
||||
'40%'}`), you might want to temporarily disable the statusline for a cleaner
|
||||
look.
|
||||
>
|
||||
let g:fzf_layout = { 'down': '30%' }
|
||||
autocmd! FileType fzf
|
||||
autocmd FileType fzf set laststatus=0 noshowmode noruler
|
||||
\| autocmd BufLeave <buffer> set laststatus=2 showmode ruler
|
||||
<
|
||||
|
||||
LICENSE *fzf-license*
|
||||
==============================================================================
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2013-2024 Junegunn Choi
|
||||
|
||||
==============================================================================
|
||||
vim:tw=78:sw=2:ts=2:ft=help:norl:nowrap:
|
||||
20
go.mod
Normal file
@@ -0,0 +1,20 @@
|
||||
module github.com/junegunn/fzf
|
||||
|
||||
require (
|
||||
github.com/charlievieth/fastwalk v1.0.8
|
||||
github.com/gdamore/tcell/v2 v2.7.4
|
||||
github.com/junegunn/go-shellwords v0.0.0-20240813092932-a62c48c52e97
|
||||
github.com/mattn/go-isatty v0.0.20
|
||||
github.com/rivo/uniseg v0.4.7
|
||||
golang.org/x/sys v0.25.0
|
||||
golang.org/x/term v0.24.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/gdamore/encoding v1.0.0 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.15 // indirect
|
||||
golang.org/x/text v0.14.0 // indirect
|
||||
)
|
||||
|
||||
go 1.20
|
||||
57
go.sum
Normal file
@@ -0,0 +1,57 @@
|
||||
github.com/charlievieth/fastwalk v1.0.8 h1:uaoH6cAKSk73aK7aKXqs0+bL+J3Txzd3NGH8tRXgHko=
|
||||
github.com/charlievieth/fastwalk v1.0.8/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
|
||||
github.com/gdamore/encoding v1.0.0 h1:+7OoQ1Bc6eTm5niUzBa0Ctsh6JbMW6Ra+YNuAtDBdko=
|
||||
github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg=
|
||||
github.com/gdamore/tcell/v2 v2.7.4 h1:sg6/UnTM9jGpZU+oFYAsDahfchWAFW8Xx2yFinNSAYU=
|
||||
github.com/gdamore/tcell/v2 v2.7.4/go.mod h1:dSXtXTSK0VsW1biw65DZLZ2NKr7j0qP/0J7ONmsraWg=
|
||||
github.com/junegunn/go-shellwords v0.0.0-20240813092932-a62c48c52e97 h1:rqzLixVo1c/GQW6px9j1xQmlvQIn+lf/V6M1UQ7IFzw=
|
||||
github.com/junegunn/go-shellwords v0.0.0-20240813092932-a62c48c52e97/go.mod h1:6EILKtGpo5t+KLb85LNZLAF6P9LKp78hJI80PXMcn3c=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
|
||||
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
|
||||
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
|
||||
golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
348
install
@@ -2,14 +2,14 @@
|
||||
|
||||
set -u
|
||||
|
||||
[[ "$@" =~ --pre ]] && version=0.15.6 pre=1 ||
|
||||
version=0.15.6 pre=0
|
||||
|
||||
version=0.55.0
|
||||
auto_completion=
|
||||
key_bindings=
|
||||
update_config=2
|
||||
binary_arch=
|
||||
allow_legacy=
|
||||
shells="bash zsh fish"
|
||||
prefix='~/.fzf'
|
||||
prefix_expand=~/.fzf
|
||||
fish_dir=${XDG_CONFIG_HOME:-$HOME/.config}/fish
|
||||
|
||||
help() {
|
||||
cat << EOF
|
||||
@@ -19,12 +19,14 @@ usage: $0 [OPTIONS]
|
||||
--bin Download fzf binary only; Do not generate ~/.fzf.{bash,zsh}
|
||||
--all Download fzf binary and update configuration files
|
||||
to enable key bindings and fuzzy completion
|
||||
--xdg Generate files under \$XDG_CONFIG_HOME/fzf
|
||||
--[no-]key-bindings Enable/disable key bindings (CTRL-T, CTRL-R, ALT-C)
|
||||
--[no-]completion Enable/disable fuzzy completion (bash & zsh)
|
||||
--[no-]update-rc Whether or not to update shell configuration files
|
||||
|
||||
--32 Download 32-bit binary
|
||||
--64 Download 64-bit binary
|
||||
--no-bash Do not set up bash configuration
|
||||
--no-zsh Do not set up zsh configuration
|
||||
--no-fish Do not set up fish configuration
|
||||
EOF
|
||||
}
|
||||
|
||||
@@ -38,7 +40,11 @@ for opt in "$@"; do
|
||||
auto_completion=1
|
||||
key_bindings=1
|
||||
update_config=1
|
||||
allow_legacy=1
|
||||
;;
|
||||
--xdg)
|
||||
prefix='"${XDG_CONFIG_HOME:-$HOME/.config}"/fzf/fzf'
|
||||
prefix_expand=${XDG_CONFIG_HOME:-$HOME/.config}/fzf/fzf
|
||||
mkdir -p "${XDG_CONFIG_HOME:-$HOME/.config}/fzf"
|
||||
;;
|
||||
--key-bindings) key_bindings=1 ;;
|
||||
--no-key-bindings) key_bindings=0 ;;
|
||||
@@ -46,9 +52,10 @@ for opt in "$@"; do
|
||||
--no-completion) auto_completion=0 ;;
|
||||
--update-rc) update_config=1 ;;
|
||||
--no-update-rc) update_config=0 ;;
|
||||
--32) binary_arch=386 ;;
|
||||
--64) binary_arch=amd64 ;;
|
||||
--bin|--pre) ;;
|
||||
--bin) ;;
|
||||
--no-bash) shells=${shells/bash/} ;;
|
||||
--no-zsh) shells=${shells/zsh/} ;;
|
||||
--no-fish) shells=${shells/fish/} ;;
|
||||
*)
|
||||
echo "unknown option: $opt"
|
||||
help
|
||||
@@ -58,17 +65,19 @@ for opt in "$@"; do
|
||||
done
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
fzf_base="$(pwd)"
|
||||
fzf_base=$(pwd)
|
||||
fzf_base_esc=$(printf %q "$fzf_base")
|
||||
|
||||
ask() {
|
||||
# If stdin is a tty, we are "interactive".
|
||||
# non-interactive shell: wait for a linefeed
|
||||
# interactive shell: continue after a single keypress
|
||||
read_n=$([ -t 0 ] && echo "-n 1")
|
||||
|
||||
read -p "$1 ([y]/n) " $read_n -r
|
||||
echo
|
||||
[[ $REPLY =~ ^[Nn]$ ]]
|
||||
while true; do
|
||||
read -p "$1 ([y]/n) " -r
|
||||
REPLY=${REPLY:-"y"}
|
||||
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
||||
return 1
|
||||
elif [[ $REPLY =~ ^[Nn]$ ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
check_binary() {
|
||||
@@ -78,33 +87,25 @@ check_binary() {
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Error: $output"
|
||||
binary_error="Invalid binary"
|
||||
elif [ "$version" != "$output" ]; then
|
||||
echo "$output != $version"
|
||||
binary_error="Invalid version"
|
||||
else
|
||||
echo "$output"
|
||||
binary_error=""
|
||||
return 0
|
||||
output=${output/ */}
|
||||
if [ "$version" != "$output" ]; then
|
||||
echo "$output != $version"
|
||||
binary_error="Invalid version"
|
||||
else
|
||||
echo "$output"
|
||||
binary_error=""
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
rm -f "$fzf_base"/bin/fzf
|
||||
return 1
|
||||
}
|
||||
|
||||
symlink() {
|
||||
echo " - Creating symlink: bin/$1 -> bin/fzf"
|
||||
(cd "$fzf_base"/bin &&
|
||||
rm -f fzf &&
|
||||
ln -sf $1 fzf)
|
||||
if [ $? -ne 0 ]; then
|
||||
binary_error="Failed to create symlink"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
link_fzf_in_path() {
|
||||
if which_fzf="$(command -v fzf)"; then
|
||||
echo " - Found in \$PATH"
|
||||
echo " - Creating symlink: $which_fzf -> bin/fzf"
|
||||
echo " - Creating symlink: bin/fzf -> $which_fzf"
|
||||
(cd "$fzf_base"/bin && rm -f fzf && ln -sf "$which_fzf" fzf)
|
||||
check_binary && return
|
||||
fi
|
||||
@@ -112,32 +113,40 @@ link_fzf_in_path() {
|
||||
}
|
||||
|
||||
try_curl() {
|
||||
command -v curl > /dev/null && curl -fL $1 | tar -xz
|
||||
command -v curl > /dev/null &&
|
||||
if [[ $1 =~ tar.gz$ ]]; then
|
||||
curl -fL $1 | tar --no-same-owner -xzf -
|
||||
else
|
||||
local temp=${TMPDIR:-/tmp}/fzf.zip
|
||||
curl -fLo "$temp" $1 && unzip -o "$temp" && rm -f "$temp"
|
||||
fi
|
||||
}
|
||||
|
||||
try_wget() {
|
||||
command -v wget > /dev/null && wget -O - $1 | tar -xz
|
||||
command -v wget > /dev/null &&
|
||||
if [[ $1 =~ tar.gz$ ]]; then
|
||||
wget -O - $1 | tar --no-same-owner -xzf -
|
||||
else
|
||||
local temp=${TMPDIR:-/tmp}/fzf.zip
|
||||
wget -O "$temp" $1 && unzip -o "$temp" && rm -f "$temp"
|
||||
fi
|
||||
}
|
||||
|
||||
download() {
|
||||
echo "Downloading bin/fzf ..."
|
||||
if [ $pre = 0 ]; then
|
||||
if [ -x "$fzf_base"/bin/fzf ]; then
|
||||
echo " - Already exists"
|
||||
check_binary && return
|
||||
fi
|
||||
if [ -x "$fzf_base"/bin/$1 ]; then
|
||||
symlink $1 && check_binary && return
|
||||
fi
|
||||
link_fzf_in_path && return
|
||||
if [ -x "$fzf_base"/bin/fzf ]; then
|
||||
echo " - Already exists"
|
||||
check_binary && return
|
||||
fi
|
||||
link_fzf_in_path && return
|
||||
mkdir -p "$fzf_base"/bin && cd "$fzf_base"/bin
|
||||
if [ $? -ne 0 ]; then
|
||||
binary_error="Failed to create bin directory"
|
||||
return
|
||||
fi
|
||||
|
||||
local url=https://github.com/junegunn/fzf-bin/releases/download/$version/${1}.tgz
|
||||
local url
|
||||
url=https://github.com/junegunn/fzf/releases/download/v$version/${1}
|
||||
set -o pipefail
|
||||
if ! (try_curl $url || try_wget $url); then
|
||||
set +o pipefail
|
||||
@@ -146,12 +155,12 @@ download() {
|
||||
fi
|
||||
set +o pipefail
|
||||
|
||||
if [ ! -f $1 ]; then
|
||||
if [ ! -f fzf ]; then
|
||||
binary_error="Failed to download ${1}"
|
||||
return
|
||||
fi
|
||||
|
||||
chmod +x $1 && symlink $1 && check_binary
|
||||
chmod +x fzf && check_binary
|
||||
}
|
||||
|
||||
# Try to download binary executable
|
||||
@@ -159,84 +168,26 @@ archi=$(uname -sm)
|
||||
binary_available=1
|
||||
binary_error=""
|
||||
case "$archi" in
|
||||
Darwin\ x86_64) download fzf-$version-darwin_${binary_arch:-amd64} ;;
|
||||
Darwin\ i*86) download fzf-$version-darwin_${binary_arch:-386} ;;
|
||||
Linux\ x86_64) download fzf-$version-linux_${binary_arch:-amd64} ;;
|
||||
Linux\ i*86) download fzf-$version-linux_${binary_arch:-386} ;;
|
||||
*) binary_available=0 binary_error=1 ;;
|
||||
Darwin\ arm64) download fzf-$version-darwin_arm64.tar.gz ;;
|
||||
Darwin\ x86_64) download fzf-$version-darwin_amd64.tar.gz ;;
|
||||
Linux\ armv5*) download fzf-$version-linux_armv5.tar.gz ;;
|
||||
Linux\ armv6*) download fzf-$version-linux_armv6.tar.gz ;;
|
||||
Linux\ armv7*) download fzf-$version-linux_armv7.tar.gz ;;
|
||||
Linux\ armv8*) download fzf-$version-linux_arm64.tar.gz ;;
|
||||
Linux\ aarch64*) download fzf-$version-linux_arm64.tar.gz ;;
|
||||
Linux\ loongarch64) download fzf-$version-linux_loong64.tar.gz ;;
|
||||
Linux\ ppc64le) download fzf-$version-linux_ppc64le.tar.gz ;;
|
||||
Linux\ *64) download fzf-$version-linux_amd64.tar.gz ;;
|
||||
Linux\ s390x) download fzf-$version-linux_s390x.tar.gz ;;
|
||||
FreeBSD\ *64) download fzf-$version-freebsd_amd64.tar.gz ;;
|
||||
OpenBSD\ *64) download fzf-$version-openbsd_amd64.tar.gz ;;
|
||||
CYGWIN*\ *64) download fzf-$version-windows_amd64.zip ;;
|
||||
MINGW*\ *64) download fzf-$version-windows_amd64.zip ;;
|
||||
MSYS*\ *64) download fzf-$version-windows_amd64.zip ;;
|
||||
Windows*\ *64) download fzf-$version-windows_amd64.zip ;;
|
||||
*) binary_available=0 binary_error=1 ;;
|
||||
esac
|
||||
|
||||
install_ruby_fzf() {
|
||||
if [ -z "$allow_legacy" ]; then
|
||||
ask "Do you want to install legacy Ruby version instead?" && exit 1
|
||||
fi
|
||||
echo "Installing legacy Ruby version ..."
|
||||
|
||||
# ruby executable
|
||||
echo -n "Checking Ruby executable ... "
|
||||
ruby=$(command -v ruby)
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "ruby executable not found !!!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# System ruby is preferred
|
||||
system_ruby=/usr/bin/ruby
|
||||
if [ -x $system_ruby ] && [ $system_ruby != "$ruby" ]; then
|
||||
$system_ruby --disable-gems -rcurses -e0 2> /dev/null
|
||||
[ $? -eq 0 ] && ruby=$system_ruby
|
||||
fi
|
||||
|
||||
echo "OK ($ruby)"
|
||||
|
||||
# Curses-support
|
||||
echo -n "Checking Curses support ... "
|
||||
"$ruby" -rcurses -e0 2> /dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "OK"
|
||||
else
|
||||
echo "Not found"
|
||||
echo "Installing 'curses' gem ... "
|
||||
if (( EUID )); then
|
||||
/usr/bin/env gem install curses --user-install
|
||||
else
|
||||
/usr/bin/env gem install curses
|
||||
fi
|
||||
if [ $? -ne 0 ]; then
|
||||
echo
|
||||
echo "Failed to install 'curses' gem."
|
||||
if [[ $(uname -r) =~ 'ARCH' ]]; then
|
||||
echo "Make sure that base-devel package group is installed."
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Ruby version
|
||||
echo -n "Checking Ruby version ... "
|
||||
"$ruby" -e 'exit RUBY_VERSION >= "1.9"'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo ">= 1.9"
|
||||
"$ruby" --disable-gems -rcurses -e0 2> /dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
fzf_cmd="$ruby --disable-gems $fzf_base/fzf"
|
||||
else
|
||||
fzf_cmd="$ruby $fzf_base/fzf"
|
||||
fi
|
||||
else
|
||||
echo "< 1.9"
|
||||
fzf_cmd="$ruby $fzf_base/fzf"
|
||||
fi
|
||||
|
||||
# Create fzf script
|
||||
echo -n "Creating wrapper script for fzf ... "
|
||||
rm -f "$fzf_base"/bin/fzf
|
||||
echo "#!/bin/sh" > "$fzf_base"/bin/fzf
|
||||
echo "$fzf_cmd \"\$@\"" >> "$fzf_base"/bin/fzf
|
||||
chmod +x "$fzf_base"/bin/fzf
|
||||
echo "OK"
|
||||
}
|
||||
|
||||
cd "$fzf_base"
|
||||
if [ -n "$binary_error" ]; then
|
||||
if [ $binary_available -eq 0 ]; then
|
||||
@@ -245,26 +196,37 @@ if [ -n "$binary_error" ]; then
|
||||
echo " - $binary_error !!!"
|
||||
fi
|
||||
if command -v go > /dev/null; then
|
||||
echo -n "Building binary (go get -u github.com/junegunn/fzf/src/fzf) ... "
|
||||
echo -n "Building binary (go install github.com/junegunn/fzf) ... "
|
||||
if [ -z "${GOPATH-}" ]; then
|
||||
export GOPATH="${TMPDIR:-/tmp}/fzf-gopath"
|
||||
mkdir -p "$GOPATH"
|
||||
fi
|
||||
if go get -u github.com/junegunn/fzf/src/fzf; then
|
||||
if go install -ldflags "-s -w -X main.version=$version -X main.revision=go-install" github.com/junegunn/fzf; then
|
||||
echo "OK"
|
||||
cp "$GOPATH/bin/fzf" "$fzf_base/bin/"
|
||||
else
|
||||
echo "Failed to build binary ..."
|
||||
install_ruby_fzf
|
||||
echo "Failed to build binary. Installation failed."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "go executable not found. Cannot build binary ..."
|
||||
install_ruby_fzf
|
||||
echo "go executable not found. Installation failed."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
[[ "$*" =~ "--bin" ]] && exit 0
|
||||
|
||||
for s in $shells; do
|
||||
if ! command -v "$s" > /dev/null; then
|
||||
shells=${shells/$s/}
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#shells} -lt 3 ]]; then
|
||||
echo "No shell configuration to be updated."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Auto-completion
|
||||
if [ -z "$auto_completion" ]; then
|
||||
ask "Do you want to enable fuzzy auto-completion?"
|
||||
@@ -278,13 +240,12 @@ if [ -z "$key_bindings" ]; then
|
||||
fi
|
||||
|
||||
echo
|
||||
has_zsh=$(command -v zsh > /dev/null && echo 1 || echo 0)
|
||||
shells=$([ $has_zsh -eq 1 ] && echo "bash zsh" || echo "bash")
|
||||
for shell in $shells; do
|
||||
echo -n "Generate ~/.fzf.$shell ... "
|
||||
src=~/.fzf.${shell}
|
||||
[[ "$shell" = fish ]] && continue
|
||||
src=${prefix_expand}.${shell}
|
||||
echo -n "Generate $src ... "
|
||||
|
||||
fzf_completion="[[ \$- == *i* ]] && source \"$fzf_base/shell/completion.${shell}\" 2> /dev/null"
|
||||
fzf_completion="source \"$fzf_base/shell/completion.${shell}\""
|
||||
if [ $auto_completion -eq 0 ]; then
|
||||
fzf_completion="# $fzf_completion"
|
||||
fi
|
||||
@@ -294,19 +255,23 @@ for shell in $shells; do
|
||||
fzf_key_bindings="# $fzf_key_bindings"
|
||||
fi
|
||||
|
||||
cat > $src << EOF
|
||||
cat > "$src" << EOF
|
||||
# Setup fzf
|
||||
# ---------
|
||||
if [[ ! "\$PATH" == *$fzf_base/bin* ]]; then
|
||||
export PATH="\$PATH:$fzf_base/bin"
|
||||
if [[ ! "\$PATH" == *$fzf_base_esc/bin* ]]; then
|
||||
PATH="\${PATH:+\${PATH}:}$fzf_base/bin"
|
||||
fi
|
||||
|
||||
# Man path
|
||||
# --------
|
||||
if [[ ! "\$MANPATH" == *$fzf_base/man* && -d "$fzf_base/man" ]]; then
|
||||
export MANPATH="\$MANPATH:$fzf_base/man"
|
||||
fi
|
||||
EOF
|
||||
|
||||
if [[ $auto_completion -eq 1 ]] && [[ $key_bindings -eq 1 ]]; then
|
||||
if [[ "$shell" = zsh ]]; then
|
||||
echo "source <(fzf --$shell)" >> "$src"
|
||||
else
|
||||
echo "eval \"\$(fzf --$shell)\"" >> "$src"
|
||||
fi
|
||||
else
|
||||
cat >> "$src" << EOF
|
||||
# Auto-completion
|
||||
# ---------------
|
||||
$fzf_completion
|
||||
@@ -314,37 +279,19 @@ $fzf_completion
|
||||
# Key bindings
|
||||
# ------------
|
||||
$fzf_key_bindings
|
||||
|
||||
EOF
|
||||
fi
|
||||
echo "OK"
|
||||
done
|
||||
|
||||
# fish
|
||||
has_fish=$(command -v fish > /dev/null && echo 1 || echo 0)
|
||||
if [ $has_fish -eq 1 ]; then
|
||||
if [[ "$shells" =~ fish ]]; then
|
||||
echo -n "Update fish_user_paths ... "
|
||||
fish << EOF
|
||||
echo \$fish_user_paths | grep $fzf_base/bin > /dev/null
|
||||
or set --universal fish_user_paths \$fish_user_paths $fzf_base/bin
|
||||
echo \$fish_user_paths | \grep "$fzf_base"/bin > /dev/null
|
||||
or set --universal fish_user_paths \$fish_user_paths "$fzf_base"/bin
|
||||
EOF
|
||||
[ $? -eq 0 ] && echo "OK" || echo "Failed"
|
||||
|
||||
mkdir -p ~/.config/fish/functions
|
||||
if [ -e ~/.config/fish/functions/fzf.fish ]; then
|
||||
echo -n "Remove unnecessary ~/.config/fish/functions/fzf.fish ... "
|
||||
rm -f ~/.config/fish/functions/fzf.fish && echo "OK" || echo "Failed"
|
||||
fi
|
||||
|
||||
fish_binding=~/.config/fish/functions/fzf_key_bindings.fish
|
||||
if [ $key_bindings -ne 0 ]; then
|
||||
echo -n "Symlink $fish_binding ... "
|
||||
ln -sf "$fzf_base/shell/key-bindings.fish" \
|
||||
"$fish_binding" && echo "OK" || echo "Failed"
|
||||
else
|
||||
echo -n "Removing $fish_binding ... "
|
||||
rm -f "$fish_binding"
|
||||
echo "OK"
|
||||
fi
|
||||
fi
|
||||
|
||||
append_line() {
|
||||
@@ -355,20 +302,22 @@ append_line() {
|
||||
line="$2"
|
||||
file="$3"
|
||||
pat="${4:-}"
|
||||
lno=""
|
||||
|
||||
echo "Update $file:"
|
||||
echo " - $line"
|
||||
[ -f "$file" ] || touch "$file"
|
||||
if [ $# -lt 4 ]; then
|
||||
lno=$(\grep -nF "$line" "$file" | sed 's/:.*//' | tr '\n' ' ')
|
||||
else
|
||||
lno=$(\grep -nF "$pat" "$file" | sed 's/:.*//' | tr '\n' ' ')
|
||||
if [ -f "$file" ]; then
|
||||
if [ $# -lt 4 ]; then
|
||||
lno=$(\grep -nF "$line" "$file" | sed 's/:.*//' | tr '\n' ' ')
|
||||
else
|
||||
lno=$(\grep -nF "$pat" "$file" | sed 's/:.*//' | tr '\n' ' ')
|
||||
fi
|
||||
fi
|
||||
if [ -n "$lno" ]; then
|
||||
echo " - Already exists: line #$lno"
|
||||
else
|
||||
if [ $update -eq 1 ]; then
|
||||
echo >> "$file"
|
||||
[ -f "$file" ] && echo >> "$file"
|
||||
echo "$line" >> "$file"
|
||||
echo " + Added"
|
||||
else
|
||||
@@ -379,6 +328,17 @@ append_line() {
|
||||
set +e
|
||||
}
|
||||
|
||||
create_file() {
|
||||
local file="$1"
|
||||
shift
|
||||
echo "Create $file:"
|
||||
for line in "$@"; do
|
||||
echo " $line"
|
||||
echo "$line" >> "$file"
|
||||
done
|
||||
echo
|
||||
}
|
||||
|
||||
if [ $update_config -eq 2 ]; then
|
||||
echo
|
||||
ask "Do you want to update your shell configuration files?"
|
||||
@@ -386,23 +346,45 @@ if [ $update_config -eq 2 ]; then
|
||||
fi
|
||||
echo
|
||||
for shell in $shells; do
|
||||
[[ "$shell" = fish ]] && continue
|
||||
[ $shell = zsh ] && dest=${ZDOTDIR:-~}/.zshrc || dest=~/.bashrc
|
||||
append_line $update_config "[ -f ~/.fzf.${shell} ] && source ~/.fzf.${shell}" "$dest" "~/.fzf.${shell}"
|
||||
append_line $update_config "[ -f ${prefix}.${shell} ] && source ${prefix}.${shell}" "$dest" "${prefix}.${shell}"
|
||||
done
|
||||
|
||||
if [ $key_bindings -eq 1 ] && [ $has_fish -eq 1 ]; then
|
||||
bind_file=~/.config/fish/functions/fish_user_key_bindings.fish
|
||||
append_line $update_config "fzf_key_bindings" "$bind_file"
|
||||
if [ $key_bindings -eq 1 ] && [[ "$shells" =~ fish ]]; then
|
||||
bind_file="${fish_dir}/functions/fish_user_key_bindings.fish"
|
||||
if [ ! -e "$bind_file" ]; then
|
||||
mkdir -p "${fish_dir}/functions"
|
||||
create_file "$bind_file" \
|
||||
'function fish_user_key_bindings' \
|
||||
' fzf --fish | source' \
|
||||
'end'
|
||||
else
|
||||
echo "Check $bind_file:"
|
||||
lno=$(\grep -nF "fzf_key_bindings" "$bind_file" | sed 's/:.*//' | tr '\n' ' ')
|
||||
if [[ -n $lno ]]; then
|
||||
echo " ** Found 'fzf_key_bindings' in line #$lno"
|
||||
echo " ** You have to replace the line to 'fzf --fish | source'"
|
||||
echo
|
||||
else
|
||||
echo " - Clear"
|
||||
echo
|
||||
append_line $update_config "fzf --fish | source" "$bind_file"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ $update_config -eq 1 ]; then
|
||||
echo 'Finished. Restart your shell or reload config file.'
|
||||
echo ' source ~/.bashrc # bash'
|
||||
[ $has_zsh -eq 1 ] && echo " source ${ZDOTDIR:-~}/.zshrc # zsh"
|
||||
[ $has_fish -eq 1 ] && [ $key_bindings -eq 1 ] && echo ' fzf_key_bindings # fish'
|
||||
if [[ "$shells" =~ bash ]]; then
|
||||
echo -n ' source ~/.bashrc # bash'
|
||||
[[ "$archi" =~ Darwin ]] && echo -n ' (.bashrc should be loaded from .bash_profile)'
|
||||
echo
|
||||
fi
|
||||
[[ "$shells" =~ zsh ]] && echo " source ${ZDOTDIR:-~}/.zshrc # zsh"
|
||||
[[ "$shells" =~ fish ]] && [ $key_bindings -eq 1 ] && echo ' fzf_key_bindings # fish'
|
||||
echo
|
||||
echo 'Use uninstall script to remove fzf.'
|
||||
echo
|
||||
fi
|
||||
echo 'For more information, see: https://github.com/junegunn/fzf'
|
||||
|
||||
|
||||
65
install.ps1
Normal file
@@ -0,0 +1,65 @@
|
||||
$version="0.55.0"
|
||||
|
||||
$fzf_base=Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
|
||||
function check_binary () {
|
||||
Write-Host " - Checking fzf executable ... " -NoNewline
|
||||
$output=cmd /c $fzf_base\bin\fzf.exe --version 2>&1
|
||||
if (-not $?) {
|
||||
Write-Host "Error: $output"
|
||||
$binary_error="Invalid binary"
|
||||
} else {
|
||||
$output=(-Split $output)[0]
|
||||
if ($version -ne $output) {
|
||||
Write-Host "$output != $version"
|
||||
$binary_error="Invalid version"
|
||||
} else {
|
||||
Write-Host "$output"
|
||||
$binary_error=""
|
||||
return 1
|
||||
}
|
||||
}
|
||||
Remove-Item "$fzf_base\bin\fzf.exe"
|
||||
return 0
|
||||
}
|
||||
|
||||
function download {
|
||||
param($file)
|
||||
Write-Host "Downloading bin/fzf ..."
|
||||
if (Test-Path "$fzf_base\bin\fzf.exe") {
|
||||
Write-Host " - Already exists"
|
||||
if (check_binary) {
|
||||
return
|
||||
}
|
||||
}
|
||||
if (-not (Test-Path "$fzf_base\bin")) {
|
||||
md "$fzf_base\bin"
|
||||
}
|
||||
if (-not $?) {
|
||||
$binary_error="Failed to create bin directory"
|
||||
return
|
||||
}
|
||||
cd "$fzf_base\bin"
|
||||
$url="https://github.com/junegunn/fzf/releases/download/v$version/$file"
|
||||
$temp=$env:TMP + "\fzf.zip"
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
if ($PSVersionTable.PSVersion.Major -ge 3) {
|
||||
Invoke-WebRequest -Uri $url -OutFile $temp
|
||||
} else {
|
||||
(New-Object Net.WebClient).DownloadFile($url, $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath("$temp"))
|
||||
}
|
||||
if ($?) {
|
||||
(Microsoft.PowerShell.Archive\Expand-Archive -Path $temp -DestinationPath .); (Remove-Item $temp)
|
||||
} else {
|
||||
$binary_error="Failed to download with powershell"
|
||||
}
|
||||
if (-not (Test-Path fzf.exe)) {
|
||||
$binary_error="Failed to download $file"
|
||||
return
|
||||
}
|
||||
echo y | icacls $fzf_base\bin\fzf.exe /grant Administrator:F ; check_binary >$null
|
||||
}
|
||||
|
||||
download "fzf-$version-windows_amd64.zip"
|
||||
|
||||
Write-Host 'For more information, see: https://github.com/junegunn/fzf'
|
||||
101
main.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
|
||||
fzf "github.com/junegunn/fzf/src"
|
||||
"github.com/junegunn/fzf/src/protector"
|
||||
)
|
||||
|
||||
var version = "0.55"
|
||||
var revision = "devel"
|
||||
|
||||
//go:embed shell/key-bindings.bash
|
||||
var bashKeyBindings []byte
|
||||
|
||||
//go:embed shell/completion.bash
|
||||
var bashCompletion []byte
|
||||
|
||||
//go:embed shell/key-bindings.zsh
|
||||
var zshKeyBindings []byte
|
||||
|
||||
//go:embed shell/completion.zsh
|
||||
var zshCompletion []byte
|
||||
|
||||
//go:embed shell/key-bindings.fish
|
||||
var fishKeyBindings []byte
|
||||
|
||||
//go:embed man/man1/fzf.1
|
||||
var manPage []byte
|
||||
|
||||
func printScript(label string, content []byte) {
|
||||
fmt.Println("### " + label + " ###")
|
||||
fmt.Println(strings.TrimSpace(string(content)))
|
||||
fmt.Println("### end: " + label + " ###")
|
||||
}
|
||||
|
||||
func exit(code int, err error) {
|
||||
if code == fzf.ExitError && err != nil {
|
||||
fmt.Fprintln(os.Stderr, err.Error())
|
||||
}
|
||||
os.Exit(code)
|
||||
}
|
||||
|
||||
func main() {
|
||||
protector.Protect()
|
||||
|
||||
options, err := fzf.ParseOptions(true, os.Args[1:])
|
||||
if err != nil {
|
||||
exit(fzf.ExitError, err)
|
||||
return
|
||||
}
|
||||
if options.Bash {
|
||||
printScript("key-bindings.bash", bashKeyBindings)
|
||||
printScript("completion.bash", bashCompletion)
|
||||
return
|
||||
}
|
||||
if options.Zsh {
|
||||
printScript("key-bindings.zsh", zshKeyBindings)
|
||||
printScript("completion.zsh", zshCompletion)
|
||||
return
|
||||
}
|
||||
if options.Fish {
|
||||
printScript("key-bindings.fish", fishKeyBindings)
|
||||
fmt.Println("fzf_key_bindings")
|
||||
return
|
||||
}
|
||||
if options.Help {
|
||||
fmt.Print(fzf.Usage)
|
||||
return
|
||||
}
|
||||
if options.Version {
|
||||
if len(revision) > 0 {
|
||||
fmt.Printf("%s (%s)\n", version, revision)
|
||||
} else {
|
||||
fmt.Println(version)
|
||||
}
|
||||
return
|
||||
}
|
||||
if options.Man {
|
||||
file := fzf.WriteTemporaryFile([]string{string(manPage)}, "\n")
|
||||
if len(file) == 0 {
|
||||
fmt.Print(string(manPage))
|
||||
return
|
||||
}
|
||||
defer os.Remove(file)
|
||||
cmd := exec.Command("man", file)
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
if err := cmd.Run(); err != nil {
|
||||
fmt.Print(string(manPage))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
code, err := fzf.Run(options)
|
||||
exit(code, err)
|
||||
}
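The flags handled above (--bash, --zsh, --fish, --man, --version) let fzf print its embedded scripts instead of requiring them to be sourced from a repository checkout. A hedged sketch of the intended shell-side usage; the rc-file placement is an assumption:

# ~/.bashrc
eval "$(fzf --bash)"          # prints key-bindings.bash + completion.bash
# ~/.zshrc:  source <(fzf --zsh)
# fish:      fzf --fish | source   # the printed script ends with fzf_key_bindings
fzf --version                 # "0.55 (devel)" while revision is non-empty
fzf --man                     # views the embedded man page, falling back to stdout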
|
||||
@@ -1,7 +1,7 @@
|
||||
.ig
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Junegunn Choi
|
||||
Copyright (c) 2013-2024 Junegunn Choi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -21,34 +21,48 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
..
|
||||
.TH fzf-tmux 1 "Nov 2016" "fzf 0.15.6" "fzf-tmux - open fzf in tmux split pane"
|
||||
.TH fzf\-tmux 1 "Aug 2024" "fzf 0.55.0" "fzf\-tmux - open fzf in tmux split pane"
|
||||
|
||||
.SH NAME
|
||||
fzf-tmux - open fzf in tmux split pane
|
||||
fzf\-tmux - open fzf in tmux split pane
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B fzf-tmux [-u|-d [HEIGHT[%]]] [-l|-r [WIDTH[%]]] [--] [FZF OPTIONS]
|
||||
.B fzf\-tmux [\fILAYOUT OPTIONS\fR] [\-\-] [\fIFZF OPTIONS\fR]
|
||||
|
||||
.SH DESCRIPTION
|
||||
fzf-tmux is a wrapper script for fzf that opens fzf in a tmux split pane. It is
|
||||
designed to work just like fzf except that it does not take up the whole
|
||||
screen. You can safely use fzf-tmux instead of fzf in your scripts as the extra
|
||||
options will be silently ignored if you're not on tmux.
|
||||
fzf\-tmux is a wrapper script for fzf that opens fzf in a tmux split pane or in
|
||||
a tmux popup window. It is designed to work just like fzf except that it does
|
||||
not take up the whole screen. You can safely use fzf\-tmux instead of fzf in
|
||||
your scripts as the extra options will be silently ignored if you're not on
|
||||
tmux.
|
||||
|
||||
.SH OPTIONS
|
||||
.SS Layout
|
||||
.SH LAYOUT OPTIONS
|
||||
|
||||
(default: \fB-d 50%\fR)
|
||||
(default layout: \fB\-d 50%\fR)
|
||||
|
||||
.SS Popup window
|
||||
(requires tmux 3.2 or above)
|
||||
.TP
|
||||
.B "-u [height[%]]"
|
||||
.B "\-p [WIDTH[%][,HEIGHT[%]]]"
|
||||
.TP
|
||||
.B "\-w WIDTH[%]"
|
||||
.TP
|
||||
.B "\-h WIDTH[%]"
|
||||
.TP
|
||||
.B "\-x COL"
|
||||
.TP
|
||||
.B "\-y ROW"
|
||||
|
||||
.SS Split pane
|
||||
.TP
|
||||
.B "\-u [height[%]]"
|
||||
Split above (up)
|
||||
.TP
|
||||
.B "-d [height[%]]"
|
||||
.B "\-d [height[%]]"
|
||||
Split below (down)
|
||||
.TP
|
||||
.B "-l [width[%]]"
|
||||
.B "\-l [width[%]]"
|
||||
Split left
|
||||
.TP
|
||||
.B "-r [width[%]]"
|
||||
.B "\-r [width[%]]"
|
||||
Split right
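Illustrative invocations of the layout options documented above; the fzf options after "--" are examples only:

fzf-tmux -p 80%,60% -- --reverse        # popup window (tmux 3.2 or above)
fzf-tmux -d 30% -- --multi              # split pane below, 30% of the height
fzf-tmux -r 40 -- --preview 'cat {}'    # split pane on the right, 40 columns wide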
|
||||
|
||||
man/man1/fzf.1 (1639 lines; diff suppressed because it is too large)
plugin/fzf.vim (859 lines; diff suppressed because it is too large)
@@ -1,38 +1,98 @@
|
||||
#!/bin/bash
|
||||
# ____ ____
|
||||
# / __/___ / __/
|
||||
# / /_/_ / / /_
|
||||
# / __/ / /_/ __/
|
||||
# /_/ /___/_/-completion.bash
|
||||
# /_/ /___/_/ completion.bash
|
||||
#
|
||||
# - $FZF_TMUX (default: 1)
|
||||
# - $FZF_TMUX_HEIGHT (default: '40%')
|
||||
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
||||
# - $FZF_COMPLETION_OPTS (default: empty)
|
||||
# - $FZF_TMUX (default: 0)
|
||||
# - $FZF_TMUX_OPTS (default: empty)
|
||||
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
||||
# - $FZF_COMPLETION_OPTS (default: empty)
|
||||
# - $FZF_COMPLETION_PATH_OPTS (default: empty)
|
||||
# - $FZF_COMPLETION_DIR_OPTS (default: empty)
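A hedged example of setting these knobs in ~/.bashrc before this file is sourced; every value below is illustrative, not a default:

export FZF_COMPLETION_TRIGGER='~~'                  # complete with foo~~<TAB> instead of foo**<TAB>
export FZF_COMPLETION_OPTS='--border --info=inline'
export FZF_COMPLETION_PATH_OPTS='--walker file,dir,follow,hidden'
export FZF_COMPLETION_DIR_OPTS='--walker dir,follow'
export FZF_TMUX_OPTS='-p 80%,60%'                   # run completions in a tmux popup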
|
||||
|
||||
if [[ $- =~ i ]]; then
|
||||
|
||||
|
||||
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
|
||||
if ! declare -f _fzf_compgen_path > /dev/null; then
|
||||
_fzf_compgen_path() {
|
||||
echo "$1"
|
||||
command find -L "$1" \
|
||||
-name .git -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||
}
|
||||
fi
|
||||
|
||||
if ! declare -f _fzf_compgen_dir > /dev/null; then
|
||||
_fzf_compgen_dir() {
|
||||
command find -L "$1" \
|
||||
-name .git -prune -o -name .svn -prune -o -type d \
|
||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||
}
|
||||
fi
|
||||
#
|
||||
# _fzf_compgen_path() {
|
||||
# echo "$1"
|
||||
# command find -L "$1" \
|
||||
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
||||
# -a -not -path "$1" -print 2> /dev/null | command sed 's@^\./@@'
|
||||
# }
|
||||
#
|
||||
# _fzf_compgen_dir() {
|
||||
# command find -L "$1" \
|
||||
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o -type d \
|
||||
# -a -not -path "$1" -print 2> /dev/null | command sed 's@^\./@@'
|
||||
# }
|
||||
|
||||
###########################################################
|
||||
|
||||
_fzf_orig_completion_filter() {
|
||||
sed 's/^\(.*-F\) *\([^ ]*\).* \([^ ]*\)$/export _fzf_orig_completion_\3="\1 %s \3 #\2";/' |
|
||||
awk -F= '{gsub(/[^A-Za-z0-9_= ;]/, "_", $1); print $1"="$2}'
|
||||
# To redraw line after fzf closes (printf '\e[5n')
|
||||
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
||||
|
||||
__fzf_defaults() {
|
||||
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
echo "--height ${FZF_TMUX_HEIGHT:-40%} --bind=ctrl-z:ignore $1"
|
||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||
echo "${FZF_DEFAULT_OPTS-} $2"
|
||||
}
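A worked illustration of how __fzf_defaults layers its three sources; fzf parses the result left to right, so later options win. The environment values here are assumptions, not defaults:

# With FZF_DEFAULT_OPTS='--color=16', FZF_DEFAULT_OPTS_FILE empty, FZF_TMUX_HEIGHT unset,
#   __fzf_defaults "--reverse" "--border"
# prints roughly:
#   --height 40% --bind=ctrl-z:ignore --reverse
#   --color=16 --border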
|
||||
|
||||
__fzf_comprun() {
|
||||
if [[ "$(type -t _fzf_comprun 2>&1)" = function ]]; then
|
||||
_fzf_comprun "$@"
|
||||
elif [[ -n "${TMUX_PANE-}" ]] && { [[ "${FZF_TMUX:-0}" != 0 ]] || [[ -n "${FZF_TMUX_OPTS-}" ]]; }; then
|
||||
shift
|
||||
fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- "$@"
|
||||
else
|
||||
shift
|
||||
fzf "$@"
|
||||
fi
|
||||
}
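__fzf_comprun first defers to a user-defined _fzf_comprun, which is the hook for customizing how fzf is presented per command. A sketch of such an override; the preview commands are only examples:

_fzf_comprun() {
  local command=$1
  shift
  case "$command" in
    cd)  fzf --preview 'ls -l {}' "$@" ;;   # show directory contents while completing cd
    ssh) fzf --preview 'dig {}' "$@" ;;     # resolve the highlighted host
    *)   fzf "$@" ;;
  esac
}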
|
||||
|
||||
__fzf_orig_completion() {
|
||||
local l comp f cmd
|
||||
while read -r l; do
|
||||
if [[ "$l" =~ ^(.*\ -F)\ *([^ ]*).*\ ([^ ]*)$ ]]; then
|
||||
comp="${BASH_REMATCH[1]}"
|
||||
f="${BASH_REMATCH[2]}"
|
||||
cmd="${BASH_REMATCH[3]}"
|
||||
[[ "$f" = _fzf_* ]] && continue
|
||||
printf -v "_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}" "%s" "${comp} %s ${cmd} #${f}"
|
||||
if [[ "$l" = *" -o nospace "* ]] && [[ ! "${__fzf_nospace_commands-}" = *" $cmd "* ]]; then
|
||||
__fzf_nospace_commands="${__fzf_nospace_commands-} $cmd "
|
||||
fi
|
||||
fi
|
||||
done
|
||||
}
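__fzf_orig_completion parses "complete -p" output so a pre-existing completion can later be restored or wrapped. An illustration of what it records; the git completion function name is only an example:

# Given:  complete -o default -F __git_wrap__git_main git
# it sets, roughly:
#   _fzf_orig_completion_git='complete -o default -F %s git #__git_wrap__git_main'
# where %s is later substituted with fzf's wrapper function and the trailing
# #... suffix remembers the original function name.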
|
||||
|
||||
# @param $1 cmd - Command name for which the original completion is searched
|
||||
# @var[out] REPLY - Original function name is returned
|
||||
__fzf_orig_completion_get_orig_func() {
|
||||
local cmd orig_var orig
|
||||
cmd=$1
|
||||
orig_var="_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}"
|
||||
orig="${!orig_var-}"
|
||||
REPLY="${orig##*#}"
|
||||
[[ $REPLY ]] && type "$REPLY" &> /dev/null
|
||||
}
|
||||
|
||||
# @param $1 cmd - Command name for which the original completion is searched
|
||||
# @param $2 func - Fzf's completion function to replace the original function
|
||||
# @var[out] REPLY - Completion setting is returned as a string to "eval"
|
||||
__fzf_orig_completion_instantiate() {
|
||||
local cmd func orig_var orig
|
||||
cmd=$1
|
||||
func=$2
|
||||
orig_var="_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}"
|
||||
orig="${!orig_var-}"
|
||||
orig="${orig%#*}"
|
||||
[[ $orig == *' %s '* ]] || return 1
|
||||
printf -v REPLY "$orig" "$func"
|
||||
}
|
||||
|
||||
_fzf_opts_completion() {
|
||||
@@ -41,49 +101,143 @@ _fzf_opts_completion() {
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
opts="
|
||||
-x --extended
|
||||
-e --exact
|
||||
-i +i
|
||||
-n --nth
|
||||
-d --delimiter
|
||||
+c --no-color
|
||||
+i --no-ignore-case
|
||||
+s --no-sort
|
||||
--tac
|
||||
--tiebreak
|
||||
+x --no-extended
|
||||
--ansi
|
||||
--bash
|
||||
--bind
|
||||
-m --multi
|
||||
--no-mouse
|
||||
--border
|
||||
--border-label
|
||||
--border-label-pos
|
||||
--color
|
||||
--black
|
||||
--reverse
|
||||
--no-hscroll
|
||||
--inline-info
|
||||
--prompt
|
||||
-q --query
|
||||
-1 --select-1
|
||||
-0 --exit-0
|
||||
-f --filter
|
||||
--print-query
|
||||
--expect
|
||||
--toggle-sort
|
||||
--sync
|
||||
--cycle
|
||||
--disabled
|
||||
--ellipsis
|
||||
--expect
|
||||
--filepath-word
|
||||
--fish
|
||||
--header
|
||||
--header-first
|
||||
--header-lines
|
||||
--height
|
||||
--highlight-line
|
||||
--history
|
||||
--history-size
|
||||
--header
|
||||
--header-lines
|
||||
--margin"
|
||||
--hscroll-off
|
||||
--info
|
||||
--jump-labels
|
||||
--keep-right
|
||||
--layout
|
||||
--listen
|
||||
--listen-unsafe
|
||||
--literal
|
||||
--man
|
||||
--margin
|
||||
--marker
|
||||
--min-height
|
||||
--no-bold
|
||||
--no-clear
|
||||
--no-hscroll
|
||||
--no-mouse
|
||||
--no-scrollbar
|
||||
--no-separator
|
||||
--no-unicode
|
||||
--padding
|
||||
--pointer
|
||||
--preview
|
||||
--preview-label
|
||||
--preview-label-pos
|
||||
--preview-window
|
||||
--print-query
|
||||
--print0
|
||||
--prompt
|
||||
--read0
|
||||
--reverse
|
||||
--scheme
|
||||
--scroll-off
|
||||
--separator
|
||||
--sync
|
||||
--tabstop
|
||||
--tac
|
||||
--tiebreak
|
||||
--tmux
|
||||
--track
|
||||
--version
|
||||
--with-nth
|
||||
--with-shell
|
||||
--wrap
|
||||
--zsh
|
||||
-0 --exit-0
|
||||
-1 --select-1
|
||||
-d --delimiter
|
||||
-e --exact
|
||||
-f --filter
|
||||
-h --help
|
||||
-i --ignore-case
|
||||
-m --multi
|
||||
-n --nth
|
||||
-q --query
|
||||
--"
|
||||
|
||||
case "${prev}" in
|
||||
--scheme)
|
||||
COMPREPLY=( $(compgen -W "default path history" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--tiebreak)
|
||||
COMPREPLY=( $(compgen -W "length begin end index" -- "$cur") )
|
||||
COMPREPLY=( $(compgen -W "length chunk begin end index" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--color)
|
||||
COMPREPLY=( $(compgen -W "dark light 16 bw" -- "$cur") )
|
||||
COMPREPLY=( $(compgen -W "dark light 16 bw no" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--history)
|
||||
COMPREPLY=()
|
||||
--layout)
|
||||
COMPREPLY=( $(compgen -W "default reverse reverse-list" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--info)
|
||||
COMPREPLY=( $(compgen -W "default right hidden inline inline-right" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--preview-window)
|
||||
COMPREPLY=( $(compgen -W "
|
||||
default
|
||||
hidden
|
||||
nohidden
|
||||
wrap
|
||||
nowrap
|
||||
cycle
|
||||
nocycle
|
||||
up top
|
||||
down bottom
|
||||
left
|
||||
right
|
||||
rounded border border-rounded
|
||||
sharp border-sharp
|
||||
border-bold
|
||||
border-block
|
||||
border-thinblock
|
||||
border-double
|
||||
noborder border-none
|
||||
border-horizontal
|
||||
border-vertical
|
||||
border-up border-top
|
||||
border-down border-bottom
|
||||
border-left
|
||||
border-right
|
||||
follow
|
||||
nofollow" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--border)
|
||||
COMPREPLY=( $(compgen -W "rounded sharp bold block thinblock double horizontal vertical top bottom left right none" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
--border-label-pos|--preview-label-pos)
|
||||
COMPREPLY=( $(compgen -W "center bottom top" -- "$cur") )
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
@@ -97,55 +251,101 @@ _fzf_opts_completion() {
|
||||
}
|
||||
|
||||
_fzf_handle_dynamic_completion() {
|
||||
local cmd orig_var orig ret orig_cmd
|
||||
local cmd ret REPLY orig_cmd orig_complete
|
||||
cmd="$1"
|
||||
shift
|
||||
orig_cmd="$1"
|
||||
orig_var="_fzf_orig_completion_$cmd"
|
||||
orig="${!orig_var##*#}"
|
||||
if [ -n "$orig" ] && type "$orig" > /dev/null 2>&1; then
|
||||
$orig "$@"
|
||||
elif [ -n "$_fzf_completion_loader" ]; then
|
||||
_completion_loader "$@"
|
||||
if __fzf_orig_completion_get_orig_func "$cmd"; then
|
||||
"$REPLY" "$@"
|
||||
elif [[ -n "${_fzf_completion_loader-}" ]]; then
|
||||
orig_complete=$(complete -p "$orig_cmd" 2> /dev/null)
|
||||
$_fzf_completion_loader "$@"
|
||||
ret=$?
|
||||
eval "$(complete | command grep "\-F.* $orig_cmd$" | _fzf_orig_completion_filter)"
|
||||
source "${BASH_SOURCE[0]}"
|
||||
# _completion_loader may not have updated completion for the command
|
||||
if [[ "$(complete -p "$orig_cmd" 2> /dev/null)" != "$orig_complete" ]]; then
|
||||
__fzf_orig_completion < <(complete -p "$orig_cmd" 2> /dev/null)
|
||||
__fzf_orig_completion_get_orig_func "$cmd" || ret=1
|
||||
|
||||
# Update orig_complete by _fzf_orig_completion entry
|
||||
[[ $orig_complete =~ ' -F '(_fzf_[^ ]+)' ' ]] &&
|
||||
__fzf_orig_completion_instantiate "$cmd" "${BASH_REMATCH[1]}" &&
|
||||
orig_complete=$REPLY
|
||||
|
||||
if [[ "${__fzf_nospace_commands-}" = *" $orig_cmd "* ]]; then
|
||||
eval "${orig_complete/ -F / -o nospace -F }"
|
||||
else
|
||||
eval "$orig_complete"
|
||||
fi
|
||||
fi
|
||||
[[ $ret -eq 0 ]] && return 124
|
||||
return $ret
|
||||
fi
|
||||
}
|
||||
|
||||
__fzf_generic_path_completion() {
|
||||
local cur base dir leftover matches trigger cmd fzf
|
||||
[ "${FZF_TMUX:-1}" != 0 ] && fzf="fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%}" || fzf="fzf"
|
||||
cmd="${COMP_WORDS[0]//[^A-Za-z0-9_=]/_}"
|
||||
local cur base dir leftover matches trigger cmd rest
|
||||
cmd="${COMP_WORDS[0]}"
|
||||
if [[ $cmd == \\* ]]; then
|
||||
cmd="${cmd:1}"
|
||||
fi
|
||||
COMPREPLY=()
|
||||
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
if [[ "$cur" == *"$trigger" ]]; then
|
||||
[[ $COMP_CWORD -ge 0 ]] && cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
if [[ "$cur" == *"$trigger" ]] && [[ $cur != *'$('* ]] && [[ $cur != *':='* ]] && [[ $cur != *'`'* ]]; then
|
||||
base=${cur:0:${#cur}-${#trigger}}
|
||||
eval "base=$base"
|
||||
eval "base=$base" 2> /dev/null || return
|
||||
|
||||
dir="$base"
|
||||
# Try to leverage existing completion
|
||||
rest=("${@:4}")
|
||||
unset 'rest[${#rest[@]}-2]'
|
||||
COMP_LINE=${COMP_LINE:0:${#COMP_LINE}-${#trigger}}
|
||||
COMP_POINT=$((COMP_POINT-${#trigger}))
|
||||
COMP_WORDS[$COMP_CWORD]=$base
|
||||
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||
[[ $? -ne 0 ]] &&
|
||||
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||
|
||||
dir=
|
||||
[[ $base = *"/"* ]] && dir="$base"
|
||||
while true; do
|
||||
if [ -z "$dir" ] || [ -d "$dir" ]; then
|
||||
if [[ -z "$dir" ]] || [[ -d "$dir" ]]; then
|
||||
leftover=${base/#"$dir"}
|
||||
leftover=${leftover/#\/}
|
||||
[ -z "$dir" ] && dir='.'
|
||||
[ "$dir" != "/" ] && dir="${dir/%\//}"
|
||||
tput sc
|
||||
matches=$(eval "$1 $(printf %q "$dir")" | $fzf $FZF_COMPLETION_OPTS $2 -q "$leftover" | while read -r item; do
|
||||
printf "%q$3 " "$item"
|
||||
done)
|
||||
[[ -z "$dir" ]] && dir='.'
|
||||
[[ "$dir" != "/" ]] && dir="${dir/%\//}"
|
||||
matches=$(
|
||||
export FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --scheme=path" "${FZF_COMPLETION_OPTS-} $2")
|
||||
unset FZF_DEFAULT_COMMAND FZF_DEFAULT_OPTS_FILE
|
||||
if [[ ${#COMPREPLY[@]} -gt 0 ]]; then
|
||||
for h in "${COMPREPLY[@]}"; do
|
||||
echo "$h"
|
||||
done | command sort -u | __fzf_comprun "$4" -q "$leftover"
|
||||
elif declare -F "$1" > /dev/null; then
|
||||
eval "$1 $(printf %q "$dir")" | __fzf_comprun "$4" -q "$leftover"
|
||||
else
|
||||
if [[ $1 =~ dir ]]; then
|
||||
walker=dir,follow
|
||||
rest=${FZF_COMPLETION_DIR_OPTS-}
|
||||
else
|
||||
walker=file,dir,follow,hidden
|
||||
rest=${FZF_COMPLETION_PATH_OPTS-}
|
||||
fi
|
||||
__fzf_comprun "$4" -q "$leftover" --walker "$walker" --walker-root="$dir" $rest
|
||||
fi | while read -r item; do
|
||||
printf "%q " "${item%$3}$3"
|
||||
done
|
||||
)
|
||||
matches=${matches% }
|
||||
if [ -n "$matches" ]; then
|
||||
[[ -z "$3" ]] && [[ "${__fzf_nospace_commands-}" = *" ${COMP_WORDS[0]} "* ]] && matches="$matches "
|
||||
if [[ -n "$matches" ]]; then
|
||||
COMPREPLY=( "$matches" )
|
||||
else
|
||||
COMPREPLY=( "$cur" )
|
||||
fi
|
||||
tput rc
|
||||
printf '\e[5n'
|
||||
return 0
|
||||
fi
|
||||
dir=$(dirname "$dir")
|
||||
dir=$(command dirname "$dir")
|
||||
[[ "$dir" =~ /$ ]] || dir="$dir"/
|
||||
done
|
||||
else
|
||||
@@ -157,29 +357,64 @@ __fzf_generic_path_completion() {
|
||||
}
|
||||
|
||||
_fzf_complete() {
|
||||
local cur selected trigger cmd fzf post
|
||||
post="$(caller 0 | awk '{print $2}')_post"
|
||||
type -t "$post" > /dev/null 2>&1 || post=cat
|
||||
[ "${FZF_TMUX:-1}" != 0 ] && fzf="fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%}" || fzf="fzf"
|
||||
# Split arguments around --
|
||||
local args rest str_arg i sep
|
||||
args=("$@")
|
||||
sep=
|
||||
for i in "${!args[@]}"; do
|
||||
if [[ "${args[$i]}" = -- ]]; then
|
||||
sep=$i
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [[ -n "$sep" ]]; then
|
||||
str_arg=
|
||||
rest=("${args[@]:$((sep + 1)):${#args[@]}}")
|
||||
args=("${args[@]:0:$sep}")
|
||||
else
|
||||
str_arg=$1
|
||||
args=()
|
||||
shift
|
||||
rest=("$@")
|
||||
fi
|
||||
|
||||
local cur selected trigger cmd post
|
||||
post="$(caller 0 | command awk '{print $2}')_post"
|
||||
type -t "$post" > /dev/null 2>&1 || post='command cat'
|
||||
|
||||
cmd="${COMP_WORDS[0]//[^A-Za-z0-9_=]/_}"
|
||||
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
||||
cmd="${COMP_WORDS[0]}"
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
if [[ "$cur" == *"$trigger" ]]; then
|
||||
if [[ "$cur" == *"$trigger" ]] && [[ $cur != *'$('* ]] && [[ $cur != *':='* ]] && [[ $cur != *'`'* ]]; then
|
||||
cur=${cur:0:${#cur}-${#trigger}}
|
||||
|
||||
tput sc
|
||||
selected=$(cat | $fzf $FZF_COMPLETION_OPTS $1 -q "$cur" | $post | tr '\n' ' ')
|
||||
selected=${selected% } # Strip trailing space not to repeat "-o nospace"
|
||||
tput rc
|
||||
# Try to leverage existing completion
|
||||
COMP_LINE=${COMP_LINE:0:${#COMP_LINE}-${#trigger}}
|
||||
COMP_POINT=$((COMP_POINT-${#trigger}))
|
||||
unset 'rest[${#rest[@]}-2]'
|
||||
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||
[[ $? -ne 0 ]] &&
|
||||
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||
|
||||
if [ -n "$selected" ]; then
|
||||
selected=$(
|
||||
(if [[ ${#COMPREPLY[@]} -gt 0 ]]; then
|
||||
for h in "${COMPREPLY[@]}"; do
|
||||
echo "$h"
|
||||
done
|
||||
fi; cat) | command sort -u |
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse" "${FZF_COMPLETION_OPTS-} $str_arg") \
|
||||
FZF_DEFAULT_OPTS_FILE='' \
|
||||
__fzf_comprun "${rest[0]}" "${args[@]}" -q "$cur" | $post | command tr '\n' ' ')
|
||||
selected=${selected% } # Strip trailing space not to repeat "-o nospace"
|
||||
if [[ -n "$selected" ]]; then
|
||||
COMPREPLY=("$selected")
|
||||
return 0
|
||||
else
|
||||
COMPREPLY=("$cur")
|
||||
fi
|
||||
printf '\e[5n'
|
||||
return 0
|
||||
else
|
||||
shift
|
||||
_fzf_handle_dynamic_completion "$cmd" "$@"
|
||||
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||
fi
|
||||
}
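_fzf_complete is also the public entry point for user-defined fuzzy completions. A sketch following fzf's documented convention; the doge command and its word list are placeholders:

_fzf_complete_doge() {
  _fzf_complete --multi --reverse --prompt="doge> " -- "$@" < <(
    echo very
    echo wow
    echo such
    echo doge
  )
}
_fzf_complete_doge_post() {
  awk '{print $1}'     # optional post-processing of the selected lines
}
complete -F _fzf_complete_doge -o default -o bashdefault doge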
|
||||
|
||||
@@ -197,91 +432,146 @@ _fzf_dir_completion() {
|
||||
}
|
||||
|
||||
_fzf_complete_kill() {
|
||||
[ -n "${COMP_WORDS[COMP_CWORD]}" ] && return 1
|
||||
|
||||
local selected fzf
|
||||
[ "${FZF_TMUX:-1}" != 0 ] && fzf="fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%}" || fzf="fzf"
|
||||
tput sc
|
||||
selected=$(ps -ef | sed 1d | $fzf -m $FZF_COMPLETION_OPTS | awk '{print $2}' | tr '\n' ' ')
|
||||
tput rc
|
||||
|
||||
if [ -n "$selected" ]; then
|
||||
COMPREPLY=( "$selected" )
|
||||
return 0
|
||||
fi
|
||||
_fzf_proc_completion "$@"
|
||||
}
|
||||
|
||||
_fzf_complete_telnet() {
|
||||
_fzf_complete '+m' "$@" < <(
|
||||
command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0' |
|
||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
||||
_fzf_proc_completion() {
|
||||
_fzf_complete -m --header-lines=1 --no-preview --wrap -- "$@" < <(
|
||||
command ps -eo user,pid,ppid,start,time,command 2> /dev/null ||
|
||||
command ps -eo user,pid,ppid,time,args 2> /dev/null || # For BusyBox
|
||||
command ps --everyone --full --windows # For cygwin
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_proc_completion_post() {
|
||||
command awk '{print $2}'
|
||||
}
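Once kill is wired to _fzf_proc_completion later in this file (via __fzf_defc), the interaction looks roughly like this, assuming the default '**' trigger is kept:

kill -9 **       # press TAB here: the ps listing above opens in fzf and
                 # _fzf_proc_completion_post reduces each selection to its PID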
|
||||
|
||||
# To use custom hostname lists, override __fzf_list_hosts.
|
||||
# The function is expected to print hostnames, one per line as well as in the
|
||||
# desired sorting and with any duplicates removed, to standard output.
|
||||
#
|
||||
# e.g.
|
||||
# # Use bash-completions’s _known_hosts_real() for getting the list of hosts
|
||||
# __fzf_list_hosts() {
|
||||
# # Set the local attribute for any non-local variable that is set by _known_hosts_real()
|
||||
# local COMPREPLY=()
|
||||
# _known_hosts_real ''
|
||||
# printf '%s\n' "${COMPREPLY[@]}" | command sort -u --version-sort
|
||||
# }
|
||||
if ! declare -F __fzf_list_hosts > /dev/null; then
|
||||
__fzf_list_hosts() {
|
||||
command cat <(command tail -n +1 ~/.ssh/config ~/.ssh/config.d/* /etc/ssh/ssh_config 2> /dev/null | command grep -i '^\s*host\(name\)\? ' | command awk '{for (i = 2; i <= NF; i++) print $1 " " $i}' | command grep -v '[*?%]') \
|
||||
<(command grep -oE '^[[a-z0-9.,:-]+' ~/.ssh/known_hosts 2> /dev/null | command tr ',' '\n' | command tr -d '[' | command awk '{ print $1 " " $1 }') \
|
||||
<(command grep -v '^\s*\(#\|$\)' /etc/hosts 2> /dev/null | command grep -Fv '0.0.0.0' | command sed 's/#.*//') |
|
||||
command awk '{for (i = 2; i <= NF; i++) print $i}' | command sort -u
|
||||
}
|
||||
fi
|
||||
|
||||
_fzf_host_completion() {
|
||||
_fzf_complete +m -- "$@" < <(__fzf_list_hosts)
|
||||
}
|
||||
|
||||
# Values for $1 $2 $3 are described here
|
||||
# https://www.gnu.org/software/bash/manual/html_node/Programmable-Completion.html
|
||||
# > the first argument ($1) is the name of the command whose arguments are being completed,
|
||||
# > the second argument ($2) is the word being completed,
|
||||
# > and the third argument ($3) is the word preceding the word being completed on the current command line.
|
||||
_fzf_complete_ssh() {
|
||||
_fzf_complete '+m' "$@" < <(
|
||||
cat <(cat ~/.ssh/config /etc/ssh/ssh_config 2> /dev/null | command grep -i '^host' | command grep -v '*') \
|
||||
<(command grep -oE '^[^ ]+' ~/.ssh/known_hosts | tr ',' '\n' | awk '{ print $1 " " $1 }') \
|
||||
<(command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0') |
|
||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
||||
case $3 in
|
||||
-i|-F|-E)
|
||||
_fzf_path_completion "$@"
|
||||
;;
|
||||
*)
|
||||
local user=
|
||||
[[ "$2" =~ '@' ]] && user="${2%%@*}@"
|
||||
_fzf_complete +m -- "$@" < <(__fzf_list_hosts | command awk -v user="$user" '{print user $0}')
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_fzf_var_completion() {
|
||||
_fzf_complete -m -- "$@" < <(
|
||||
declare -xp | command sed -En 's|^declare [^ ]+ ([^=]+).*|\1|p'
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_unset() {
|
||||
_fzf_complete '-m' "$@" < <(
|
||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_export() {
|
||||
_fzf_complete '-m' "$@" < <(
|
||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_unalias() {
|
||||
_fzf_complete '-m' "$@" < <(
|
||||
alias | sed 's/=.*//' | sed 's/.* //'
|
||||
_fzf_alias_completion() {
|
||||
_fzf_complete -m -- "$@" < <(
|
||||
alias | command sed -En 's|^alias ([^=]+).*|\1|p'
|
||||
)
|
||||
}
|
||||
|
||||
# fzf options
|
||||
complete -o default -F _fzf_opts_completion fzf
|
||||
# fzf-tmux is a thin fzf wrapper that has only a few more options than fzf
|
||||
# itself. As a quick improvement we take fzf's completion. Adding the few extra
|
||||
# fzf-tmux specific options (like `-w WIDTH`) are left as a future patch.
|
||||
complete -o default -F _fzf_opts_completion fzf-tmux
|
||||
|
||||
d_cmds="${FZF_COMPLETION_DIR_COMMANDS:-cd pushd rmdir}"
|
||||
a_cmds="
|
||||
awk cat diff diff3
|
||||
emacs emacsclient ex file ftp g++ gcc gvim head hg java
|
||||
# Default path completion
|
||||
__fzf_default_completion() {
|
||||
__fzf_generic_path_completion _fzf_compgen_path "-m" "" "$@"
|
||||
|
||||
# Dynamic completion loader has updated the completion for the command
|
||||
if [[ $? -eq 124 ]]; then
|
||||
# We trigger _fzf_setup_completion so that fuzzy completion for the command
|
||||
# still works. However, loader can update the completion for multiple
|
||||
# commands at once, and fuzzy completion will no longer work for those
|
||||
# other commands. e.g. pytest -> py.test, pytest-2, pytest-3, etc
|
||||
_fzf_setup_completion path "$1"
|
||||
return 124
|
||||
fi
|
||||
}
|
||||
|
||||
# Set fuzzy path completion as the default completion for all commands.
|
||||
# We can't set up default completion,
|
||||
# 1. if it's already set up by another script
|
||||
# 2. or if the current version of bash doesn't support -D option
|
||||
complete | command grep -q __fzf_default_completion ||
|
||||
complete | command grep -- '-D$' | command grep -qv _comp_complete_load ||
|
||||
complete -D -F __fzf_default_completion -o default -o bashdefault 2> /dev/null
|
||||
|
||||
d_cmds="${FZF_COMPLETION_DIR_COMMANDS-cd pushd rmdir}"
|
||||
|
||||
# NOTE: $FZF_COMPLETION_PATH_COMMANDS and $FZF_COMPLETION_VAR_COMMANDS are
|
||||
# undocumented and subject to change in the future.
|
||||
#
|
||||
# NOTE: Although we have default completion, we still need to set up completion
|
||||
# for each command in case they already have completion set up by another script.
|
||||
a_cmds="${FZF_COMPLETION_PATH_COMMANDS-"
|
||||
awk bat cat code diff diff3
|
||||
emacs emacsclient ex file ftp g++ gcc gvim head hg hx java
|
||||
javac ld less more mvim nvim patch perl python ruby
|
||||
sed sftp sort source tail tee uniq vi view vim wc xdg-open
|
||||
basename bunzip2 bzip2 chmod chown curl cp dirname du
|
||||
find git grep gunzip gzip hg jar
|
||||
ln ls mv open rm rsync scp
|
||||
svn tar unzip zip"
|
||||
x_cmds="kill ssh telnet unset unalias export"
|
||||
svn tar unzip zip"}"
|
||||
v_cmds="${FZF_COMPLETION_VAR_COMMANDS-export unset printenv}"
|
||||
|
||||
# Preserve existing completion
|
||||
if [ "$_fzf_completion_loaded" != '0.11.3' ]; then
|
||||
# Really wish I could use associative array but OSX comes with bash 3.2 :(
|
||||
eval $(complete | command grep '\-F' | command grep -v _fzf_ |
|
||||
command grep -E " ($(echo $d_cmds $a_cmds $x_cmds | sed 's/ /|/g' | sed 's/+/\\+/g'))$" | _fzf_orig_completion_filter)
|
||||
export _fzf_completion_loaded=0.11.3
|
||||
__fzf_orig_completion < <(complete -p $d_cmds $a_cmds $v_cmds unalias kill ssh 2> /dev/null)
|
||||
|
||||
if type _comp_load > /dev/null 2>&1; then
|
||||
# _comp_load was added in bash-completion 2.12 to replace _completion_loader.
|
||||
# We use it without -D option so that it does not use _comp_complete_minimal as the fallback.
|
||||
_fzf_completion_loader=_comp_load
|
||||
elif type __load_completion > /dev/null 2>&1; then
|
||||
# In bash-completion 2.11, _completion_loader internally calls __load_completion
|
||||
# and if it returns a non-zero status, it sets the default 'minimal' completion.
|
||||
_fzf_completion_loader=__load_completion
|
||||
elif type _completion_loader > /dev/null 2>&1; then
|
||||
_fzf_completion_loader=_completion_loader
|
||||
fi
|
||||
|
||||
if type _completion_loader > /dev/null 2>&1; then
|
||||
_fzf_completion_loader=1
|
||||
fi
|
||||
|
||||
_fzf_defc() {
|
||||
local cmd func opts orig_var orig def
|
||||
__fzf_defc() {
|
||||
local cmd func opts REPLY
|
||||
cmd="$1"
|
||||
func="$2"
|
||||
opts="$3"
|
||||
orig_var="_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}"
|
||||
orig="${!orig_var}"
|
||||
if [ -n "$orig" ]; then
|
||||
printf -v def "$orig" "$func"
|
||||
eval "$def"
|
||||
if __fzf_orig_completion_instantiate "$cmd" "$func"; then
|
||||
eval "$REPLY"
|
||||
else
|
||||
complete -F "$func" $opts "$cmd"
|
||||
fi
|
||||
@@ -289,26 +579,48 @@ _fzf_defc() {
|
||||
|
||||
# Anything
|
||||
for cmd in $a_cmds; do
|
||||
_fzf_defc "$cmd" _fzf_path_completion "-o default -o bashdefault"
|
||||
__fzf_defc "$cmd" _fzf_path_completion "-o default -o bashdefault"
|
||||
done
|
||||
|
||||
# Directory
|
||||
for cmd in $d_cmds; do
|
||||
_fzf_defc "$cmd" _fzf_dir_completion "-o nospace -o plusdirs"
|
||||
__fzf_defc "$cmd" _fzf_dir_completion "-o bashdefault -o nospace -o dirnames"
|
||||
done
|
||||
|
||||
unset _fzf_defc
|
||||
# Variables
|
||||
for cmd in $v_cmds; do
|
||||
__fzf_defc "$cmd" _fzf_var_completion "-o default -o nospace -v"
|
||||
done
|
||||
|
||||
# Kill completion
|
||||
complete -F _fzf_complete_kill -o nospace -o default -o bashdefault kill
|
||||
# Aliases
|
||||
__fzf_defc unalias _fzf_alias_completion "-a"
|
||||
|
||||
# Host completion
|
||||
complete -F _fzf_complete_ssh -o default -o bashdefault ssh
|
||||
complete -F _fzf_complete_telnet -o default -o bashdefault telnet
|
||||
# Processes
|
||||
__fzf_defc kill _fzf_proc_completion "-o default -o bashdefault"
|
||||
|
||||
# Environment variables / Aliases
|
||||
complete -F _fzf_complete_unset -o default -o bashdefault unset
|
||||
complete -F _fzf_complete_export -o default -o bashdefault export
|
||||
complete -F _fzf_complete_unalias -o default -o bashdefault unalias
|
||||
# ssh
|
||||
__fzf_defc ssh _fzf_complete_ssh "-o default -o bashdefault"
|
||||
|
||||
unset cmd d_cmds a_cmds x_cmds
|
||||
unset cmd d_cmds a_cmds v_cmds
|
||||
|
||||
_fzf_setup_completion() {
|
||||
local kind fn cmd
|
||||
kind=$1
|
||||
fn=_fzf_${1}_completion
|
||||
if [[ $# -lt 2 ]] || ! type -t "$fn" > /dev/null; then
|
||||
echo "usage: ${FUNCNAME[0]} path|dir|var|alias|host|proc COMMANDS..."
|
||||
return 1
|
||||
fi
|
||||
shift
|
||||
__fzf_orig_completion < <(complete -p "$@" 2> /dev/null)
|
||||
for cmd in "$@"; do
|
||||
case "$kind" in
|
||||
dir) __fzf_defc "$cmd" "$fn" "-o nospace -o dirnames" ;;
|
||||
var) __fzf_defc "$cmd" "$fn" "-o default -o nospace -v" ;;
|
||||
alias) __fzf_defc "$cmd" "$fn" "-a" ;;
|
||||
*) __fzf_defc "$cmd" "$fn" "-o default -o bashdefault" ;;
|
||||
esac
|
||||
done
|
||||
}
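_fzf_setup_completion is the documented way to (re)attach fuzzy completion to additional commands, for example after a completion loader has replaced it. The command names below are only examples:

_fzf_setup_completion path ag git kubectl
_fzf_setup_completion dir tree
_fzf_setup_completion var export unset
_fzf_setup_completion host ssh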
|
||||
|
||||
fi
|
||||
|
||||
@@ -1,64 +1,186 @@
|
||||
#!/bin/zsh
|
||||
# ____ ____
|
||||
# / __/___ / __/
|
||||
# / /_/_ / / /_
|
||||
# / __/ / /_/ __/
|
||||
# /_/ /___/_/-completion.zsh
|
||||
# /_/ /___/_/ completion.zsh
|
||||
#
|
||||
# - $FZF_TMUX (default: 1)
|
||||
# - $FZF_TMUX_HEIGHT (default: '40%')
|
||||
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
||||
# - $FZF_COMPLETION_OPTS (default: empty)
|
||||
# - $FZF_TMUX (default: 0)
|
||||
# - $FZF_TMUX_OPTS (default: empty)
|
||||
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
||||
# - $FZF_COMPLETION_OPTS (default: empty)
|
||||
# - $FZF_COMPLETION_PATH_OPTS (default: empty)
|
||||
# - $FZF_COMPLETION_DIR_OPTS (default: empty)
|
||||
|
||||
|
||||
# Both branches of the following `if` do the same thing -- define
|
||||
# __fzf_completion_options such that `eval $__fzf_completion_options` sets
|
||||
# all options to the same values they currently have. We'll do just that at
|
||||
# the bottom of the file after changing options to what we prefer.
|
||||
#
|
||||
# IMPORTANT: Until we get to the `emulate` line, all words that *can* be quoted
|
||||
# *must* be quoted in order to prevent alias expansion. In addition, code must
|
||||
# be written in a way that works with any set of zsh options. This is very tricky, so
# be careful when you change it.
|
||||
#
|
||||
# Start by loading the builtin zsh/parameter module. It provides `options`
|
||||
# associative array that stores current shell options.
|
||||
if 'zmodload' 'zsh/parameter' 2>'/dev/null' && (( ${+options} )); then
|
||||
# This is the fast branch and it gets taken on virtually all Zsh installations.
|
||||
#
|
||||
# ${(kv)options[@]} expands to array of keys (option names) and values ("on"
|
||||
# or "off"). The subsequent expansion# with (j: :) flag joins all elements
|
||||
# together separated by spaces. __fzf_completion_options ends up with a value
|
||||
# like this: "options=(shwordsplit off aliases on ...)".
|
||||
__fzf_completion_options="options=(${(j: :)${(kv)options[@]}})"
|
||||
else
|
||||
# This branch is much slower because it forks to get the names of all
|
||||
# zsh options. It's possible to eliminate this fork but it's not worth the
|
||||
# trouble because this branch gets taken only on very ancient or broken
|
||||
# zsh installations.
|
||||
() {
|
||||
# That `()` above defines an anonymous function. This is essentially a scope
|
||||
# for local parameters. We use it to avoid polluting global scope.
|
||||
'local' '__fzf_opt'
|
||||
__fzf_completion_options="setopt"
|
||||
# `set -o` prints one line for every zsh option. Each line contains option
|
||||
# name, some spaces, and then either "on" or "off". We just want option names.
|
||||
# Expansion with (@f) flag splits a string into lines. The outer expansion
|
||||
# removes spaces and everything that follow them on every line. __fzf_opt
|
||||
# ends up iterating over option names: shwordsplit, aliases, etc.
|
||||
for __fzf_opt in "${(@)${(@f)$(set -o)}%% *}"; do
|
||||
if [[ -o "$__fzf_opt" ]]; then
|
||||
# Option $__fzf_opt is currently on, so remember to set it back on.
|
||||
__fzf_completion_options+=" -o $__fzf_opt"
|
||||
else
|
||||
# Option $__fzf_opt is currently off, so remember to set it back off.
|
||||
__fzf_completion_options+=" +o $__fzf_opt"
|
||||
fi
|
||||
done
|
||||
# The value of __fzf_completion_options here looks like this:
|
||||
# "setopt +o shwordsplit -o aliases ..."
|
||||
}
|
||||
fi
|
||||
|
||||
# Enable the default zsh options (those marked with <Z> in `man zshoptions`)
|
||||
# but without `aliases`. Aliases in functions are expanded when functions are
|
||||
# defined, so if we disable aliases here, we'll be sure to have no pesky
|
||||
# aliases in any of our functions. This way we won't need to prefix every
|
||||
# command with `command` or to quote every word to defend against global
|
||||
# aliases. Note that `aliases` is not the only option that's important to
|
||||
# control. There are several others that could wreak havoc if they are set
|
||||
# to values we don't expect. With the following `emulate` command we
|
||||
# sidestep this issue entirely.
|
||||
'builtin' 'emulate' 'zsh' && 'builtin' 'setopt' 'no_aliases'
|
||||
|
||||
# This brace is the start of try-always block. The `always` part is like
|
||||
# `finally` in lesser languages. We use it to *always* restore user options.
|
||||
{
|
||||
# The 'emulate' command should not be placed inside the interactive if check;
|
||||
# placing it there fails to disable alias expansion. See #3731.
|
||||
if [[ -o interactive ]]; then
|
||||
|
||||
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
|
||||
if ! declare -f _fzf_compgen_path > /dev/null; then
|
||||
_fzf_compgen_path() {
|
||||
echo "$1"
|
||||
command find -L "$1" \
|
||||
-name .git -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||
}
|
||||
fi
|
||||
|
||||
if ! declare -f _fzf_compgen_dir > /dev/null; then
|
||||
_fzf_compgen_dir() {
|
||||
command find -L "$1" \
|
||||
-name .git -prune -o -name .svn -prune -o -type d \
|
||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||
}
|
||||
fi
|
||||
#
|
||||
# _fzf_compgen_path() {
|
||||
# echo "$1"
|
||||
# command find -L "$1" \
|
||||
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
||||
# -a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||
# }
|
||||
#
|
||||
# _fzf_compgen_dir() {
|
||||
# command find -L "$1" \
|
||||
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o -type d \
|
||||
# -a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||
# }
|
||||
|
||||
###########################################################
|
||||
|
||||
__fzf_defaults() {
|
||||
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
echo "--height ${FZF_TMUX_HEIGHT:-40%} --bind=ctrl-z:ignore $1"
|
||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||
echo "${FZF_DEFAULT_OPTS-} $2"
|
||||
}
|
||||
|
||||
__fzf_comprun() {
|
||||
if [[ "$(type _fzf_comprun 2>&1)" =~ function ]]; then
|
||||
_fzf_comprun "$@"
|
||||
elif [ -n "${TMUX_PANE-}" ] && { [ "${FZF_TMUX:-0}" != 0 ] || [ -n "${FZF_TMUX_OPTS-}" ]; }; then
|
||||
shift
|
||||
if [ -n "${FZF_TMUX_OPTS-}" ]; then
|
||||
fzf-tmux ${(Q)${(Z+n+)FZF_TMUX_OPTS}} -- "$@"
|
||||
else
|
||||
fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%} -- "$@"
|
||||
fi
|
||||
else
|
||||
shift
|
||||
fzf "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
# Extract the name of the command. e.g. foo=1 bar baz**<tab>
|
||||
__fzf_extract_command() {
|
||||
local token tokens
|
||||
tokens=(${(z)1})
|
||||
for token in $tokens; do
|
||||
token=${(Q)token}
|
||||
if [[ "$token" =~ [[:alnum:]] && ! "$token" =~ "=" ]]; then
|
||||
echo "$token"
|
||||
return
|
||||
fi
|
||||
done
|
||||
echo "${tokens[1]}"
|
||||
}
|
||||
|
||||
__fzf_generic_path_completion() {
|
||||
local base lbuf compgen fzf_opts suffix tail fzf dir leftover matches
|
||||
# (Q) flag removes a quoting level: "foo\ bar" => "foo bar"
|
||||
base=${(Q)1}
|
||||
local base lbuf cmd compgen fzf_opts suffix tail dir leftover matches
|
||||
base=$1
|
||||
lbuf=$2
|
||||
cmd=$(__fzf_extract_command "$lbuf")
|
||||
compgen=$3
|
||||
fzf_opts=$4
|
||||
suffix=$5
|
||||
tail=$6
|
||||
[ ${FZF_TMUX:-1} -eq 1 ] && fzf="fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%}" || fzf="fzf"
|
||||
|
||||
setopt localoptions nonomatch
|
||||
dir="$base"
|
||||
if [[ $base = *'$('* ]] || [[ $base = *'<('* ]] || [[ $base = *'>('* ]] || [[ $base = *':='* ]] || [[ $base = *'`'* ]]; then
|
||||
return
|
||||
fi
|
||||
eval "base=$base" 2> /dev/null || return
|
||||
[[ $base = *"/"* ]] && dir="$base"
|
||||
while [ 1 ]; do
|
||||
if [[ -z "$dir" || -d ${~dir} ]]; then
|
||||
if [[ -z "$dir" || -d ${dir} ]]; then
|
||||
leftover=${base/#"$dir"}
|
||||
leftover=${leftover/#\/}
|
||||
[ -z "$dir" ] && dir='.'
|
||||
[ "$dir" != "/" ] && dir="${dir/%\//}"
|
||||
dir=${~dir}
|
||||
matches=$(eval "$compgen $(printf %q "$dir")" | ${=fzf} ${=FZF_COMPLETION_OPTS} ${=fzf_opts} -q "$leftover" | while read item; do
|
||||
echo -n "${(q)item}$suffix "
|
||||
done)
|
||||
matches=$(
|
||||
export FZF_DEFAULT_OPTS
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --scheme=path" "${FZF_COMPLETION_OPTS-}")
|
||||
unset FZF_DEFAULT_COMMAND FZF_DEFAULT_OPTS_FILE
|
||||
if declare -f "$compgen" > /dev/null; then
|
||||
eval "$compgen $(printf %q "$dir")" | __fzf_comprun "$cmd" ${(Q)${(Z+n+)fzf_opts}} -q "$leftover"
|
||||
else
|
||||
if [[ $compgen =~ dir ]]; then
|
||||
walker=dir,follow
|
||||
rest=${FZF_COMPLETION_DIR_OPTS-}
|
||||
else
|
||||
walker=file,dir,follow,hidden
|
||||
rest=${FZF_COMPLETION_PATH_OPTS-}
|
||||
fi
|
||||
__fzf_comprun "$cmd" ${(Q)${(Z+n+)fzf_opts}} -q "$leftover" --walker "$walker" --walker-root="$dir" ${(Q)${(Z+n+)rest}} < /dev/tty
|
||||
fi | while read -r item; do
|
||||
item="${item%$suffix}$suffix"
|
||||
echo -n -E "${(q)item} "
|
||||
done
|
||||
)
|
||||
matches=${matches% }
|
||||
if [ -n "$matches" ]; then
|
||||
LBUFFER="$lbuf$matches$tail"
|
||||
fi
|
||||
zle redisplay
|
||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
||||
zle reset-prompt
|
||||
break
|
||||
fi
|
||||
dir=$(dirname "$dir")
|
||||
@@ -76,69 +198,120 @@ _fzf_dir_completion() {
|
||||
"" "/" ""
|
||||
}
|
||||
|
||||
_fzf_feed_fifo() (
|
||||
_fzf_feed_fifo() {
|
||||
command rm -f "$1"
|
||||
mkfifo "$1"
|
||||
cat <&0 > "$1" &
|
||||
)
|
||||
cat <&0 > "$1" &|
|
||||
}
|
||||
|
||||
_fzf_complete() {
|
||||
local fifo fzf_opts lbuf fzf matches post
|
||||
setopt localoptions ksh_arrays
|
||||
# Split arguments around --
|
||||
local args rest str_arg i sep
|
||||
args=("$@")
|
||||
sep=
|
||||
for i in {0..${#args[@]}}; do
|
||||
if [[ "${args[$i]-}" = -- ]]; then
|
||||
sep=$i
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [[ -n "$sep" ]]; then
|
||||
str_arg=
|
||||
rest=("${args[@]:$((sep + 1)):${#args[@]}}")
|
||||
args=("${args[@]:0:$sep}")
|
||||
else
|
||||
str_arg=$1
|
||||
args=()
|
||||
shift
|
||||
rest=("$@")
|
||||
fi
|
||||
|
||||
local fifo lbuf cmd matches post
|
||||
fifo="${TMPDIR:-/tmp}/fzf-complete-fifo-$$"
|
||||
fzf_opts=$1
|
||||
lbuf=$2
|
||||
post="${funcstack[2]}_post"
|
||||
lbuf=${rest[0]}
|
||||
cmd=$(__fzf_extract_command "$lbuf")
|
||||
post="${funcstack[1]}_post"
|
||||
type $post > /dev/null 2>&1 || post=cat
|
||||
|
||||
[ ${FZF_TMUX:-1} -eq 1 ] && fzf="fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%}" || fzf="fzf"
|
||||
|
||||
_fzf_feed_fifo "$fifo"
|
||||
matches=$(cat "$fifo" | ${=fzf} ${=FZF_COMPLETION_OPTS} ${=fzf_opts} -q "${(Q)prefix}" | $post | tr '\n' ' ')
|
||||
matches=$(
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse" "${FZF_COMPLETION_OPTS-} $str_arg") \
|
||||
FZF_DEFAULT_OPTS_FILE='' \
|
||||
__fzf_comprun "$cmd" "${args[@]}" -q "${(Q)prefix}" < "$fifo" | $post | tr '\n' ' ')
|
||||
if [ -n "$matches" ]; then
|
||||
LBUFFER="$lbuf$matches"
|
||||
fi
|
||||
zle redisplay
|
||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
||||
command rm -f "$fifo"
|
||||
}
|
||||
|
||||
# To use custom hostname lists, override __fzf_list_hosts.
|
||||
# The function is expected to print hostnames, one per line as well as in the
|
||||
# desired sorting and with any duplicates removed, to standard output.
|
||||
if ! declare -f __fzf_list_hosts > /dev/null; then
|
||||
__fzf_list_hosts() {
|
||||
setopt localoptions nonomatch
|
||||
command cat <(command tail -n +1 ~/.ssh/config ~/.ssh/config.d/* /etc/ssh/ssh_config 2> /dev/null | command grep -i '^\s*host\(name\)\? ' | awk '{for (i = 2; i <= NF; i++) print $1 " " $i}' | command grep -v '[*?%]') \
|
||||
<(command grep -oE '^[[a-z0-9.,:-]+' ~/.ssh/known_hosts 2> /dev/null | tr ',' '\n' | tr -d '[' | awk '{ print $1 " " $1 }') \
|
||||
<(command grep -v '^\s*\(#\|$\)' /etc/hosts 2> /dev/null | command grep -Fv '0.0.0.0' | command sed 's/#.*//') |
|
||||
awk '{for (i = 2; i <= NF; i++) print $i}' | sort -u
|
||||
}
|
||||
fi
|
||||
|
||||
_fzf_complete_telnet() {
|
||||
_fzf_complete '+m' "$@" < <(
|
||||
command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0' |
|
||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
||||
)
|
||||
_fzf_complete +m -- "$@" < <(__fzf_list_hosts)
|
||||
}
|
||||
|
||||
# The first and the only argument is the LBUFFER without the current word that contains the trigger.
|
||||
# The current word without the trigger is in the $prefix variable passed from the caller.
|
||||
_fzf_complete_ssh() {
|
||||
_fzf_complete '+m' "$@" < <(
|
||||
command cat <(cat ~/.ssh/config /etc/ssh/ssh_config 2> /dev/null | command grep -i '^host' | command grep -v '*') \
|
||||
<(command grep -oE '^[^ ]+' ~/.ssh/known_hosts | tr ',' '\n' | awk '{ print $1 " " $1 }') \
|
||||
<(command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0') |
|
||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
||||
)
|
||||
local -a tokens
|
||||
tokens=(${(z)1})
|
||||
case ${tokens[-1]} in
|
||||
-i|-F|-E)
|
||||
_fzf_path_completion "$prefix" "$1"
|
||||
;;
|
||||
*)
|
||||
local user
|
||||
[[ $prefix =~ @ ]] && user="${prefix%%@*}@"
|
||||
_fzf_complete +m -- "$@" < <(__fzf_list_hosts | awk -v user="$user" '{print user $0}')
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_fzf_complete_export() {
|
||||
_fzf_complete '-m' "$@" < <(
|
||||
_fzf_complete -m -- "$@" < <(
|
||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_unset() {
|
||||
_fzf_complete '-m' "$@" < <(
|
||||
_fzf_complete -m -- "$@" < <(
|
||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_unalias() {
|
||||
_fzf_complete '+m' "$@" < <(
|
||||
_fzf_complete +m -- "$@" < <(
|
||||
alias | sed 's/=.*//'
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_kill() {
|
||||
_fzf_complete -m --header-lines=1 --no-preview --wrap -- "$@" < <(
|
||||
command ps -eo user,pid,ppid,start,time,command 2> /dev/null ||
|
||||
command ps -eo user,pid,ppid,time,args 2> /dev/null || # For BusyBox
|
||||
command ps --everyone --full --windows # For cygwin
|
||||
)
|
||||
}
|
||||
|
||||
_fzf_complete_kill_post() {
|
||||
awk '{print $2}'
|
||||
}
|
||||
|
||||
fzf-completion() {
|
||||
local tokens cmd prefix trigger tail fzf matches lbuf d_cmds
|
||||
setopt localoptions noshwordsplit noksh_arrays
|
||||
local tokens cmd prefix trigger tail matches lbuf d_cmds
|
||||
setopt localoptions noshwordsplit noksh_arrays noposixbuiltins
|
||||
|
||||
# http://zsh.sourceforge.net/FAQ/zshfaq03.html
|
||||
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
|
||||
@@ -148,31 +321,34 @@ fzf-completion() {
|
||||
return
|
||||
fi
|
||||
|
||||
cmd=${tokens[1]}
|
||||
cmd=$(__fzf_extract_command "$LBUFFER")
|
||||
|
||||
# Explicitly allow for empty trigger.
|
||||
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
||||
[ -z "$trigger" -a ${LBUFFER[-1]} = ' ' ] && tokens+=("")
|
||||
|
||||
# When the trigger starts with ';', it becomes a separate token
|
||||
if [[ ${LBUFFER} = *"${tokens[-2]-}${tokens[-1]}" ]]; then
|
||||
tokens[-2]="${tokens[-2]-}${tokens[-1]}"
|
||||
tokens=(${tokens[0,-2]})
|
||||
fi
|
||||
|
||||
lbuf=$LBUFFER
|
||||
tail=${LBUFFER:$(( ${#LBUFFER} - ${#trigger} ))}
|
||||
# Kill completion (do not require trigger sequence)
|
||||
if [ $cmd = kill -a ${LBUFFER[-1]} = ' ' ]; then
|
||||
[ ${FZF_TMUX:-1} -eq 1 ] && fzf="fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%}" || fzf="fzf"
|
||||
matches=$(ps -ef | sed 1d | ${=fzf} ${=FZF_COMPLETION_OPTS} -m | awk '{print $2}' | tr '\n' ' ')
|
||||
if [ -n "$matches" ]; then
|
||||
LBUFFER="$LBUFFER$matches"
|
||||
fi
|
||||
zle redisplay
|
||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
||||
|
||||
# Trigger sequence given
|
||||
elif [ ${#tokens} -gt 1 -a "$tail" = "$trigger" ]; then
|
||||
d_cmds=(${=FZF_COMPLETION_DIR_COMMANDS:-cd pushd rmdir})
|
||||
if [ ${#tokens} -gt 1 -a "$tail" = "$trigger" ]; then
|
||||
d_cmds=(${=FZF_COMPLETION_DIR_COMMANDS-cd pushd rmdir})
|
||||
|
||||
[ -z "$trigger" ] && prefix=${tokens[-1]} || prefix=${tokens[-1]:0:-${#trigger}}
|
||||
[ -z "${tokens[-1]}" ] && lbuf=$LBUFFER || lbuf=${LBUFFER:0:-${#tokens[-1]}}
|
||||
if [[ $prefix = *'$('* ]] || [[ $prefix = *'<('* ]] || [[ $prefix = *'>('* ]] || [[ $prefix = *':='* ]] || [[ $prefix = *'`'* ]]; then
|
||||
return
|
||||
fi
|
||||
[ -n "${tokens[-1]}" ] && lbuf=${lbuf:0:-${#tokens[-1]}}
|
||||
|
||||
if eval "type _fzf_complete_${cmd} > /dev/null"; then
|
||||
eval "prefix=\"$prefix\" _fzf_complete_${cmd} \"$lbuf\""
|
||||
prefix="$prefix" eval _fzf_complete_${cmd} ${(q)lbuf}
|
||||
zle reset-prompt
|
||||
elif [ ${d_cmds[(i)$cmd]} -le ${#d_cmds} ]; then
|
||||
_fzf_dir_completion "$prefix" "$lbuf"
|
||||
else
|
||||
@@ -192,3 +368,10 @@ fzf-completion() {
|
||||
|
||||
zle -N fzf-completion
|
||||
bindkey '^I' fzf-completion
|
||||
fi
|
||||
|
||||
} always {
|
||||
# Restore the original options.
|
||||
eval $__fzf_completion_options
|
||||
'unset' '__fzf_completion_options'
|
||||
}
|
||||
|
||||
@@ -1,125 +1,146 @@
|
||||
# Key bindings
|
||||
# ------------
|
||||
__fzf_select__() {
|
||||
local cmd="${FZF_CTRL_T_COMMAND:-"command find -L . \\( -path '*/\\.*' -o -fstype 'dev' -o -fstype 'proc' \\) -prune \
|
||||
-o -type f -print \
|
||||
-o -type d -print \
|
||||
-o -type l -print 2> /dev/null | sed 1d | cut -b3-"}"
|
||||
eval "$cmd | fzf -m $FZF_CTRL_T_OPTS" | while read -r item; do
|
||||
printf '%q ' "$item"
|
||||
done
|
||||
echo
|
||||
}
|
||||
# ____ ____
|
||||
# / __/___ / __/
|
||||
# / /_/_ / / /_
|
||||
# / __/ / /_/ __/
|
||||
# /_/ /___/_/ key-bindings.bash
|
||||
#
|
||||
# - $FZF_TMUX_OPTS
|
||||
# - $FZF_CTRL_T_COMMAND
|
||||
# - $FZF_CTRL_T_OPTS
|
||||
# - $FZF_CTRL_R_OPTS
|
||||
# - $FZF_ALT_C_COMMAND
|
||||
# - $FZF_ALT_C_OPTS
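A hedged example of setting these in ~/.bashrc before sourcing this file; all values are illustrative, not defaults:

export FZF_TMUX_OPTS='-p 80%,60%'                  # run the widgets in a tmux popup
export FZF_CTRL_T_OPTS="--preview 'cat {}'"        # file preview for CTRL-T
export FZF_CTRL_R_OPTS='--sort --exact'            # history search behaviour for CTRL-R
export FZF_ALT_C_COMMAND='command find . -type d'  # directory source for ALT-C
export FZF_ALT_C_OPTS="--preview 'ls {}'"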
|
||||
|
||||
if [[ $- =~ i ]]; then
|
||||
|
||||
__fzfcmd() {
|
||||
[ "${FZF_TMUX:-1}" != 0 ] && echo "fzf-tmux -d${FZF_TMUX_HEIGHT:-40%}" || echo "fzf"
|
||||
|
||||
# Key bindings
|
||||
# ------------
|
||||
|
||||
__fzf_defaults() {
|
||||
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
echo "--height ${FZF_TMUX_HEIGHT:-40%} --bind=ctrl-z:ignore $1"
|
||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||
echo "${FZF_DEFAULT_OPTS-} $2"
|
||||
}
|
||||
|
||||
__fzf_select_tmux__() {
|
||||
local height
|
||||
height=${FZF_TMUX_HEIGHT:-40%}
|
||||
if [[ $height =~ %$ ]]; then
|
||||
height="-p ${height%\%}"
|
||||
else
|
||||
height="-l $height"
|
||||
fi
|
||||
__fzf_select__() {
|
||||
FZF_DEFAULT_COMMAND=${FZF_CTRL_T_COMMAND:-} \
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --walker=file,dir,follow,hidden --scheme=path" "${FZF_CTRL_T_OPTS-} -m") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) "$@" |
|
||||
while read -r item; do
|
||||
printf '%q ' "$item" # escape special chars
|
||||
done
|
||||
}
|
||||
|
||||
tmux split-window $height "cd $(printf %q "$PWD"); FZF_DEFAULT_OPTS=$(printf %q "$FZF_DEFAULT_OPTS") PATH=$(printf %q "$PATH") FZF_CTRL_T_COMMAND=$(printf %q "$FZF_CTRL_T_COMMAND") FZF_CTRL_T_OPTS=$(printf %q "$FZF_CTRL_T_OPTS") bash -c 'source \"${BASH_SOURCE[0]}\"; RESULT=\"\$(__fzf_select__)\"; tmux setb -b fzf \"\$RESULT\" \\; pasteb -b fzf -t $TMUX_PANE \\; deleteb -b fzf || tmux send-keys -t $TMUX_PANE \"\$RESULT\"'"
|
||||
__fzfcmd() {
|
||||
[[ -n "${TMUX_PANE-}" ]] && { [[ "${FZF_TMUX:-0}" != 0 ]] || [[ -n "${FZF_TMUX_OPTS-}" ]]; } &&
|
||||
echo "fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- " || echo "fzf"
|
||||
}
|
||||
|
||||
fzf-file-widget() {
|
||||
if __fzf_use_tmux__; then
|
||||
__fzf_select_tmux__
|
||||
else
|
||||
local selected="$(__fzf_select__)"
|
||||
READLINE_LINE="${READLINE_LINE:0:$READLINE_POINT}$selected${READLINE_LINE:$READLINE_POINT}"
|
||||
READLINE_POINT=$(( READLINE_POINT + ${#selected} ))
|
||||
fi
|
||||
local selected="$(__fzf_select__ "$@")"
|
||||
READLINE_LINE="${READLINE_LINE:0:$READLINE_POINT}$selected${READLINE_LINE:$READLINE_POINT}"
|
||||
READLINE_POINT=$(( READLINE_POINT + ${#selected} ))
|
||||
}
|
||||
|
||||
__fzf_cd__() {
|
||||
local cmd dir
|
||||
cmd="${FZF_ALT_C_COMMAND:-"command find -L . \\( -path '*/\\.*' -o -fstype 'dev' -o -fstype 'proc' \\) -prune \
|
||||
-o -type d -print 2> /dev/null | sed 1d | cut -b3-"}"
|
||||
dir=$(eval "$cmd | $(__fzfcmd) +m $FZF_ALT_C_OPTS") && printf 'cd %q' "$dir"
|
||||
local dir
|
||||
dir=$(
|
||||
FZF_DEFAULT_COMMAND=${FZF_ALT_C_COMMAND:-} \
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --walker=dir,follow,hidden --scheme=path" "${FZF_ALT_C_OPTS-} +m") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd)
|
||||
) && printf 'builtin cd -- %q' "$(builtin unset CDPATH && builtin cd -- "$dir" && builtin pwd)"
|
||||
}
|
||||
|
||||
__fzf_history__() (
|
||||
local line
|
||||
shopt -u nocaseglob nocasematch
|
||||
line=$(
|
||||
HISTTIMEFORMAT= history |
|
||||
eval "$(__fzfcmd) +s --tac +m -n2..,.. --tiebreak=index --toggle-sort=ctrl-r $FZF_CTRL_R_OPTS" |
|
||||
command grep '^ *[0-9]') &&
|
||||
if [[ $- =~ H ]]; then
|
||||
sed 's/^ *\([0-9]*\)\** .*/!\1/' <<< "$line"
|
||||
if command -v perl > /dev/null; then
|
||||
__fzf_history__() {
|
||||
local output script
|
||||
script='BEGIN { getc; $/ = "\n\t"; $HISTCOUNT = $ENV{last_hist} + 1 } s/^[ *]//; s/\n/\n\t/gm; print $HISTCOUNT - $. . "\t$_" if !$seen{$_}++'
|
||||
output=$(
|
||||
set +o pipefail
|
||||
builtin fc -lnr -2147483648 |
|
||||
last_hist=$(HISTTIMEFORMAT='' builtin history 1) command perl -n -l0 -e "$script" |
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort --wrap-sign '"$'\t'"↳ ' --highlight-line ${FZF_CTRL_R_OPTS-} +m --read0") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) --query "$READLINE_LINE"
|
||||
) || return
|
||||
READLINE_LINE=$(command perl -pe 's/^\d*\t//' <<< "$output")
|
||||
if [[ -z "$READLINE_POINT" ]]; then
|
||||
echo "$READLINE_LINE"
|
||||
else
|
||||
sed 's/^ *\([0-9]*\)\** *//' <<< "$line"
|
||||
READLINE_POINT=0x7fffffff
|
||||
fi
|
||||
)
|
||||
}
|
||||
else # awk - fallback for POSIX systems
|
||||
__fzf_history__() {
|
||||
local output script n x y z d
|
||||
if [[ -z $__fzf_awk ]]; then
|
||||
__fzf_awk=awk
|
||||
# choose the faster mawk if: it's installed && build date >= 20230322 && version >= 1.3.4
|
||||
IFS=' .' read n x y z d <<< $(command mawk -W version 2> /dev/null)
|
||||
[[ $n == mawk ]] && (( d >= 20230302 && (x *1000 +y) *1000 +z >= 1003004 )) && __fzf_awk=mawk
|
||||
fi
|
||||
[[ $(HISTTIMEFORMAT='' builtin history 1) =~ [[:digit:]]+ ]] # how many history entries
|
||||
script='function P(b) { ++n; sub(/^[ *]/, "", b); if (!seen[b]++) { printf "%d\t%s%c", '$((BASH_REMATCH + 1))' - n, b, 0 } }
|
||||
NR==1 { b = substr($0, 2); next }
|
||||
/^\t/ { P(b); b = substr($0, 2); next }
|
||||
{ b = b RS $0 }
|
||||
END { if (NR) P(b) }'
|
||||
output=$(
|
||||
set +o pipefail
|
||||
builtin fc -lnr -2147483648 2> /dev/null | # ( $'\t '<lines>$'\n' )* ; <lines> ::= [^\n]* ( $'\n'<lines> )*
|
||||
command $__fzf_awk "$script" | # ( <counter>$'\t'<lines>$'\000' )*
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort --wrap-sign '"$'\t'"↳ ' --highlight-line ${FZF_CTRL_R_OPTS-} +m --read0") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) --query "$READLINE_LINE"
|
||||
) || return
|
||||
READLINE_LINE=${output#*$'\t'}
|
||||
if [[ -z "$READLINE_POINT" ]]; then
|
||||
echo "$READLINE_LINE"
|
||||
else
|
||||
READLINE_POINT=0x7fffffff
|
||||
fi
|
||||
}
|
||||
fi
|
||||
|
||||
__fzf_use_tmux__() {
|
||||
[ -n "$TMUX_PANE" ] && [ "${FZF_TMUX:-1}" != 0 ] && [ ${LINES:-40} -gt 15 ]
|
||||
}
|
||||
# Required to refresh the prompt after fzf
|
||||
bind -m emacs-standard '"\er": redraw-current-line'
|
||||
|
||||
[ $BASH_VERSINFO -gt 3 ] && __use_bind_x=1 || __use_bind_x=0
|
||||
__fzf_use_tmux__ && __use_tmux=1 || __use_tmux=0
|
||||
|
||||
if [[ ! -o vi ]]; then
|
||||
# Required to refresh the prompt after fzf
|
||||
bind '"\er": redraw-current-line'
|
||||
bind '"\e^": history-expand-line'
|
||||
bind -m vi-command '"\C-z": emacs-editing-mode'
|
||||
bind -m vi-insert '"\C-z": emacs-editing-mode'
|
||||
bind -m emacs-standard '"\C-z": vi-editing-mode'
|
||||
|
||||
if (( BASH_VERSINFO[0] < 4 )); then
|
||||
# CTRL-T - Paste the selected file path into the command line
|
||||
if [ $__use_bind_x -eq 1 ]; then
|
||||
bind -x '"\C-t": "fzf-file-widget"'
|
||||
elif [ $__use_tmux -eq 1 ]; then
|
||||
bind '"\C-t": " \C-u \C-a\C-k$(__fzf_select_tmux__)\e\C-e\C-y\C-a\C-d\C-y\ey\C-h"'
|
||||
else
|
||||
bind '"\C-t": " \C-u \C-a\C-k$(__fzf_select__)\e\C-e\C-y\C-a\C-y\ey\C-h\C-e\er \C-h"'
|
||||
if [[ "${FZF_CTRL_T_COMMAND-x}" != "" ]]; then
|
||||
bind -m emacs-standard '"\C-t": " \C-b\C-k \C-u`__fzf_select__`\e\C-e\er\C-a\C-y\C-h\C-e\e \C-y\ey\C-x\C-x\C-f"'
|
||||
bind -m vi-command '"\C-t": "\C-z\C-t\C-z"'
|
||||
bind -m vi-insert '"\C-t": "\C-z\C-t\C-z"'
|
||||
fi
|
||||
|
||||
# CTRL-R - Paste the selected command from history into the command line
|
||||
bind '"\C-r": " \C-e\C-u`__fzf_history__`\e\C-e\e^\er"'
|
||||
|
||||
# ALT-C - cd into the selected directory
|
||||
bind '"\ec": " \C-e\C-u`__fzf_cd__`\e\C-e\er\C-m"'
|
||||
bind -m emacs-standard '"\C-r": "\C-e \C-u\C-y\ey\C-u`__fzf_history__`\e\C-e\er"'
|
||||
bind -m vi-command '"\C-r": "\C-z\C-r\C-z"'
|
||||
bind -m vi-insert '"\C-r": "\C-z\C-r\C-z"'
|
||||
else
|
||||
# We'd usually use "\e" to enter vi-movement-mode so we can do our magic,
|
||||
# but this incurs a very noticeable delay of a half second or so,
|
||||
# because many other commands start with "\e".
|
||||
# Instead, we bind an unused key, "\C-x\C-a",
|
||||
# to also enter vi-movement-mode,
|
||||
# and then use that thereafter.
|
||||
# (We imagine that "\C-x\C-a" is relatively unlikely to be in use.)
|
||||
bind '"\C-x\C-a": vi-movement-mode'
|
||||
|
||||
bind '"\C-x\C-e": shell-expand-line'
|
||||
bind '"\C-x\C-r": redraw-current-line'
|
||||
bind '"\C-x^": history-expand-line'
|
||||
|
||||
# CTRL-T - Paste the selected file path into the command line
|
||||
# - FIXME: Selected items are attached to the end regardless of cursor position
|
||||
if [ $__use_bind_x -eq 1 ]; then
|
||||
bind -x '"\C-t": "fzf-file-widget"'
|
||||
elif [ $__use_tmux -eq 1 ]; then
|
||||
bind '"\C-t": "\C-x\C-a$a \C-x\C-addi$(__fzf_select_tmux__)\C-x\C-e\C-x\C-a0P$xa"'
|
||||
else
|
||||
bind '"\C-t": "\C-x\C-a$a \C-x\C-addi$(__fzf_select__)\C-x\C-e\C-x\C-a0Px$a \C-x\C-r\C-x\C-axa "'
|
||||
if [[ "${FZF_CTRL_T_COMMAND-x}" != "" ]]; then
|
||||
bind -m emacs-standard -x '"\C-t": fzf-file-widget'
|
||||
bind -m vi-command -x '"\C-t": fzf-file-widget'
|
||||
bind -m vi-insert -x '"\C-t": fzf-file-widget'
|
||||
fi
|
||||
bind -m vi-command '"\C-t": "i\C-t"'
|
||||
|
||||
# CTRL-R - Paste the selected command from history into the command line
|
||||
bind '"\C-r": "\C-x\C-addi$(__fzf_history__)\C-x\C-e\C-x^\C-x\C-a$a\C-x\C-r"'
|
||||
bind -m vi-command '"\C-r": "i\C-r"'
|
||||
|
||||
# ALT-C - cd into the selected directory
|
||||
bind '"\ec": "\C-x\C-addi$(__fzf_cd__)\C-x\C-e\C-x\C-r\C-m"'
|
||||
bind -m vi-command '"\ec": "ddi$(__fzf_cd__)\C-x\C-e\C-x\C-r\C-m"'
|
||||
bind -m emacs-standard -x '"\C-r": __fzf_history__'
|
||||
bind -m vi-command -x '"\C-r": __fzf_history__'
|
||||
bind -m vi-insert -x '"\C-r": __fzf_history__'
|
||||
fi
|
||||
|
||||
unset -v __use_tmux __use_bind_x
|
||||
# ALT-C - cd into the selected directory
|
||||
if [[ "${FZF_ALT_C_COMMAND-x}" != "" ]]; then
|
||||
bind -m emacs-standard '"\ec": " \C-b\C-k \C-u`__fzf_cd__`\e\C-e\er\C-m\C-y\C-h\e \C-y\ey\C-x\C-x\C-d"'
|
||||
bind -m vi-command '"\ec": "\C-z\ec\C-z"'
|
||||
bind -m vi-insert '"\ec": "\C-z\ec\C-z"'
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
@@ -1,71 +1,197 @@
|
||||
# ____ ____
|
||||
# / __/___ / __/
|
||||
# / /_/_ / / /_
|
||||
# / __/ / /_/ __/
|
||||
# /_/ /___/_/ key-bindings.fish
|
||||
#
|
||||
# - $FZF_TMUX_OPTS
|
||||
# - $FZF_CTRL_T_COMMAND
|
||||
# - $FZF_CTRL_T_OPTS
|
||||
# - $FZF_CTRL_R_OPTS
|
||||
# - $FZF_ALT_C_COMMAND
|
||||
# - $FZF_ALT_C_OPTS
|
||||
|
||||
status is-interactive; or exit 0
|
||||
|
||||
|
||||
# Key bindings
|
||||
# ------------
|
||||
function fzf_key_bindings
|
||||
# Due to a bug in fish, we cannot use command substitution,
# so we use a temporary file instead
|
||||
if [ -z "$TMPDIR" ]
|
||||
set -g TMPDIR /tmp
|
||||
|
||||
function __fzf_defaults
|
||||
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
test -n "$FZF_TMUX_HEIGHT"; or set FZF_TMUX_HEIGHT 40%
|
||||
echo "--height $FZF_TMUX_HEIGHT --bind=ctrl-z:ignore" $argv[1]
|
||||
command cat "$FZF_DEFAULT_OPTS_FILE" 2> /dev/null
|
||||
echo $FZF_DEFAULT_OPTS $argv[2]
|
||||
end
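# Illustrative call (the option values are assumed for the example): with
# FZF_TMUX_HEIGHT unset and FZF_DEFAULT_OPTS set to '--layout=reverse',
#   __fzf_defaults "--walker=dir" "+m"
# prints "--height 40% --bind=ctrl-z:ignore --walker=dir", then the contents of
# $FZF_DEFAULT_OPTS_FILE (if readable), then "--layout=reverse +m"; since later
# fzf options override earlier ones, user-supplied options win over the
# prepended defaults.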
|
||||
|
||||
function __fzf_escape
|
||||
while read item
|
||||
echo -n (echo -n "$item" | sed -E 's/([ "$~'\''([{<>})])/\\\\\\1/g')' '
|
||||
# Store current token in $dir as root for the 'find' command
|
||||
function fzf-file-widget -d "List files and folders"
|
||||
set -l commandline (__fzf_parse_commandline)
|
||||
set -lx dir $commandline[1]
|
||||
set -l fzf_query $commandline[2]
|
||||
set -l prefix $commandline[3]
|
||||
|
||||
test -n "$FZF_TMUX_HEIGHT"; or set FZF_TMUX_HEIGHT 40%
|
||||
begin
|
||||
set -lx FZF_DEFAULT_OPTS (__fzf_defaults "--reverse --walker=file,dir,follow,hidden --scheme=path --walker-root='$dir'" "$FZF_CTRL_T_OPTS")
|
||||
set -lx FZF_DEFAULT_COMMAND "$FZF_CTRL_T_COMMAND"
|
||||
set -lx FZF_DEFAULT_OPTS_FILE ''
|
||||
eval (__fzfcmd)' -m --query "'$fzf_query'"' | while read -l r; set result $result $r; end
|
||||
end
|
||||
if [ -z "$result" ]
|
||||
commandline -f repaint
|
||||
return
|
||||
else
|
||||
# Remove last token from commandline.
|
||||
commandline -t ""
|
||||
end
|
||||
for i in $result
|
||||
commandline -it -- $prefix
|
||||
commandline -it -- (string escape $i)
|
||||
commandline -it -- ' '
|
||||
end
|
||||
commandline -f repaint
|
||||
end
|
||||
|
||||
function fzf-file-widget
|
||||
set -q FZF_CTRL_T_COMMAND; or set -l FZF_CTRL_T_COMMAND "
|
||||
command find -L . \\( -path '*/\\.*' -o -fstype 'dev' -o -fstype 'proc' \\) -prune \
|
||||
-o -type f -print \
|
||||
-o -type d -print \
|
||||
-o -type l -print 2> /dev/null | sed 1d | cut -b3-"
|
||||
eval "$FZF_CTRL_T_COMMAND | "(__fzfcmd)" -m $FZF_CTRL_T_OPTS > $TMPDIR/fzf.result"
|
||||
and for i in (seq 20); commandline -i (cat $TMPDIR/fzf.result | __fzf_escape) 2> /dev/null; and break; sleep 0.1; end
|
||||
function fzf-history-widget -d "Show command history"
|
||||
test -n "$FZF_TMUX_HEIGHT"; or set FZF_TMUX_HEIGHT 40%
|
||||
begin
|
||||
set -l FISH_MAJOR (echo $version | cut -f1 -d.)
|
||||
set -l FISH_MINOR (echo $version | cut -f2 -d.)
|
||||
|
||||
# merge history from other sessions before searching
|
||||
if test -z "$fish_private_mode"
|
||||
builtin history merge
|
||||
end
|
||||
|
||||
# history's -z flag is needed for multi-line support.
|
||||
# history's -z flag was added in fish 2.4.0, so don't use it for versions
|
||||
# before 2.4.0.
|
||||
if [ "$FISH_MAJOR" -gt 2 -o \( "$FISH_MAJOR" -eq 2 -a "$FISH_MINOR" -ge 4 \) ];
|
||||
if type -P perl > /dev/null 2>&1
|
||||
set -lx FZF_DEFAULT_OPTS (__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort --wrap-sign '"\t"↳ ' --highlight-line $FZF_CTRL_R_OPTS +m")
|
||||
set -lx FZF_DEFAULT_OPTS_FILE ''
|
||||
builtin history -z --reverse | command perl -0 -pe 's/^/$.\t/g; s/\n/\n\t/gm' | eval (__fzfcmd) --tac --read0 --print0 -q '(commandline)' | command perl -pe 's/^\d*\t//' | read -lz result
|
||||
and commandline -- $result
|
||||
else
|
||||
set -lx FZF_DEFAULT_OPTS (__fzf_defaults "" "--scheme=history --bind=ctrl-r:toggle-sort --wrap-sign '"\t"↳ ' --highlight-line $FZF_CTRL_R_OPTS +m")
|
||||
set -lx FZF_DEFAULT_OPTS_FILE ''
|
||||
builtin history -z | eval (__fzfcmd) --read0 --print0 -q '(commandline)' | read -lz result
|
||||
and commandline -- $result
|
||||
end
|
||||
else
|
||||
builtin history | eval (__fzfcmd) -q '(commandline)' | read -l result
|
||||
and commandline -- $result
|
||||
end
|
||||
end
|
||||
commandline -f repaint
|
||||
rm -f $TMPDIR/fzf.result
|
||||
end
|
||||
|
||||
function fzf-history-widget
|
||||
history | eval (__fzfcmd) +s +m --tiebreak=index --toggle-sort=ctrl-r $FZF_CTRL_R_OPTS -q '(commandline)' > $TMPDIR/fzf.result
|
||||
and commandline -- (cat $TMPDIR/fzf.result)
|
||||
commandline -f repaint
|
||||
rm -f $TMPDIR/fzf.result
|
||||
end
|
||||
function fzf-cd-widget -d "Change directory"
|
||||
set -l commandline (__fzf_parse_commandline)
|
||||
set -lx dir $commandline[1]
|
||||
set -l fzf_query $commandline[2]
|
||||
set -l prefix $commandline[3]
|
||||
|
||||
test -n "$FZF_TMUX_HEIGHT"; or set FZF_TMUX_HEIGHT 40%
|
||||
begin
|
||||
set -lx FZF_DEFAULT_OPTS (__fzf_defaults "--reverse --walker=dir,follow,hidden --scheme=path --walker-root='$dir'" "$FZF_ALT_C_OPTS")
|
||||
set -lx FZF_DEFAULT_OPTS_FILE ''
|
||||
set -lx FZF_DEFAULT_COMMAND "$FZF_ALT_C_COMMAND"
|
||||
eval (__fzfcmd)' +m --query "'$fzf_query'"' | read -l result
|
||||
|
||||
if [ -n "$result" ]
|
||||
cd -- $result
|
||||
|
||||
# Remove last token from commandline.
|
||||
commandline -t ""
|
||||
commandline -it -- $prefix
|
||||
end
|
||||
end
|
||||
|
||||
function fzf-cd-widget
|
||||
set -q FZF_ALT_C_COMMAND; or set -l FZF_ALT_C_COMMAND "
|
||||
command find -L . \\( -path '*/\\.*' -o -fstype 'dev' -o -fstype 'proc' \\) -prune \
|
||||
-o -type d -print 2> /dev/null | sed 1d | cut -b3-"
|
||||
# Fish hangs if the command before the pipe redirects stderr (2> /dev/null)
|
||||
eval "$FZF_ALT_C_COMMAND | "(__fzfcmd)" +m $FZF_ALT_C_OPTS > $TMPDIR/fzf.result"
|
||||
[ (cat $TMPDIR/fzf.result | wc -l) -gt 0 ]
|
||||
and cd (cat $TMPDIR/fzf.result)
|
||||
commandline -f repaint
|
||||
rm -f $TMPDIR/fzf.result
|
||||
end
|
||||
|
||||
function __fzfcmd
|
||||
set -q FZF_TMUX; or set FZF_TMUX 1
|
||||
|
||||
if [ $FZF_TMUX -eq 1 ]
|
||||
if set -q FZF_TMUX_HEIGHT
|
||||
echo "fzf-tmux -d$FZF_TMUX_HEIGHT"
|
||||
else
|
||||
echo "fzf-tmux -d40%"
|
||||
end
|
||||
test -n "$FZF_TMUX"; or set FZF_TMUX 0
|
||||
test -n "$FZF_TMUX_HEIGHT"; or set FZF_TMUX_HEIGHT 40%
|
||||
if [ -n "$FZF_TMUX_OPTS" ]
|
||||
echo "fzf-tmux $FZF_TMUX_OPTS -- "
|
||||
else if [ $FZF_TMUX -eq 1 ]
|
||||
echo "fzf-tmux -d$FZF_TMUX_HEIGHT -- "
|
||||
else
|
||||
echo "fzf"
|
||||
end
|
||||
end
|
||||
|
||||
bind \ct fzf-file-widget
|
||||
bind \cr fzf-history-widget
|
||||
bind \ec fzf-cd-widget
|
||||
if not set -q FZF_CTRL_T_COMMAND; or test -n "$FZF_CTRL_T_COMMAND"
|
||||
bind \ct fzf-file-widget
|
||||
end
|
||||
if not set -q FZF_ALT_C_COMMAND; or test -n "$FZF_ALT_C_COMMAND"
|
||||
bind \ec fzf-cd-widget
|
||||
end
|
||||
|
||||
if bind -M insert > /dev/null 2>&1
|
||||
bind -M insert \ct fzf-file-widget
|
||||
bind -M insert \cr fzf-history-widget
|
||||
bind -M insert \ec fzf-cd-widget
|
||||
if not set -q FZF_CTRL_T_COMMAND; or test -n "$FZF_CTRL_T_COMMAND"
|
||||
bind -M insert \ct fzf-file-widget
|
||||
end
|
||||
if not set -q FZF_ALT_C_COMMAND; or test -n "$FZF_ALT_C_COMMAND"
|
||||
bind -M insert \ec fzf-cd-widget
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function __fzf_parse_commandline -d 'Parse the current command line token and return split of existing filepath, fzf query, and optional -option= prefix'
|
||||
set -l commandline (commandline -t)
|
||||
|
||||
# strip -option= from token if present
|
||||
set -l prefix (string match -r -- '^-[^\s=]+=' $commandline)
|
||||
set commandline (string replace -- "$prefix" '' $commandline)
|
||||
|
||||
# eval is used to do shell expansion on paths
|
||||
eval set commandline $commandline
|
||||
|
||||
if [ -z $commandline ]
|
||||
# Default to current directory with no --query
|
||||
set dir '.'
|
||||
set fzf_query ''
|
||||
else
|
||||
set dir (__fzf_get_dir $commandline)
|
||||
|
||||
if [ "$dir" = "." -a (string sub -l 1 -- $commandline) != '.' ]
|
||||
# if $dir is "." but commandline is not a relative path, this means no file path found
|
||||
set fzf_query $commandline
|
||||
else
|
||||
# Also remove trailing slash after dir, to "split" input properly
|
||||
set fzf_query (string replace -r "^$dir/?" -- '' "$commandline")
|
||||
end
|
||||
end
|
||||
|
||||
echo $dir
|
||||
echo $fzf_query
|
||||
echo $prefix
|
||||
end
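# Worked example (the paths are assumed to exist only for illustration): for
# the current token '--path=src/fzf/ma' with an existing directory ./src/fzf,
# the prefix becomes '--path=', $dir becomes 'src/fzf', and the fzf query
# becomes 'ma'; the three values are echoed on separate lines so callers can
# read them back as a list.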
|
||||
|
||||
function __fzf_get_dir -d 'Find the longest existing filepath from input string'
|
||||
set dir $argv
|
||||
|
||||
# Strip all trailing slashes. Ignore if $dir is root dir (/)
|
||||
if [ (string length -- $dir) -gt 1 ]
|
||||
set dir (string replace -r '/*$' -- '' $dir)
|
||||
end
|
||||
|
||||
# Iteratively check if dir exists and strip tail end of path
|
||||
while [ ! -d "$dir" ]
|
||||
# If path is absolute, this can keep going until ends up at /
|
||||
# If path is relative, this can keep going until entire input is consumed, dirname returns "."
|
||||
set dir (dirname -- "$dir")
|
||||
end
|
||||
|
||||
echo $dir
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
@@ -1,16 +1,59 @@
|
||||
# ____ ____
|
||||
# / __/___ / __/
|
||||
# / /_/_ / / /_
|
||||
# / __/ / /_/ __/
|
||||
# /_/ /___/_/ key-bindings.zsh
|
||||
#
|
||||
# - $FZF_TMUX_OPTS
|
||||
# - $FZF_CTRL_T_COMMAND
|
||||
# - $FZF_CTRL_T_OPTS
|
||||
# - $FZF_CTRL_R_OPTS
|
||||
# - $FZF_ALT_C_COMMAND
|
||||
# - $FZF_ALT_C_OPTS
|
||||
|
||||
|
||||
# Key bindings
|
||||
# ------------
|
||||
if [[ $- == *i* ]]; then
|
||||
|
||||
# The code at the top and the bottom of this file is the same as in completion.zsh.
|
||||
# Refer to that file for explanation.
|
||||
if 'zmodload' 'zsh/parameter' 2>'/dev/null' && (( ${+options} )); then
|
||||
__fzf_key_bindings_options="options=(${(j: :)${(kv)options[@]}})"
|
||||
else
|
||||
() {
|
||||
__fzf_key_bindings_options="setopt"
|
||||
'local' '__fzf_opt'
|
||||
for __fzf_opt in "${(@)${(@f)$(set -o)}%% *}"; do
|
||||
if [[ -o "$__fzf_opt" ]]; then
|
||||
__fzf_key_bindings_options+=" -o $__fzf_opt"
|
||||
else
|
||||
__fzf_key_bindings_options+=" +o $__fzf_opt"
|
||||
fi
|
||||
done
|
||||
}
|
||||
fi
|
||||
|
||||
'builtin' 'emulate' 'zsh' && 'builtin' 'setopt' 'no_aliases'
|
||||
|
||||
{
|
||||
if [[ -o interactive ]]; then
|
||||
|
||||
__fzf_defaults() {
|
||||
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||
echo "--height ${FZF_TMUX_HEIGHT:-40%} --bind=ctrl-z:ignore $1"
|
||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||
echo "${FZF_DEFAULT_OPTS-} $2"
|
||||
}
|
||||
|
||||
# CTRL-T - Paste the selected file path(s) into the command line
|
||||
__fsel() {
|
||||
local cmd="${FZF_CTRL_T_COMMAND:-"command find -L . \\( -path '*/\\.*' -o -fstype 'dev' -o -fstype 'proc' \\) -prune \
|
||||
-o -type f -print \
|
||||
-o -type d -print \
|
||||
-o -type l -print 2> /dev/null | sed 1d | cut -b3-"}"
|
||||
setopt localoptions pipefail 2> /dev/null
|
||||
eval "$cmd | $(__fzfcmd) -m $FZF_CTRL_T_OPTS" | while read item; do
|
||||
echo -n "${(q)item} "
|
||||
__fzf_select() {
|
||||
setopt localoptions pipefail no_aliases 2> /dev/null
|
||||
local item
|
||||
FZF_DEFAULT_COMMAND=${FZF_CTRL_T_COMMAND:-} \
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --walker=file,dir,follow,hidden --scheme=path" "${FZF_CTRL_T_OPTS-} -m") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) "$@" < /dev/tty | while read -r item; do
|
||||
echo -n -E "${(q)item} "
|
||||
done
|
||||
local ret=$?
|
||||
echo
|
||||
@@ -18,51 +61,83 @@ __fsel() {
|
||||
}
|
||||
|
||||
__fzfcmd() {
|
||||
[ ${FZF_TMUX:-1} -eq 1 ] && echo "fzf-tmux -d${FZF_TMUX_HEIGHT:-40%}" || echo "fzf"
|
||||
[ -n "${TMUX_PANE-}" ] && { [ "${FZF_TMUX:-0}" != 0 ] || [ -n "${FZF_TMUX_OPTS-}" ]; } &&
|
||||
echo "fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- " || echo "fzf"
|
||||
}
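# Sketch of the selection above (tmux state assumed for the example): inside a
# tmux pane with FZF_TMUX_OPTS='-p80%' this prints "fzf-tmux -p80% -- "; outside
# tmux, or with FZF_TMUX=0 and no FZF_TMUX_OPTS, it prints plain "fzf".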
|
||||
|
||||
fzf-file-widget() {
|
||||
LBUFFER="${LBUFFER}$(__fsel)"
|
||||
LBUFFER="${LBUFFER}$(__fzf_select)"
|
||||
local ret=$?
|
||||
zle redisplay
|
||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
||||
zle reset-prompt
|
||||
return $ret
|
||||
}
|
||||
zle -N fzf-file-widget
|
||||
bindkey '^T' fzf-file-widget
|
||||
if [[ "${FZF_CTRL_T_COMMAND-x}" != "" ]]; then
|
||||
zle -N fzf-file-widget
|
||||
bindkey -M emacs '^T' fzf-file-widget
|
||||
bindkey -M vicmd '^T' fzf-file-widget
|
||||
bindkey -M viins '^T' fzf-file-widget
|
||||
fi
|
||||
|
||||
# ALT-C - cd into the selected directory
|
||||
fzf-cd-widget() {
|
||||
local cmd="${FZF_ALT_C_COMMAND:-"command find -L . \\( -path '*/\\.*' -o -fstype 'dev' -o -fstype 'proc' \\) -prune \
|
||||
-o -type d -print 2> /dev/null | sed 1d | cut -b3-"}"
|
||||
setopt localoptions pipefail 2> /dev/null
|
||||
cd "${$(eval "$cmd | $(__fzfcmd) +m $FZF_ALT_C_OPTS"):-.}"
|
||||
setopt localoptions pipefail no_aliases 2> /dev/null
|
||||
local dir="$(
|
||||
FZF_DEFAULT_COMMAND=${FZF_ALT_C_COMMAND:-} \
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --walker=dir,follow,hidden --scheme=path" "${FZF_ALT_C_OPTS-} +m") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) < /dev/tty)"
|
||||
if [[ -z "$dir" ]]; then
|
||||
zle redisplay
|
||||
return 0
|
||||
fi
|
||||
zle push-line # Clear buffer. Auto-restored on next prompt.
|
||||
BUFFER="builtin cd -- ${(q)dir:a}"
|
||||
zle accept-line
|
||||
local ret=$?
|
||||
unset dir # ensure this doesn't end up appearing in prompt expansion
|
||||
zle reset-prompt
|
||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
||||
return $ret
|
||||
}
|
||||
zle -N fzf-cd-widget
|
||||
bindkey '\ec' fzf-cd-widget
|
||||
if [[ "${FZF_ALT_C_COMMAND-x}" != "" ]]; then
|
||||
zle -N fzf-cd-widget
|
||||
bindkey -M emacs '\ec' fzf-cd-widget
|
||||
bindkey -M vicmd '\ec' fzf-cd-widget
|
||||
bindkey -M viins '\ec' fzf-cd-widget
|
||||
fi
|
||||
|
||||
# CTRL-R - Paste the selected command from history into the command line
|
||||
fzf-history-widget() {
|
||||
local selected num
|
||||
setopt localoptions noglobsubst pipefail 2> /dev/null
|
||||
selected=( $(fc -l 1 | eval "$(__fzfcmd) +s --tac +m -n2..,.. --tiebreak=index --toggle-sort=ctrl-r $FZF_CTRL_R_OPTS -q ${(q)LBUFFER}") )
|
||||
local selected
|
||||
setopt localoptions noglobsubst noposixbuiltins pipefail no_aliases noglob nobash_rematch 2> /dev/null
|
||||
# Ensure the associative history array, which maps event numbers to the full
|
||||
# history lines, is loaded, and that Perl is installed for multi-line output.
|
||||
if zmodload -F zsh/parameter p:history 2>/dev/null && (( ${#commands[perl]} )); then
|
||||
selected="$(printf '%s\t%s\000' "${(kv)history[@]}" |
|
||||
perl -0 -ne 'if (!$seen{(/^\s*[0-9]+\**\t(.*)/s, $1)}++) { s/\n/\n\t/g; print; }' |
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort --wrap-sign '\t↳ ' --highlight-line ${FZF_CTRL_R_OPTS-} --query=${(qqq)LBUFFER} +m --read0") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd))"
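# Illustrative record flow (history contents assumed): a multi-line entry such
# as 123 => $'git log\n-p' is printed above as $'123\tgit log\n-p\0'; perl then
# de-duplicates on the command text and rewrites it to $'123\tgit log\n\t-p\0',
# so continuation lines stay attached to their entry while -n2..,.. keeps the
# event number out of the match scope.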
|
||||
else
|
||||
selected="$(fc -rl 1 | awk '{ cmd=$0; sub(/^[ \t]*[0-9]+\**[ \t]+/, "", cmd); if (!seen[cmd]++) print $0 }' |
|
||||
FZF_DEFAULT_OPTS=$(__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort --wrap-sign '\t↳ ' --highlight-line ${FZF_CTRL_R_OPTS-} --query=${(qqq)LBUFFER} +m") \
|
||||
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd))"
|
||||
fi
|
||||
local ret=$?
|
||||
if [ -n "$selected" ]; then
|
||||
num=$selected[1]
|
||||
if [ -n "$num" ]; then
|
||||
zle vi-fetch-history -n $num
|
||||
if [[ $(awk '{print $1; exit}' <<< "$selected") =~ ^[1-9][0-9]* ]]; then
|
||||
zle vi-fetch-history -n $MATCH
|
||||
else # selected is a custom query, not from history
|
||||
LBUFFER="$selected"
|
||||
fi
|
||||
fi
|
||||
zle redisplay
|
||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
||||
zle reset-prompt
|
||||
return $ret
|
||||
}
|
||||
zle -N fzf-history-widget
|
||||
bindkey '^R' fzf-history-widget
|
||||
|
||||
zle -N fzf-history-widget
|
||||
bindkey -M emacs '^R' fzf-history-widget
|
||||
bindkey -M vicmd '^R' fzf-history-widget
|
||||
bindkey -M viins '^R' fzf-history-widget
|
||||
fi
|
||||
|
||||
} always {
|
||||
eval $__fzf_key_bindings_options
|
||||
'unset' '__fzf_key_bindings_options'
|
||||
}
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
FROM ubuntu:14.04
|
||||
MAINTAINER Junegunn Choi <junegunn.c@gmail.com>
|
||||
|
||||
# apt-get
|
||||
RUN apt-get update && apt-get -y upgrade && \
|
||||
apt-get install -y --force-yes git curl build-essential
|
||||
|
||||
# Install Go 1.4
|
||||
RUN cd / && curl \
|
||||
https://storage.googleapis.com/golang/go1.4.2.linux-amd64.tar.gz | \
|
||||
tar -xz && mv go go1.4 && \
|
||||
sed -i 's@#define PTHREAD_KEYS_MAX 128@@' /go1.4/src/runtime/cgo/gcc_android_arm.c
|
||||
|
||||
ENV GOROOT /go1.4
|
||||
ENV PATH /go1.4/bin:$PATH
|
||||
|
||||
RUN cd / && \
|
||||
curl -O http://dl.google.com/android/ndk/android-ndk-r10e-linux-x86_64.bin && \
|
||||
chmod 755 /android-ndk* && /android-ndk-r10e-linux-x86_64.bin && \
|
||||
mv android-ndk-r10e /android-ndk
|
||||
|
||||
RUN cd /android-ndk && bash ./build/tools/make-standalone-toolchain.sh --platform=android-21 --install-dir=/ndk --arch=arm
|
||||
|
||||
ENV NDK_CC /ndk/bin/arm-linux-androideabi-gcc
|
||||
|
||||
RUN cd $GOROOT/src && \
|
||||
CC_FOR_TARGET=$NDK_CC GOOS=android GOARCH=arm GOARM=7 ./make.bash
|
||||
|
||||
RUN cd / && curl \
|
||||
http://ftp.gnu.org/gnu/ncurses/ncurses-5.9.tar.gz | \
|
||||
tar -xz && cd /ncurses-5.9 && \
|
||||
./configure CC=$NDK_CC CFLAGS="-fPIE -march=armv7-a -mfpu=neon -mhard-float -Wl,--no-warn-mismatch" LDFLAGS="-march=armv7-a -Wl,--no-warn-mismatch" --host=arm-linux --enable-overwrite --enable-const --without-cxx-binding --without-shared --without-debug --enable-widec --enable-ext-colors --enable-ext-mouse --enable-pc-files --with-pkg-config-libdir=$PKG_CONFIG_LIBDIR --without-manpages --without-ada --disable-shared --without-tests --prefix=/ndk/sysroot/usr --with-default-terminfo-dirs=/usr/share/terminfo --with-terminfo-dirs=/usr/share/terminfo ac_cv_header_locale_h=n ac_cv_func_getpwent=no ac_cv_func_getpwnam=no ac_cv_func_getpwuid=no && \
|
||||
sed -i 's@#define HAVE_LOCALE_H 1@/* #undef HAVE_LOCALE_H */@' include/ncurses_cfg.h && \
|
||||
make && \
|
||||
sed -i '0,/echo.*/{s/echo.*/exit 0/}' misc/run_tic.sh && \
|
||||
make install && \
|
||||
mv /ndk/sysroot/usr/lib/libncursesw.a /ndk/sysroot/usr/lib/libncurses.a
|
||||
|
||||
# Default CMD
|
||||
CMD cd /fzf/src && /bin/bash
|
||||
@@ -1,24 +0,0 @@
|
||||
FROM base/archlinux:2014.07.03
|
||||
MAINTAINER Junegunn Choi <junegunn.c@gmail.com>
|
||||
|
||||
# pacman
|
||||
RUN pacman-key --populate archlinux && pacman-key --refresh-keys
|
||||
RUN pacman-db-upgrade && pacman -Syu --noconfirm base-devel git
|
||||
|
||||
# Install Go 1.4
|
||||
RUN cd / && curl \
|
||||
https://storage.googleapis.com/golang/go1.4.2.linux-amd64.tar.gz | \
|
||||
tar -xz && mv go go1.4
|
||||
|
||||
ENV GOROOT /go1.4
|
||||
ENV PATH /go1.4/bin:$PATH
|
||||
|
||||
# For i386 build
|
||||
RUN echo '[multilib]' >> /etc/pacman.conf && \
|
||||
echo 'Include = /etc/pacman.d/mirrorlist' >> /etc/pacman.conf && \
|
||||
pacman-db-upgrade && yes | pacman -Sy gcc-multilib lib32-ncurses && \
|
||||
cd $GOROOT/src && GOARCH=386 ./make.bash
|
||||
|
||||
# Default CMD
|
||||
CMD cd /fzf/src && /bin/bash
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
FROM centos:centos6
|
||||
MAINTAINER Junegunn Choi <junegunn.c@gmail.com>
|
||||
|
||||
# yum
|
||||
RUN yum install -y git gcc make tar glibc-devel glibc-devel.i686 \
|
||||
ncurses-devel ncurses-static ncurses-devel.i686 \
|
||||
gpm-devel gpm-static libgcc.i686
|
||||
|
||||
# Install Go 1.4
|
||||
RUN cd / && curl \
|
||||
https://storage.googleapis.com/golang/go1.4.2.linux-amd64.tar.gz | \
|
||||
tar -xz && mv go go1.4
|
||||
|
||||
# Install Go 1.7
|
||||
RUN cd / && curl \
|
||||
https://storage.googleapis.com/golang/go1.7.linux-amd64.tar.gz | \
|
||||
tar -xz && mv go go1.7
|
||||
|
||||
# Install RPMs for building static 32-bit binary
|
||||
RUN curl ftp://ftp.pbone.net/mirror/ftp.centos.org/6.8/os/i386/Packages/ncurses-static-5.7-4.20090207.el6.i686.rpm -o rpm && rpm -i rpm && \
|
||||
curl ftp://ftp.pbone.net/mirror/ftp.centos.org/6.8/os/i386/Packages/gpm-static-1.20.6-12.el6.i686.rpm -o rpm && rpm -i rpm
|
||||
|
||||
ENV GOROOT_BOOTSTRAP /go1.4
|
||||
ENV GOROOT /go1.7
|
||||
ENV PATH /go1.7/bin:$PATH
|
||||
|
||||
# For i386 build
|
||||
RUN cd $GOROOT/src && GOARCH=386 ./make.bash
|
||||
|
||||
# Default CMD
|
||||
CMD cd /fzf/src && /bin/bash
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
FROM ubuntu:14.04
|
||||
MAINTAINER Junegunn Choi <junegunn.c@gmail.com>
|
||||
|
||||
# apt-get
|
||||
RUN apt-get update && apt-get -y upgrade && \
|
||||
apt-get install -y --force-yes git curl build-essential libncurses-dev libgpm-dev
|
||||
|
||||
# Install Go 1.4
|
||||
RUN cd / && curl \
|
||||
https://storage.googleapis.com/golang/go1.4.2.linux-amd64.tar.gz | \
|
||||
tar -xz && mv go go1.4
|
||||
|
||||
ENV GOROOT /go1.4
|
||||
ENV PATH /go1.4/bin:$PATH
|
||||
|
||||
# For i386 build
|
||||
RUN apt-get install -y lib32ncurses5-dev && \
|
||||
cd $GOROOT/src && GOARCH=386 ./make.bash
|
||||
|
||||
# Default CMD
|
||||
CMD cd /fzf/src && /bin/bash
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Junegunn Choi
|
||||
Copyright (c) 2013-2024 Junegunn Choi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
123  src/Makefile
@@ -1,123 +0,0 @@
|
||||
ifndef GOOS
|
||||
UNAME_S := $(shell uname -s)
|
||||
ifeq ($(UNAME_S),Darwin)
|
||||
GOOS := darwin
|
||||
else ifeq ($(UNAME_S),Linux)
|
||||
GOOS := linux
|
||||
endif
|
||||
endif
|
||||
|
||||
SOURCES := $(wildcard *.go */*.go)
|
||||
ROOTDIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
BINDIR := $(shell dirname $(ROOTDIR))/bin
|
||||
GOPATH := $(shell dirname $(ROOTDIR))/gopath
|
||||
SRCDIR := $(GOPATH)/src/github.com/junegunn/fzf/src
|
||||
DOCKEROPTS := -i -t -v $(ROOTDIR):/fzf/src
|
||||
BINARY32 := fzf-$(GOOS)_386
|
||||
BINARY64 := fzf-$(GOOS)_amd64
|
||||
BINARYARM7 := fzf-$(GOOS)_arm7
|
||||
VERSION := $(shell awk -F= '/version =/ {print $$2}' constants.go | tr -d "\" ")
|
||||
RELEASE32 := fzf-$(VERSION)-$(GOOS)_386
|
||||
RELEASE64 := fzf-$(VERSION)-$(GOOS)_amd64
|
||||
RELEASEARM7 := fzf-$(VERSION)-$(GOOS)_arm7
|
||||
export GOPATH
|
||||
|
||||
UNAME_M := $(shell uname -m)
|
||||
ifeq ($(UNAME_M),x86_64)
|
||||
BINARY := $(BINARY64)
|
||||
else ifeq ($(UNAME_M),i686)
|
||||
BINARY := $(BINARY32)
|
||||
else
|
||||
$(error "Build on $(UNAME_M) is not supported, yet.")
|
||||
endif
|
||||
|
||||
all: fzf/$(BINARY)
|
||||
|
||||
ifeq ($(GOOS),windows)
|
||||
release: fzf/$(BINARY32) fzf/$(BINARY64)
|
||||
-cd fzf && cp $(BINARY32) $(RELEASE32).exe && zip $(RELEASE32).zip $(RELEASE32).exe
|
||||
cd fzf && cp $(BINARY64) $(RELEASE64).exe && zip $(RELEASE64).zip $(RELEASE64).exe && \
|
||||
rm -f $(RELEASE32).exe $(RELEASE64).exe
|
||||
else
|
||||
release: test fzf/$(BINARY32) fzf/$(BINARY64)
|
||||
-cd fzf && cp $(BINARY32) $(RELEASE32) && tar -czf $(RELEASE32).tgz $(RELEASE32)
|
||||
cd fzf && cp $(BINARY64) $(RELEASE64) && tar -czf $(RELEASE64).tgz $(RELEASE64) && \
|
||||
rm -f $(RELEASE32) $(RELEASE64)
|
||||
endif
|
||||
|
||||
$(SRCDIR):
|
||||
mkdir -p $(shell dirname $(SRCDIR))
|
||||
ln -s $(ROOTDIR) $(SRCDIR)
|
||||
|
||||
deps: $(SRCDIR) $(SOURCES)
|
||||
cd $(SRCDIR) && go get -tags "$(TAGS)"
|
||||
|
||||
android-build: $(SRCDIR)
|
||||
cd $(SRCDIR) && GOARCH=arm GOARM=7 CGO_ENABLED=1 go get
|
||||
cd $(SRCDIR)/fzf && GOARCH=arm GOARM=7 CGO_ENABLED=1 go build -a -ldflags="-w -extldflags=-pie" -o $(BINARYARM7)
|
||||
cd $(SRCDIR)/fzf && cp $(BINARYARM7) $(RELEASEARM7) && tar -czf $(RELEASEARM7).tgz $(RELEASEARM7) && \
|
||||
rm -f $(RELEASEARM7)
|
||||
|
||||
test: deps
|
||||
SHELL=/bin/sh GOOS=$(GOOS) go test -v -tags "$(TAGS)" ./...
|
||||
|
||||
install: $(BINDIR)/fzf
|
||||
|
||||
uninstall:
|
||||
rm -f $(BINDIR)/fzf $(BINDIR)/$(BINARY)
|
||||
|
||||
clean:
|
||||
cd fzf && rm -f fzf-*
|
||||
|
||||
fzf/$(BINARY32): deps
|
||||
cd fzf && GOARCH=386 CGO_ENABLED=1 go build -a -ldflags -w -tags "$(TAGS)" -o $(BINARY32)
|
||||
|
||||
fzf/$(BINARY64): deps
|
||||
cd fzf && go build -a -ldflags -w -tags "$(TAGS)" -o $(BINARY64)
|
||||
|
||||
$(BINDIR)/fzf: fzf/$(BINARY) | $(BINDIR)
|
||||
cp -f fzf/$(BINARY) $(BINDIR)
|
||||
cd $(BINDIR) && ln -sf $(BINARY) fzf
|
||||
|
||||
$(BINDIR):
|
||||
mkdir -p $@
|
||||
|
||||
docker-arch:
|
||||
docker build -t junegunn/arch-sandbox - < Dockerfile.arch
|
||||
|
||||
docker-ubuntu:
|
||||
docker build -t junegunn/ubuntu-sandbox - < Dockerfile.ubuntu
|
||||
|
||||
docker-centos:
|
||||
docker build -t junegunn/centos-sandbox - < Dockerfile.centos
|
||||
|
||||
docker-android:
|
||||
docker build -t junegunn/android-sandbox - < Dockerfile.android
|
||||
|
||||
arch: docker-arch
|
||||
docker run $(DOCKEROPTS) junegunn/$@-sandbox \
|
||||
sh -c 'cd /fzf/src; /bin/bash'
|
||||
|
||||
ubuntu: docker-ubuntu
|
||||
docker run $(DOCKEROPTS) junegunn/$@-sandbox \
|
||||
sh -c 'cd /fzf/src; /bin/bash'
|
||||
|
||||
centos: docker-centos
|
||||
docker run $(DOCKEROPTS) junegunn/$@-sandbox \
|
||||
sh -c 'cd /fzf/src; /bin/bash'
|
||||
|
||||
linux: docker-centos
|
||||
docker run $(DOCKEROPTS) junegunn/centos-sandbox \
|
||||
/bin/bash -ci 'cd /fzf/src; make TAGS=static release'
|
||||
|
||||
ubuntu-android: docker-android
|
||||
docker run $(DOCKEROPTS) junegunn/android-sandbox \
|
||||
sh -c 'cd /fzf/src; /bin/bash'
|
||||
|
||||
android: docker-android
|
||||
docker run $(DOCKEROPTS) junegunn/android-sandbox \
|
||||
/bin/bash -ci 'cd /fzf/src; GOOS=android make android-build'
|
||||
|
||||
.PHONY: all deps release test install uninstall clean \
|
||||
linux arch ubuntu centos docker-arch docker-ubuntu docker-centos \
|
||||
android-build docker-android ubuntu-android android
|
||||
102  src/README.md
@@ -1,102 +0,0 @@
|
||||
fzf in Go
|
||||
=========
|
||||
|
||||
<img src="https://cloud.githubusercontent.com/assets/700826/5725028/028ea834-9b93-11e4-9198-43088c3f295d.gif" height="463" alt="fzf in go">
|
||||
|
||||
This directory contains the source code for the new fzf implementation in
|
||||
[Go][go].
|
||||
|
||||
Upgrade from Ruby version
|
||||
-------------------------
|
||||
|
||||
The install script has been updated to download the right binary for your
|
||||
system. If you have already installed fzf, simply git-pull the repository and
|
||||
rerun the install script.
|
||||
|
||||
```sh
|
||||
cd ~/.fzf
|
||||
git pull
|
||||
./install
|
||||
```
|
||||
|
||||
Otherwise, follow [the instructions][install] as before. You can also install
fzf using Homebrew if you prefer.
|
||||
|
||||
Motivations
|
||||
-----------
|
||||
|
||||
### No Ruby dependency
|
||||
|
||||
There have always been complaints about fzf being a Ruby script. To make
matters worse, Ruby 2.1 removed the ncurses binding from its standard library.
Because of the change, users running Ruby 2.1 or above are forced to build the
C extensions of the curses gem to meet the requirement of fzf. The new Go
version will be distributed as an executable binary, so it will be much more
accessible and easier to set up.
|
||||
|
||||
### Performance
|
||||
|
||||
Many people have been surprised to see how fast fzf is even when it was
|
||||
written in Ruby. It stays quite responsive even for 100k+ lines, which is
|
||||
well above the size of the usual input.
|
||||
|
||||
The new Go version, of course, is significantly faster than that. It has all
the performance optimization techniques used in the Ruby implementation and
more. It also doesn't suffer from the [GIL][gil], so search performance scales
with the number of CPU cores. On my MacBook Pro (Mid 2012), the new version was
shown to be an order of magnitude faster in certain cases. It also starts much
faster, though the difference may not be noticeable.
|
||||
|
||||
Build
|
||||
-----
|
||||
|
||||
```sh
|
||||
# Build fzf executables and tarballs
|
||||
make release
|
||||
|
||||
# Install the executable to ../bin directory
|
||||
make install
|
||||
|
||||
# Build executables and tarballs for Linux using Docker
|
||||
make linux
|
||||
```
|
||||
|
||||
Test
|
||||
----
|
||||
|
||||
Unit tests can be run with `make test`. Integration tests are written in a Ruby
script that should be run inside tmux.
|
||||
|
||||
```sh
|
||||
# Unit tests
|
||||
make test
|
||||
|
||||
# Install the executable to ../bin directory
|
||||
make install
|
||||
|
||||
# Integration tests
|
||||
ruby ../test/test_go.rb
|
||||
```
|
||||
|
||||
Third-party libraries used
|
||||
--------------------------
|
||||
|
||||
- [ncurses][ncurses]
|
||||
- [mattn/go-runewidth](https://github.com/mattn/go-runewidth)
|
||||
- Licensed under [MIT](http://mattn.mit-license.org)
|
||||
- [mattn/go-shellwords](https://github.com/mattn/go-shellwords)
|
||||
- Licensed under [MIT](http://mattn.mit-license.org)
|
||||
- [mattn/go-isatty](https://github.com/mattn/go-isatty)
|
||||
- Licensed under [MIT](http://mattn.mit-license.org)
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
[MIT](LICENSE)
|
||||
|
||||
[install]: https://github.com/junegunn/fzf#installation
|
||||
[go]: https://golang.org/
|
||||
[gil]: http://en.wikipedia.org/wiki/Global_Interpreter_Lock
|
||||
[ncurses]: https://www.gnu.org/software/ncurses/
|
||||
[req]: http://golang.org/doc/install
|
||||
[termbox]: https://github.com/nsf/termbox-go
|
||||
135  src/actiontype_string.go  Normal file
@@ -0,0 +1,135 @@
|
||||
// Code generated by "stringer -type=actionType"; DO NOT EDIT.
|
||||
|
||||
package fzf
|
||||
|
||||
import "strconv"
|
||||
|
||||
func _() {
|
||||
// An "invalid array index" compiler error signifies that the constant values have changed.
|
||||
// Re-run the stringer command to generate them again.
|
||||
var x [1]struct{}
|
||||
_ = x[actIgnore-0]
|
||||
_ = x[actStart-1]
|
||||
_ = x[actClick-2]
|
||||
_ = x[actInvalid-3]
|
||||
_ = x[actChar-4]
|
||||
_ = x[actMouse-5]
|
||||
_ = x[actBeginningOfLine-6]
|
||||
_ = x[actAbort-7]
|
||||
_ = x[actAccept-8]
|
||||
_ = x[actAcceptNonEmpty-9]
|
||||
_ = x[actAcceptOrPrintQuery-10]
|
||||
_ = x[actBackwardChar-11]
|
||||
_ = x[actBackwardDeleteChar-12]
|
||||
_ = x[actBackwardDeleteCharEof-13]
|
||||
_ = x[actBackwardWord-14]
|
||||
_ = x[actCancel-15]
|
||||
_ = x[actChangeBorderLabel-16]
|
||||
_ = x[actChangeHeader-17]
|
||||
_ = x[actChangeMulti-18]
|
||||
_ = x[actChangePreviewLabel-19]
|
||||
_ = x[actChangePrompt-20]
|
||||
_ = x[actChangeQuery-21]
|
||||
_ = x[actClearScreen-22]
|
||||
_ = x[actClearQuery-23]
|
||||
_ = x[actClearSelection-24]
|
||||
_ = x[actClose-25]
|
||||
_ = x[actDeleteChar-26]
|
||||
_ = x[actDeleteCharEof-27]
|
||||
_ = x[actEndOfLine-28]
|
||||
_ = x[actFatal-29]
|
||||
_ = x[actForwardChar-30]
|
||||
_ = x[actForwardWord-31]
|
||||
_ = x[actKillLine-32]
|
||||
_ = x[actKillWord-33]
|
||||
_ = x[actUnixLineDiscard-34]
|
||||
_ = x[actUnixWordRubout-35]
|
||||
_ = x[actYank-36]
|
||||
_ = x[actBackwardKillWord-37]
|
||||
_ = x[actSelectAll-38]
|
||||
_ = x[actDeselectAll-39]
|
||||
_ = x[actToggle-40]
|
||||
_ = x[actToggleSearch-41]
|
||||
_ = x[actToggleAll-42]
|
||||
_ = x[actToggleDown-43]
|
||||
_ = x[actToggleUp-44]
|
||||
_ = x[actToggleIn-45]
|
||||
_ = x[actToggleOut-46]
|
||||
_ = x[actToggleTrack-47]
|
||||
_ = x[actToggleTrackCurrent-48]
|
||||
_ = x[actToggleHeader-49]
|
||||
_ = x[actToggleWrap-50]
|
||||
_ = x[actTrackCurrent-51]
|
||||
_ = x[actUntrackCurrent-52]
|
||||
_ = x[actDown-53]
|
||||
_ = x[actUp-54]
|
||||
_ = x[actPageUp-55]
|
||||
_ = x[actPageDown-56]
|
||||
_ = x[actPosition-57]
|
||||
_ = x[actHalfPageUp-58]
|
||||
_ = x[actHalfPageDown-59]
|
||||
_ = x[actOffsetUp-60]
|
||||
_ = x[actOffsetDown-61]
|
||||
_ = x[actOffsetMiddle-62]
|
||||
_ = x[actJump-63]
|
||||
_ = x[actJumpAccept-64]
|
||||
_ = x[actPrintQuery-65]
|
||||
_ = x[actRefreshPreview-66]
|
||||
_ = x[actReplaceQuery-67]
|
||||
_ = x[actToggleSort-68]
|
||||
_ = x[actShowPreview-69]
|
||||
_ = x[actHidePreview-70]
|
||||
_ = x[actTogglePreview-71]
|
||||
_ = x[actTogglePreviewWrap-72]
|
||||
_ = x[actTransform-73]
|
||||
_ = x[actTransformBorderLabel-74]
|
||||
_ = x[actTransformHeader-75]
|
||||
_ = x[actTransformPreviewLabel-76]
|
||||
_ = x[actTransformPrompt-77]
|
||||
_ = x[actTransformQuery-78]
|
||||
_ = x[actPreview-79]
|
||||
_ = x[actChangePreview-80]
|
||||
_ = x[actChangePreviewWindow-81]
|
||||
_ = x[actPreviewTop-82]
|
||||
_ = x[actPreviewBottom-83]
|
||||
_ = x[actPreviewUp-84]
|
||||
_ = x[actPreviewDown-85]
|
||||
_ = x[actPreviewPageUp-86]
|
||||
_ = x[actPreviewPageDown-87]
|
||||
_ = x[actPreviewHalfPageUp-88]
|
||||
_ = x[actPreviewHalfPageDown-89]
|
||||
_ = x[actPrevHistory-90]
|
||||
_ = x[actPrevSelected-91]
|
||||
_ = x[actPrint-92]
|
||||
_ = x[actPut-93]
|
||||
_ = x[actNextHistory-94]
|
||||
_ = x[actNextSelected-95]
|
||||
_ = x[actExecute-96]
|
||||
_ = x[actExecuteSilent-97]
|
||||
_ = x[actExecuteMulti-98]
|
||||
_ = x[actSigStop-99]
|
||||
_ = x[actFirst-100]
|
||||
_ = x[actLast-101]
|
||||
_ = x[actReload-102]
|
||||
_ = x[actReloadSync-103]
|
||||
_ = x[actDisableSearch-104]
|
||||
_ = x[actEnableSearch-105]
|
||||
_ = x[actSelect-106]
|
||||
_ = x[actDeselect-107]
|
||||
_ = x[actUnbind-108]
|
||||
_ = x[actRebind-109]
|
||||
_ = x[actBecome-110]
|
||||
_ = x[actShowHeader-111]
|
||||
_ = x[actHideHeader-112]
|
||||
}
|
||||
|
||||
const _actionType_name = "actIgnoreactStartactClickactInvalidactCharactMouseactBeginningOfLineactAbortactAcceptactAcceptNonEmptyactAcceptOrPrintQueryactBackwardCharactBackwardDeleteCharactBackwardDeleteCharEofactBackwardWordactCancelactChangeBorderLabelactChangeHeaderactChangeMultiactChangePreviewLabelactChangePromptactChangeQueryactClearScreenactClearQueryactClearSelectionactCloseactDeleteCharactDeleteCharEofactEndOfLineactFatalactForwardCharactForwardWordactKillLineactKillWordactUnixLineDiscardactUnixWordRuboutactYankactBackwardKillWordactSelectAllactDeselectAllactToggleactToggleSearchactToggleAllactToggleDownactToggleUpactToggleInactToggleOutactToggleTrackactToggleTrackCurrentactToggleHeaderactToggleWrapactTrackCurrentactUntrackCurrentactDownactUpactPageUpactPageDownactPositionactHalfPageUpactHalfPageDownactOffsetUpactOffsetDownactOffsetMiddleactJumpactJumpAcceptactPrintQueryactRefreshPreviewactReplaceQueryactToggleSortactShowPreviewactHidePreviewactTogglePreviewactTogglePreviewWrapactTransformactTransformBorderLabelactTransformHeaderactTransformPreviewLabelactTransformPromptactTransformQueryactPreviewactChangePreviewactChangePreviewWindowactPreviewTopactPreviewBottomactPreviewUpactPreviewDownactPreviewPageUpactPreviewPageDownactPreviewHalfPageUpactPreviewHalfPageDownactPrevHistoryactPrevSelectedactPrintactPutactNextHistoryactNextSelectedactExecuteactExecuteSilentactExecuteMultiactSigStopactFirstactLastactReloadactReloadSyncactDisableSearchactEnableSearchactSelectactDeselectactUnbindactRebindactBecomeactShowHeaderactHideHeader"
|
||||
|
||||
var _actionType_index = [...]uint16{0, 9, 17, 25, 35, 42, 50, 68, 76, 85, 102, 123, 138, 159, 183, 198, 207, 227, 242, 256, 277, 292, 306, 320, 333, 350, 358, 371, 387, 399, 407, 421, 435, 446, 457, 475, 492, 499, 518, 530, 544, 553, 568, 580, 593, 604, 615, 627, 641, 662, 677, 690, 705, 722, 729, 734, 743, 754, 765, 778, 793, 804, 817, 832, 839, 852, 865, 882, 897, 910, 924, 938, 954, 974, 986, 1009, 1027, 1051, 1069, 1086, 1096, 1112, 1134, 1147, 1163, 1175, 1189, 1205, 1223, 1243, 1265, 1279, 1294, 1302, 1308, 1322, 1337, 1347, 1363, 1378, 1388, 1396, 1403, 1412, 1425, 1441, 1456, 1465, 1476, 1485, 1494, 1503, 1516, 1529}
|
||||
|
||||
func (i actionType) String() string {
|
||||
if i < 0 || i >= actionType(len(_actionType_index)-1) {
|
||||
return "actionType(" + strconv.FormatInt(int64(i), 10) + ")"
|
||||
}
|
||||
return _actionType_name[_actionType_index[i]:_actionType_index[i+1]]
|
||||
}
|
||||
648  src/algo/algo.go
@@ -78,15 +78,22 @@ Scoring criteria
|
||||
*/
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
var DEBUG bool
|
||||
|
||||
var delimiterChars = "/,:;|"
|
||||
|
||||
const whiteChars = " \t\n\v\f\r\x85\xA0"
|
||||
|
||||
func indexAt(index int, max int, forward bool) int {
|
||||
if forward {
|
||||
return index
|
||||
@@ -105,7 +112,7 @@ type Result struct {
|
||||
const (
|
||||
scoreMatch = 16
|
||||
scoreGapStart = -3
|
||||
scoreGapExtention = -1
|
||||
scoreGapExtension = -1
|
||||
|
||||
// We prefer matches at the beginning of a word, but the bonus should not be
|
||||
// too great to prevent the longer acronym matches from always winning over
|
||||
@@ -123,31 +130,93 @@ const (
|
||||
// Edge-triggered bonus for matches in camelCase words.
|
||||
// Compared to the word-boundary case, they don't accompany single-character
// gaps (e.g. FooBar vs. foo-bar), so we deduct a bonus point accordingly.
|
||||
bonusCamel123 = bonusBoundary + scoreGapExtention
|
||||
bonusCamel123 = bonusBoundary + scoreGapExtension
|
||||
|
||||
// Minimum bonus point given to characters in consecutive chunks.
|
||||
// Note that bonus points for consecutive matches wouldn't have been needed if
// we used a fixed match score as in the original algorithm.
|
||||
bonusConsecutive = -(scoreGapStart + scoreGapExtention)
|
||||
bonusConsecutive = -(scoreGapStart + scoreGapExtension)
|
||||
|
||||
// The first character in the typed pattern usually has more significance
|
||||
// than the rest so it's important that it appears at special positions where
|
||||
// bonus points are given. e.g. "to-go" vs. "ongoing" on "og" or on "ogo".
|
||||
// bonus points are given, e.g. "to-go" vs. "ongoing" on "og" or on "ogo".
|
||||
// The amount of the extra bonus should be limited so that the gap penalty is
|
||||
// still respected.
|
||||
bonusFirstCharMultiplier = 2
|
||||
)
|
||||
|
||||
var (
|
||||
// Extra bonus for word boundary after whitespace character or beginning of the string
|
||||
bonusBoundaryWhite int16 = bonusBoundary + 2
|
||||
|
||||
// Extra bonus for word boundary after slash, colon, semi-colon, and comma
|
||||
bonusBoundaryDelimiter int16 = bonusBoundary + 1
|
||||
|
||||
initialCharClass = charWhite
|
||||
|
||||
// A minor optimization that can give 15%+ performance boost
|
||||
asciiCharClasses [unicode.MaxASCII + 1]charClass
|
||||
|
||||
// A minor optimization that can give yet another 5% performance boost
|
||||
bonusMatrix [charNumber + 1][charNumber + 1]int16
|
||||
)
|
||||
|
||||
type charClass int
|
||||
|
||||
const (
|
||||
charNonWord charClass = iota
|
||||
charWhite charClass = iota
|
||||
charNonWord
|
||||
charDelimiter
|
||||
charLower
|
||||
charUpper
|
||||
charLetter
|
||||
charNumber
|
||||
)
|
||||
|
||||
func Init(scheme string) bool {
|
||||
switch scheme {
|
||||
case "default":
|
||||
bonusBoundaryWhite = bonusBoundary + 2
|
||||
bonusBoundaryDelimiter = bonusBoundary + 1
|
||||
case "path":
|
||||
bonusBoundaryWhite = bonusBoundary
|
||||
bonusBoundaryDelimiter = bonusBoundary + 1
|
||||
if os.PathSeparator == '/' {
|
||||
delimiterChars = "/"
|
||||
} else {
|
||||
delimiterChars = string([]rune{os.PathSeparator, '/'})
|
||||
}
|
||||
initialCharClass = charDelimiter
|
||||
case "history":
|
||||
bonusBoundaryWhite = bonusBoundary
|
||||
bonusBoundaryDelimiter = bonusBoundary
|
||||
default:
|
||||
return false
|
||||
}
|
||||
for i := 0; i <= unicode.MaxASCII; i++ {
|
||||
char := rune(i)
|
||||
c := charNonWord
|
||||
if char >= 'a' && char <= 'z' {
|
||||
c = charLower
|
||||
} else if char >= 'A' && char <= 'Z' {
|
||||
c = charUpper
|
||||
} else if char >= '0' && char <= '9' {
|
||||
c = charNumber
|
||||
} else if strings.ContainsRune(whiteChars, char) {
|
||||
c = charWhite
|
||||
} else if strings.ContainsRune(delimiterChars, char) {
|
||||
c = charDelimiter
|
||||
}
|
||||
asciiCharClasses[i] = c
|
||||
}
|
||||
for i := 0; i <= int(charNumber); i++ {
|
||||
for j := 0; j <= int(charNumber); j++ {
|
||||
bonusMatrix[i][j] = bonusFor(charClass(i), charClass(j))
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func posArray(withPos bool, len int) *[]int {
|
||||
if withPos {
|
||||
pos := make([]int, 0, len)
|
||||
@@ -156,43 +225,22 @@ func posArray(withPos bool, len int) *[]int {
|
||||
return nil
|
||||
}
|
||||
|
||||
func alloc16(offset int, slab *util.Slab, size int, clear bool) (int, []int16) {
|
||||
func alloc16(offset int, slab *util.Slab, size int) (int, []int16) {
|
||||
if slab != nil && cap(slab.I16) > offset+size {
|
||||
slice := slab.I16[offset : offset+size]
|
||||
if clear {
|
||||
for idx := range slice {
|
||||
slice[idx] = 0
|
||||
}
|
||||
}
|
||||
return offset + size, slice
|
||||
}
|
||||
return offset, make([]int16, size)
|
||||
}
|
||||
|
||||
func alloc32(offset int, slab *util.Slab, size int, clear bool) (int, []int32) {
|
||||
func alloc32(offset int, slab *util.Slab, size int) (int, []int32) {
|
||||
if slab != nil && cap(slab.I32) > offset+size {
|
||||
slice := slab.I32[offset : offset+size]
|
||||
if clear {
|
||||
for idx := range slice {
|
||||
slice[idx] = 0
|
||||
}
|
||||
}
|
||||
return offset + size, slice
|
||||
}
|
||||
return offset, make([]int32, size)
|
||||
}
|
||||
|
||||
func charClassOfAscii(char rune) charClass {
|
||||
if char >= 'a' && char <= 'z' {
|
||||
return charLower
|
||||
} else if char >= 'A' && char <= 'Z' {
|
||||
return charUpper
|
||||
} else if char >= '0' && char <= '9' {
|
||||
return charNumber
|
||||
}
|
||||
return charNonWord
|
||||
}
|
||||
|
||||
func charClassOfNonAscii(char rune) charClass {
|
||||
if unicode.IsLower(char) {
|
||||
return charLower
|
||||
@@ -202,208 +250,378 @@ func charClassOfNonAscii(char rune) charClass {
|
||||
return charNumber
|
||||
} else if unicode.IsLetter(char) {
|
||||
return charLetter
|
||||
} else if unicode.IsSpace(char) {
|
||||
return charWhite
|
||||
} else if strings.ContainsRune(delimiterChars, char) {
|
||||
return charDelimiter
|
||||
}
|
||||
return charNonWord
|
||||
}
|
||||
|
||||
func charClassOf(char rune) charClass {
|
||||
if char <= unicode.MaxASCII {
|
||||
return charClassOfAscii(char)
|
||||
return asciiCharClasses[char]
|
||||
}
|
||||
return charClassOfNonAscii(char)
|
||||
}
|
||||
|
||||
func bonusFor(prevClass charClass, class charClass) int16 {
|
||||
if prevClass == charNonWord && class != charNonWord {
|
||||
// Word boundary
|
||||
return bonusBoundary
|
||||
} else if prevClass == charLower && class == charUpper ||
|
||||
if class > charNonWord {
|
||||
switch prevClass {
|
||||
case charWhite:
|
||||
// Word boundary after whitespace
|
||||
return bonusBoundaryWhite
|
||||
case charDelimiter:
|
||||
// Word boundary after a delimiter character
|
||||
return bonusBoundaryDelimiter
|
||||
case charNonWord:
|
||||
// Word boundary
|
||||
return bonusBoundary
|
||||
}
|
||||
}
|
||||
|
||||
if prevClass == charLower && class == charUpper ||
|
||||
prevClass != charNumber && class == charNumber {
|
||||
// camelCase letter123
|
||||
return bonusCamel123
|
||||
} else if class == charNonWord {
|
||||
}
|
||||
|
||||
switch class {
|
||||
case charNonWord, charDelimiter:
|
||||
return bonusNonWord
|
||||
case charWhite:
|
||||
return bonusBoundaryWhite
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func bonusAt(input util.Chars, idx int) int16 {
|
||||
func bonusAt(input *util.Chars, idx int) int16 {
|
||||
if idx == 0 {
|
||||
return bonusBoundary
|
||||
return bonusBoundaryWhite
|
||||
}
|
||||
return bonusFor(charClassOf(input.Get(idx-1)), charClassOf(input.Get(idx)))
|
||||
return bonusMatrix[charClassOf(input.Get(idx-1))][charClassOf(input.Get(idx))]
|
||||
}
|
||||
|
||||
type Algo func(caseSensitive bool, forward bool, input util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int)
|
||||
func normalizeRune(r rune) rune {
|
||||
if r < 0x00C0 || r > 0x2184 {
|
||||
return r
|
||||
}
|
||||
|
||||
func FuzzyMatchV2(caseSensitive bool, forward bool, input util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
n := normalized[r]
|
||||
if n > 0 {
|
||||
return n
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Algo functions make two assumptions
|
||||
// 1. "pattern" is given in lowercase if "caseSensitive" is false
|
||||
// 2. "pattern" is already normalized if "normalize" is true
|
||||
type Algo func(caseSensitive bool, normalize bool, forward bool, input *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int)
|
||||
|
||||
func trySkip(input *util.Chars, caseSensitive bool, b byte, from int) int {
|
||||
byteArray := input.Bytes()[from:]
|
||||
idx := bytes.IndexByte(byteArray, b)
|
||||
if idx == 0 {
|
||||
// Can't skip any further
|
||||
return from
|
||||
}
|
||||
// We may need to search for the uppercase letter again. We don't have to
|
||||
// consider normalization as we can be sure that this is an ASCII string.
|
||||
if !caseSensitive && b >= 'a' && b <= 'z' {
|
||||
if idx > 0 {
|
||||
byteArray = byteArray[:idx]
|
||||
}
|
||||
uidx := bytes.IndexByte(byteArray, b-32)
|
||||
if uidx >= 0 {
|
||||
idx = uidx
|
||||
}
|
||||
}
|
||||
if idx < 0 {
|
||||
return -1
|
||||
}
|
||||
return from + idx
|
||||
}
|
||||
|
||||
func isAscii(runes []rune) bool {
|
||||
for _, r := range runes {
|
||||
if r >= utf8.RuneSelf {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func asciiFuzzyIndex(input *util.Chars, pattern []rune, caseSensitive bool) (int, int) {
|
||||
// Can't determine
|
||||
if !input.IsBytes() {
|
||||
return 0, input.Length()
|
||||
}
|
||||
|
||||
// Not possible
|
||||
if !isAscii(pattern) {
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
firstIdx, idx, lastIdx := 0, 0, 0
|
||||
var b byte
|
||||
for pidx := 0; pidx < len(pattern); pidx++ {
|
||||
b = byte(pattern[pidx])
|
||||
idx = trySkip(input, caseSensitive, b, idx)
|
||||
if idx < 0 {
|
||||
return -1, -1
|
||||
}
|
||||
if pidx == 0 && idx > 0 {
|
||||
// Step back to find the right bonus point
|
||||
firstIdx = idx - 1
|
||||
}
|
||||
lastIdx = idx
|
||||
idx++
|
||||
}
|
||||
|
||||
// Find the last appearance of the last character of the pattern to limit the search scope
|
||||
bu := b
|
||||
if !caseSensitive && b >= 'a' && b <= 'z' {
|
||||
bu = b - 32
|
||||
}
|
||||
scope := input.Bytes()[lastIdx:]
|
||||
for offset := len(scope) - 1; offset > 0; offset-- {
|
||||
if scope[offset] == b || scope[offset] == bu {
|
||||
return firstIdx, lastIdx + offset + 1
|
||||
}
|
||||
}
|
||||
return firstIdx, lastIdx + 1
|
||||
}
|
||||
|
||||
func debugV2(T []rune, pattern []rune, F []int32, lastIdx int, H []int16, C []int16) {
|
||||
width := lastIdx - int(F[0]) + 1
|
||||
|
||||
for i, f := range F {
|
||||
I := i * width
|
||||
if i == 0 {
|
||||
fmt.Print(" ")
|
||||
for j := int(f); j <= lastIdx; j++ {
|
||||
fmt.Printf(" " + string(T[j]) + " ")
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
fmt.Print(string(pattern[i]) + " ")
|
||||
for idx := int(F[0]); idx < int(f); idx++ {
|
||||
fmt.Print(" 0 ")
|
||||
}
|
||||
for idx := int(f); idx <= lastIdx; idx++ {
|
||||
fmt.Printf("%2d ", H[i*width+idx-int(F[0])])
|
||||
}
|
||||
fmt.Println()
|
||||
|
||||
fmt.Print(" ")
|
||||
for idx, p := range C[I : I+width] {
|
||||
if idx+int(F[0]) < int(F[i]) {
|
||||
p = 0
|
||||
}
|
||||
if p > 0 {
|
||||
fmt.Printf("%2d ", p)
|
||||
} else {
|
||||
fmt.Print(" ")
|
||||
}
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
}
|
||||
|
||||
func FuzzyMatchV2(caseSensitive bool, normalize bool, forward bool, input *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
// Assume that pattern is given in lowercase if case-insensitive.
|
||||
// First check if there's a match and calculate bonus for each position.
|
||||
// If the input string is too long, consider finding the matching chars in
|
||||
// this phase as well (non-optimal alignment).
|
||||
N := input.Length()
|
||||
M := len(pattern)
|
||||
switch M {
|
||||
case 0:
|
||||
if M == 0 {
|
||||
return Result{0, 0, 0}, posArray(withPos, M)
|
||||
case 1:
|
||||
return ExactMatchNaive(caseSensitive, forward, input, pattern[0:1], withPos, slab)
|
||||
}
|
||||
N := input.Length()
|
||||
if M > N {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
// Since O(nm) algorithm can be prohibitively expensive for large input,
|
||||
// we fall back to the greedy algorithm.
|
||||
if slab != nil && N*M > cap(slab.I16) {
|
||||
return FuzzyMatchV1(caseSensitive, forward, input, pattern, withPos, slab)
|
||||
return FuzzyMatchV1(caseSensitive, normalize, forward, input, pattern, withPos, slab)
|
||||
}
|
||||
|
||||
// Phase 1. Optimized search for ASCII string
|
||||
minIdx, maxIdx := asciiFuzzyIndex(input, pattern, caseSensitive)
|
||||
if minIdx < 0 {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
// fmt.Println(N, maxIdx, idx, maxIdx-idx, input.ToString())
|
||||
N = maxIdx - minIdx
|
||||
|
||||
// Reuse pre-allocated integer slice to avoid unnecessary sweeping of garbages
|
||||
offset16 := 0
|
||||
offset32 := 0
|
||||
offset16, H0 := alloc16(offset16, slab, N)
|
||||
offset16, C0 := alloc16(offset16, slab, N)
|
||||
// Bonus point for each position
|
||||
offset16, B := alloc16(offset16, slab, N, false)
|
||||
offset16, B := alloc16(offset16, slab, N)
|
||||
// The first occurrence of each character in the pattern
|
||||
offset32, F := alloc32(offset32, slab, M, false)
|
||||
offset32, F := alloc32(offset32, slab, M)
|
||||
// Rune array
|
||||
offset32, T := alloc32(offset32, slab, N, false)
|
||||
_, T := alloc32(offset32, slab, N)
|
||||
input.CopyRunes(T, minIdx)
|
||||
|
||||
// Phase 1. Check if there's a match and calculate bonus for each point
|
||||
pidx, lastIdx, prevClass := 0, 0, charNonWord
|
||||
for idx := 0; idx < N; idx++ {
|
||||
char := input.Get(idx)
|
||||
// Phase 2. Calculate bonus for each point
|
||||
maxScore, maxScorePos := int16(0), 0
|
||||
pidx, lastIdx := 0, 0
|
||||
pchar0, pchar, prevH0, prevClass, inGap := pattern[0], pattern[0], int16(0), initialCharClass, false
|
||||
for off, char := range T {
|
||||
var class charClass
|
||||
if char <= unicode.MaxASCII {
|
||||
class = charClassOfAscii(char)
|
||||
class = asciiCharClasses[char]
|
||||
if !caseSensitive && class == charUpper {
|
||||
char += 32
|
||||
T[off] = char
|
||||
}
|
||||
} else {
|
||||
class = charClassOfNonAscii(char)
|
||||
}
|
||||
|
||||
if !caseSensitive && class == charUpper {
|
||||
if char <= unicode.MaxASCII {
|
||||
char += 32
|
||||
} else {
|
||||
if !caseSensitive && class == charUpper {
|
||||
char = unicode.To(unicode.LowerCase, char)
|
||||
}
|
||||
if normalize {
|
||||
char = normalizeRune(char)
|
||||
}
|
||||
T[off] = char
|
||||
}
|
||||
|
||||
T[idx] = char
|
||||
B[idx] = bonusFor(prevClass, class)
|
||||
bonus := bonusMatrix[prevClass][class]
|
||||
B[off] = bonus
|
||||
prevClass = class
|
||||
|
||||
if pidx < M {
|
||||
if char == pattern[pidx] {
|
||||
lastIdx = idx
|
||||
F[pidx] = int32(idx)
|
||||
if char == pchar {
|
||||
if pidx < M {
|
||||
F[pidx] = int32(off)
|
||||
pidx++
|
||||
pchar = pattern[util.Min(pidx, M-1)]
|
||||
}
|
||||
} else {
|
||||
if char == pattern[M-1] {
|
||||
lastIdx = idx
|
||||
}
|
||||
lastIdx = off
|
||||
}
|
||||
|
||||
if char == pchar0 {
|
||||
score := scoreMatch + bonus*bonusFirstCharMultiplier
|
||||
H0[off] = score
|
||||
C0[off] = 1
|
||||
if M == 1 && (forward && score > maxScore || !forward && score >= maxScore) {
|
||||
maxScore, maxScorePos = score, off
|
||||
if forward && bonus >= bonusBoundary {
|
||||
break
|
||||
}
|
||||
}
|
||||
inGap = false
|
||||
} else {
|
||||
if inGap {
|
||||
H0[off] = util.Max16(prevH0+scoreGapExtension, 0)
|
||||
} else {
|
||||
H0[off] = util.Max16(prevH0+scoreGapStart, 0)
|
||||
}
|
||||
C0[off] = 0
|
||||
inGap = true
|
||||
}
|
||||
prevH0 = H0[off]
|
||||
}
|
||||
if pidx != M {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
if M == 1 {
|
||||
result := Result{minIdx + maxScorePos, minIdx + maxScorePos + 1, int(maxScore)}
|
||||
if !withPos {
|
||||
return result, nil
|
||||
}
|
||||
pos := []int{minIdx + maxScorePos}
|
||||
return result, &pos
|
||||
}
|
||||
|
||||
// Phase 2. Fill in score matrix (H)
|
||||
// Phase 3. Fill in score matrix (H)
|
||||
// Unlike the original algorithm, we do not allow omission.
|
||||
width := lastIdx - int(F[0]) + 1
|
||||
offset16, H := alloc16(offset16, slab, width*M, false)
|
||||
f0 := int(F[0])
|
||||
width := lastIdx - f0 + 1
|
||||
offset16, H := alloc16(offset16, slab, width*M)
|
||||
copy(H, H0[f0:lastIdx+1])
|
||||
|
||||
// Possible length of consecutive chunk at each position.
|
||||
offset16, C := alloc16(offset16, slab, width*M, false)
|
||||
_, C := alloc16(offset16, slab, width*M)
|
||||
copy(C, C0[f0:lastIdx+1])
|
||||
|
||||
maxScore, maxScorePos := int16(0), 0
|
||||
for i := 0; i < M; i++ {
|
||||
I := i * width
|
||||
Fsub := F[1:]
|
||||
Psub := pattern[1:][:len(Fsub)]
|
||||
for off, f := range Fsub {
|
||||
f := int(f)
|
||||
pchar := Psub[off]
|
||||
pidx := off + 1
|
||||
row := pidx * width
|
||||
inGap := false
|
||||
for j := int(F[i]); j <= lastIdx; j++ {
|
||||
j0 := j - int(F[0])
|
||||
Tsub := T[f : lastIdx+1]
|
||||
Bsub := B[f:][:len(Tsub)]
|
||||
Csub := C[row+f-f0:][:len(Tsub)]
|
||||
Cdiag := C[row+f-f0-1-width:][:len(Tsub)]
|
||||
Hsub := H[row+f-f0:][:len(Tsub)]
|
||||
Hdiag := H[row+f-f0-1-width:][:len(Tsub)]
|
||||
Hleft := H[row+f-f0-1:][:len(Tsub)]
|
||||
Hleft[0] = 0
|
||||
for off, char := range Tsub {
|
||||
col := off + f
|
||||
var s1, s2, consecutive int16
|
||||
|
||||
if j > int(F[i]) {
|
||||
if inGap {
|
||||
s2 = H[I+j0-1] + scoreGapExtention
|
||||
} else {
|
||||
s2 = H[I+j0-1] + scoreGapStart
|
||||
}
|
||||
if inGap {
|
||||
s2 = Hleft[off] + scoreGapExtension
|
||||
} else {
|
||||
s2 = Hleft[off] + scoreGapStart
|
||||
}
|
||||
|
||||
if pattern[i] == T[j] {
|
||||
var diag int16
|
||||
if i > 0 && j0 > 0 {
|
||||
diag = H[I-width+j0-1]
|
||||
}
|
||||
s1 = diag + scoreMatch
|
||||
b := B[j]
|
||||
if i > 0 {
|
||||
// j > 0 if i > 0
|
||||
consecutive = C[I-width+j0-1] + 1
|
||||
if pchar == char {
|
||||
s1 = Hdiag[off] + scoreMatch
|
||||
b := Bsub[off]
|
||||
consecutive = Cdiag[off] + 1
|
||||
if consecutive > 1 {
|
||||
fb := B[col-int(consecutive)+1]
|
||||
// Break consecutive chunk
|
||||
if b == bonusBoundary {
|
||||
if b >= bonusBoundary && b > fb {
|
||||
consecutive = 1
|
||||
} else if consecutive > 1 {
|
||||
b = util.Max16(b, util.Max16(bonusConsecutive, B[j-int(consecutive)+1]))
|
||||
} else {
|
||||
b = util.Max16(b, util.Max16(bonusConsecutive, fb))
|
||||
}
|
||||
} else {
|
||||
consecutive = 1
|
||||
b *= bonusFirstCharMultiplier
|
||||
}
|
||||
if s1+b < s2 {
|
||||
s1 += B[j]
|
||||
s1 += Bsub[off]
|
||||
consecutive = 0
|
||||
} else {
|
||||
s1 += b
|
||||
}
|
||||
}
|
||||
C[I+j0] = consecutive
|
||||
Csub[off] = consecutive
|
||||
|
||||
inGap = s1 < s2
|
||||
score := util.Max16(util.Max16(s1, s2), 0)
|
||||
if i == M-1 && (forward && score > maxScore || !forward && score >= maxScore) {
|
||||
maxScore, maxScorePos = score, j
|
||||
if pidx == M-1 && (forward && score > maxScore || !forward && score >= maxScore) {
|
||||
maxScore, maxScorePos = score, col
|
||||
}
|
||||
H[I+j0] = score
|
||||
}
|
||||
|
||||
if DEBUG {
|
||||
if i == 0 {
|
||||
fmt.Print(" ")
|
||||
for j := int(F[i]); j <= lastIdx; j++ {
|
||||
fmt.Printf(" " + string(input.Get(j)) + " ")
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
fmt.Print(string(pattern[i]) + " ")
|
||||
for idx := int(F[0]); idx < int(F[i]); idx++ {
|
||||
fmt.Print(" 0 ")
|
||||
}
|
||||
for idx := int(F[i]); idx <= lastIdx; idx++ {
|
||||
fmt.Printf("%2d ", H[i*width+idx-int(F[0])])
|
||||
}
|
||||
fmt.Println()
|
||||
|
||||
fmt.Print(" ")
|
||||
for idx, p := range C[I : I+width] {
|
||||
if idx+int(F[0]) < int(F[i]) {
|
||||
p = 0
|
||||
}
|
||||
fmt.Printf("%2d ", p)
|
||||
}
|
||||
fmt.Println()
|
||||
Hsub[off] = score
|
||||
}
|
||||
}
|
||||
|
||||
// Phase 3. (Optional) Backtrace to find character positions
if DEBUG {
	debugV2(T, pattern, F, lastIdx, H, C)
}

// Phase 4. (Optional) Backtrace to find character positions
|
||||
pos := posArray(withPos, M)
|
||||
j := int(F[0])
|
||||
j := f0
|
||||
if withPos {
|
||||
i := M - 1
|
||||
j = maxScorePos
|
||||
preferMatch := true
|
||||
for {
|
||||
I := i * width
|
||||
j0 := j - int(F[0])
|
||||
j0 := j - f0
|
||||
s := H[I+j0]
|
||||
|
||||
var s1, s2 int16
|
||||
@@ -415,7 +633,7 @@ func FuzzyMatchV2(caseSensitive bool, forward bool, input util.Chars, pattern []
|
||||
}
|
||||
|
||||
if s > s1 && (s > s2 || s == s2 && preferMatch) {
|
||||
*pos = append(*pos, j)
|
||||
*pos = append(*pos, j+minIdx)
|
||||
if i == 0 {
|
||||
break
|
||||
}
|
||||
@@ -428,14 +646,14 @@ func FuzzyMatchV2(caseSensitive bool, forward bool, input util.Chars, pattern []
|
||||
// Start offset we return here is only relevant when begin tiebreak is used.
// However finding the accurate offset requires backtracking, and we don't
// want to pay extra cost for the option that has lost its importance.
return Result{j, maxScorePos + 1, int(maxScore)}, pos
return Result{minIdx + j, minIdx + maxScorePos + 1, int(maxScore)}, pos
|
||||
}
|
||||
|
||||
// Implement the same sorting criteria as V2
|
||||
func calculateScore(caseSensitive bool, text util.Chars, pattern []rune, sidx int, eidx int, withPos bool) (int, *[]int) {
|
||||
func calculateScore(caseSensitive bool, normalize bool, text *util.Chars, pattern []rune, sidx int, eidx int, withPos bool) (int, *[]int) {
|
||||
pidx, score, inGap, consecutive, firstBonus := 0, 0, false, 0, int16(0)
|
||||
pos := posArray(withPos, len(pattern))
|
||||
prevClass := charNonWord
|
||||
prevClass := initialCharClass
|
||||
if sidx > 0 {
|
||||
prevClass = charClassOf(text.Get(sidx - 1))
|
||||
}
|
||||
@@ -449,17 +667,21 @@ func calculateScore(caseSensitive bool, text util.Chars, pattern []rune, sidx in
|
||||
char = unicode.To(unicode.LowerCase, char)
|
||||
}
|
||||
}
|
||||
// pattern is already normalized
|
||||
if normalize {
|
||||
char = normalizeRune(char)
|
||||
}
|
||||
if char == pattern[pidx] {
|
||||
if withPos {
|
||||
*pos = append(*pos, idx)
|
||||
}
|
||||
score += scoreMatch
|
||||
bonus := bonusFor(prevClass, class)
|
||||
bonus := bonusMatrix[prevClass][class]
|
||||
if consecutive == 0 {
|
||||
firstBonus = bonus
|
||||
} else {
|
||||
// Break consecutive chunk
|
||||
if bonus == bonusBoundary {
|
||||
if bonus >= bonusBoundary && bonus > firstBonus {
|
||||
firstBonus = bonus
|
||||
}
|
||||
bonus = util.Max16(util.Max16(bonus, firstBonus), bonusConsecutive)
|
||||
@@ -474,7 +696,7 @@ func calculateScore(caseSensitive bool, text util.Chars, pattern []rune, sidx in
|
||||
pidx++
|
||||
} else {
|
||||
if inGap {
|
||||
score += scoreGapExtention
|
||||
score += scoreGapExtension
|
||||
} else {
|
||||
score += scoreGapStart
|
||||
}
|
||||
@@ -488,10 +710,14 @@ func calculateScore(caseSensitive bool, text util.Chars, pattern []rune, sidx in
|
||||
}
|
||||
|
||||
// FuzzyMatchV1 performs fuzzy-match
|
||||
func FuzzyMatchV1(caseSensitive bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
if len(pattern) == 0 {
|
||||
return Result{0, 0, 0}, nil
|
||||
}
|
||||
idx, _ := asciiFuzzyIndex(text, pattern, caseSensitive)
|
||||
if idx < 0 {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
pidx := 0
|
||||
sidx := -1
|
||||
@@ -514,6 +740,9 @@ func FuzzyMatchV1(caseSensitive bool, forward bool, text util.Chars, pattern []r
|
||||
char = unicode.To(unicode.LowerCase, char)
|
||||
}
|
||||
}
|
||||
if normalize {
|
||||
char = normalizeRune(char)
|
||||
}
|
||||
pchar := pattern[indexAt(pidx, lenPattern, forward)]
|
||||
if char == pchar {
|
||||
if sidx < 0 {
|
||||
@@ -553,7 +782,7 @@ func FuzzyMatchV1(caseSensitive bool, forward bool, text util.Chars, pattern []r
|
||||
sidx, eidx = lenRunes-eidx, lenRunes-sidx
|
||||
}
|
||||
|
||||
score, pos := calculateScore(caseSensitive, text, pattern, sidx, eidx, withPos)
|
||||
score, pos := calculateScore(caseSensitive, normalize, text, pattern, sidx, eidx, withPos)
|
||||
return Result{sidx, eidx, score}, pos
|
||||
}
|
||||
return Result{-1, -1, 0}, nil
|
||||
@@ -568,7 +797,15 @@ func FuzzyMatchV1(caseSensitive bool, forward bool, text util.Chars, pattern []r
|
||||
// bonus point, instead of stopping immediately after finding the first match.
// The solution is much cheaper since there is only one possible alignment of
// the pattern.
|
||||
func ExactMatchNaive(caseSensitive bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
func ExactMatchNaive(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
return exactMatchNaive(caseSensitive, normalize, forward, false, text, pattern, withPos, slab)
|
||||
}
|
||||
|
||||
func ExactMatchBoundary(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
return exactMatchNaive(caseSensitive, normalize, forward, true, text, pattern, withPos, slab)
|
||||
}
|
||||
|
||||
func exactMatchNaive(caseSensitive bool, normalize bool, forward bool, boundaryCheck bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
if len(pattern) == 0 {
|
||||
return Result{0, 0, 0}, nil
|
||||
}
|
||||
@@ -580,6 +817,11 @@ func ExactMatchNaive(caseSensitive bool, forward bool, text util.Chars, pattern
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
idx, _ := asciiFuzzyIndex(text, pattern, caseSensitive)
|
||||
if idx < 0 {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
// For simplicity, only look at the bonus at the first character position
|
||||
pidx := 0
|
||||
bestPos, bonus, bestBonus := -1, int16(0), int16(-1)
|
||||
@@ -593,18 +835,33 @@ func ExactMatchNaive(caseSensitive bool, forward bool, text util.Chars, pattern
|
||||
char = unicode.To(unicode.LowerCase, char)
|
||||
}
|
||||
}
|
||||
if normalize {
|
||||
char = normalizeRune(char)
|
||||
}
|
||||
pidx_ := indexAt(pidx, lenPattern, forward)
|
||||
pchar := pattern[pidx_]
|
||||
if pchar == char {
|
||||
ok := pchar == char
|
||||
if ok {
|
||||
if pidx_ == 0 {
|
||||
bonus = bonusAt(text, index_)
|
||||
}
|
||||
if boundaryCheck {
|
||||
ok = bonus >= bonusBoundary
|
||||
if ok && pidx_ == 0 {
|
||||
ok = index_ == 0 || charClassOf(text.Get(index_-1)) <= charDelimiter
|
||||
}
|
||||
if ok && pidx_ == len(pattern)-1 {
|
||||
ok = index_ == lenRunes-1 || charClassOf(text.Get(index_+1)) <= charDelimiter
|
||||
}
|
||||
}
|
||||
}
|
||||
if ok {
|
||||
pidx++
|
||||
if pidx == lenPattern {
|
||||
if bonus > bestBonus {
|
||||
bestPos, bestBonus = index, bonus
|
||||
}
|
||||
if bonus == bonusBoundary {
|
||||
if bonus >= bonusBoundary {
|
||||
break
|
||||
}
|
||||
index -= pidx - 1
|
||||
@@ -624,40 +881,67 @@ func ExactMatchNaive(caseSensitive bool, forward bool, text util.Chars, pattern
|
||||
sidx = lenRunes - (bestPos + 1)
|
||||
eidx = lenRunes - (bestPos - lenPattern + 1)
|
||||
}
|
||||
score, _ := calculateScore(caseSensitive, text, pattern, sidx, eidx, false)
|
||||
var score int
|
||||
if boundaryCheck {
|
||||
// Underscore boundaries should be ranked lower than the other types of boundaries
|
||||
score = int(bonus)
|
||||
deduct := int(bonus-bonusBoundary) + 1
|
||||
if sidx > 0 && text.Get(sidx-1) == '_' {
|
||||
score -= deduct + 1
|
||||
deduct = 1
|
||||
}
|
||||
if eidx < lenRunes && text.Get(eidx) == '_' {
|
||||
score -= deduct
|
||||
}
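// For example, when bonus == bonusBoundary, deduct starts at 1: an underscore
// immediately before the match costs deduct+1 = 2 points (and resets deduct
// to 1), and an underscore immediately after it costs 1 more, so a match
// wrapped in underscores on both sides ends up 3 points below an otherwise
// identical boundary match.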
|
||||
// Add base score so that this can compete with other match types e.g. 'foo' | bar
|
||||
score += scoreMatch*lenPattern + int(bonusBoundaryWhite)*(lenPattern+1)
|
||||
} else {
|
||||
score, _ = calculateScore(caseSensitive, normalize, text, pattern, sidx, eidx, false)
|
||||
}
|
||||
return Result{sidx, eidx, score}, nil
|
||||
}
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
// PrefixMatch performs prefix-match
|
||||
func PrefixMatch(caseSensitive bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
func PrefixMatch(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
if len(pattern) == 0 {
|
||||
return Result{0, 0, 0}, nil
|
||||
}
|
||||
|
||||
if text.Length() < len(pattern) {
|
||||
trimmedLen := 0
|
||||
if !unicode.IsSpace(pattern[0]) {
|
||||
trimmedLen = text.LeadingWhitespaces()
|
||||
}
|
||||
|
||||
if text.Length()-trimmedLen < len(pattern) {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
for index, r := range pattern {
|
||||
char := text.Get(index)
|
||||
char := text.Get(trimmedLen + index)
|
||||
if !caseSensitive {
|
||||
char = unicode.ToLower(char)
|
||||
}
|
||||
if normalize {
|
||||
char = normalizeRune(char)
|
||||
}
|
||||
if char != r {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
}
|
||||
lenPattern := len(pattern)
|
||||
score, _ := calculateScore(caseSensitive, text, pattern, 0, lenPattern, false)
|
||||
return Result{0, lenPattern, score}, nil
|
||||
score, _ := calculateScore(caseSensitive, normalize, text, pattern, trimmedLen, trimmedLen+lenPattern, false)
|
||||
return Result{trimmedLen, trimmedLen + lenPattern, score}, nil
|
||||
}
|
||||
|
||||
// SuffixMatch performs suffix-match
|
||||
func SuffixMatch(caseSensitive bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
func SuffixMatch(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
lenRunes := text.Length()
|
||||
trimmedLen := lenRunes - text.TrailingWhitespaces()
|
||||
trimmedLen := lenRunes
|
||||
if len(pattern) == 0 || !unicode.IsSpace(pattern[len(pattern)-1]) {
|
||||
trimmedLen -= text.TrailingWhitespaces()
|
||||
}
|
||||
if len(pattern) == 0 {
|
||||
return Result{trimmedLen, trimmedLen, 0}, nil
|
||||
}
|
||||
@@ -671,6 +955,9 @@ func SuffixMatch(caseSensitive bool, forward bool, text util.Chars, pattern []ru
|
||||
if !caseSensitive {
|
||||
char = unicode.ToLower(char)
|
||||
}
|
||||
if normalize {
|
||||
char = normalizeRune(char)
|
||||
}
|
||||
if char != r {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
@@ -678,23 +965,56 @@ func SuffixMatch(caseSensitive bool, forward bool, text util.Chars, pattern []ru
|
||||
lenPattern := len(pattern)
|
||||
sidx := trimmedLen - lenPattern
|
||||
eidx := trimmedLen
|
||||
score, _ := calculateScore(caseSensitive, text, pattern, sidx, eidx, false)
|
||||
score, _ := calculateScore(caseSensitive, normalize, text, pattern, sidx, eidx, false)
|
||||
return Result{sidx, eidx, score}, nil
|
||||
}
|
||||
|
||||
// EqualMatch performs equal-match
|
||||
func EqualMatch(caseSensitive bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
func EqualMatch(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||
lenPattern := len(pattern)
|
||||
if text.Length() != lenPattern {
|
||||
if lenPattern == 0 {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
runesStr := text.ToString()
|
||||
if !caseSensitive {
|
||||
runesStr = strings.ToLower(runesStr)
|
||||
|
||||
// Strip leading whitespaces
|
||||
trimmedLen := 0
|
||||
if !unicode.IsSpace(pattern[0]) {
|
||||
trimmedLen = text.LeadingWhitespaces()
|
||||
}
|
||||
if runesStr == string(pattern) {
|
||||
return Result{0, lenPattern, (scoreMatch+bonusBoundary)*lenPattern +
|
||||
(bonusFirstCharMultiplier-1)*bonusBoundary}, nil
|
||||
|
||||
// Strip trailing whitespaces
|
||||
trimmedEndLen := 0
|
||||
if !unicode.IsSpace(pattern[lenPattern-1]) {
|
||||
trimmedEndLen = text.TrailingWhitespaces()
|
||||
}
|
||||
|
||||
if text.Length()-trimmedLen-trimmedEndLen != lenPattern {
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
match := true
|
||||
if normalize {
|
||||
runes := text.ToRunes()
|
||||
for idx, pchar := range pattern {
|
||||
char := runes[trimmedLen+idx]
|
||||
if !caseSensitive {
|
||||
char = unicode.To(unicode.LowerCase, char)
|
||||
}
|
||||
if normalizeRune(pchar) != normalizeRune(char) {
|
||||
match = false
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
runes := text.ToRunes()
|
||||
runesStr := string(runes[trimmedLen : len(runes)-trimmedEndLen])
|
||||
if !caseSensitive {
|
||||
runesStr = strings.ToLower(runesStr)
|
||||
}
|
||||
match = runesStr == string(pattern)
|
||||
}
|
||||
if match {
|
||||
return Result{trimmedLen, trimmedLen + lenPattern, (scoreMatch+int(bonusBoundaryWhite))*lenPattern +
|
||||
(bonusFirstCharMultiplier-1)*int(bonusBoundaryWhite)}, nil
|
||||
}
|
||||
return Result{-1, -1, 0}, nil
|
||||
}
|
||||
|
||||
@@ -9,11 +9,20 @@ import (
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
func init() {
|
||||
Init("default")
|
||||
}
|
||||
|
||||
func assertMatch(t *testing.T, fun Algo, caseSensitive, forward bool, input, pattern string, sidx int, eidx int, score int) {
|
||||
assertMatch2(t, fun, caseSensitive, false, forward, input, pattern, sidx, eidx, score)
|
||||
}
|
||||
|
||||
func assertMatch2(t *testing.T, fun Algo, caseSensitive, normalize, forward bool, input, pattern string, sidx int, eidx int, score int) {
|
||||
if !caseSensitive {
|
||||
pattern = strings.ToLower(pattern)
|
||||
}
|
||||
res, pos := fun(caseSensitive, forward, util.RunesToChars([]rune(input)), []rune(pattern), true, nil)
|
||||
chars := util.ToChars([]byte(input))
|
||||
res, pos := fun(caseSensitive, normalize, forward, &chars, []rune(pattern), true, nil)
|
||||
var start, end int
|
||||
if pos == nil || len(*pos) == 0 {
|
||||
start = res.Start
|
||||
@@ -38,46 +47,46 @@ func TestFuzzyMatch(t *testing.T) {
|
||||
for _, fn := range []Algo{FuzzyMatchV1, FuzzyMatchV2} {
|
||||
for _, forward := range []bool{true, false} {
|
||||
assertMatch(t, fn, false, forward, "fooBarbaz1", "oBZ", 2, 9,
|
||||
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtention*3)
|
||||
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtension*3)
|
||||
assertMatch(t, fn, false, forward, "foo bar baz", "fbb", 0, 9,
|
||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+
|
||||
bonusBoundary*2+2*scoreGapStart+4*scoreGapExtention)
|
||||
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||
int(bonusBoundaryWhite)*2+2*scoreGapStart+4*scoreGapExtension)
|
||||
assertMatch(t, fn, false, forward, "/AutomatorDocument.icns", "rdoc", 9, 13,
|
||||
scoreMatch*4+bonusCamel123+bonusConsecutive*2)
|
||||
assertMatch(t, fn, false, forward, "/man1/zshcompctl.1", "zshc", 6, 10,
|
||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*3)
|
||||
scoreMatch*4+int(bonusBoundaryDelimiter)*bonusFirstCharMultiplier+int(bonusBoundaryDelimiter)*3)
|
||||
assertMatch(t, fn, false, forward, "/.oh-my-zsh/cache", "zshc", 8, 13,
|
||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*3+scoreGapStart)
|
||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*2+scoreGapStart+int(bonusBoundaryDelimiter))
|
||||
assertMatch(t, fn, false, forward, "ab0123 456", "12356", 3, 10,
|
||||
scoreMatch*5+bonusConsecutive*3+scoreGapStart+scoreGapExtention)
|
||||
scoreMatch*5+bonusConsecutive*3+scoreGapStart+scoreGapExtension)
|
||||
assertMatch(t, fn, false, forward, "abc123 456", "12356", 3, 10,
|
||||
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+bonusConsecutive+scoreGapStart+scoreGapExtention)
|
||||
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+bonusConsecutive+scoreGapStart+scoreGapExtension)
|
||||
assertMatch(t, fn, false, forward, "foo/bar/baz", "fbb", 0, 9,
|
||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+
|
||||
bonusBoundary*2+2*scoreGapStart+4*scoreGapExtention)
|
||||
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||
int(bonusBoundaryDelimiter)*2+2*scoreGapStart+4*scoreGapExtension)
|
||||
assertMatch(t, fn, false, forward, "fooBarBaz", "fbb", 0, 7,
|
||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+
|
||||
bonusCamel123*2+2*scoreGapStart+2*scoreGapExtention)
|
||||
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||
bonusCamel123*2+2*scoreGapStart+2*scoreGapExtension)
|
||||
assertMatch(t, fn, false, forward, "foo barbaz", "fbb", 0, 8,
|
||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary+
|
||||
scoreGapStart*2+scoreGapExtention*3)
|
||||
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite)+
|
||||
scoreGapStart*2+scoreGapExtension*3)
|
||||
assertMatch(t, fn, false, forward, "fooBar Baz", "foob", 0, 4,
|
||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*3)
|
||||
scoreMatch*4+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite)*3)
|
||||
assertMatch(t, fn, false, forward, "xFoo-Bar Baz", "foo-b", 1, 6,
|
||||
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+
|
||||
bonusNonWord+bonusBoundary)
|
||||
|
||||
assertMatch(t, fn, true, forward, "fooBarbaz", "oBz", 2, 9,
|
||||
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtention*3)
|
||||
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtension*3)
|
||||
assertMatch(t, fn, true, forward, "Foo/Bar/Baz", "FBB", 0, 9,
|
||||
scoreMatch*3+bonusBoundary*(bonusFirstCharMultiplier+2)+
|
||||
scoreGapStart*2+scoreGapExtention*4)
|
||||
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryDelimiter)*2+
|
||||
scoreGapStart*2+scoreGapExtension*4)
|
||||
assertMatch(t, fn, true, forward, "FooBarBaz", "FBB", 0, 7,
|
||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+bonusCamel123*2+
|
||||
scoreGapStart*2+scoreGapExtention*2)
|
||||
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+bonusCamel123*2+
|
||||
scoreGapStart*2+scoreGapExtension*2)
|
||||
assertMatch(t, fn, true, forward, "FooBar Baz", "FooB", 0, 4,
|
||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*2+
|
||||
util.Max(bonusCamel123, bonusBoundary))
|
||||
scoreMatch*4+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite)*2+
|
||||
util.Max(bonusCamel123, int(bonusBoundaryWhite)))
|
||||
|
||||
// Consecutive bonus updated
|
||||
assertMatch(t, fn, true, forward, "foo-bar", "o-ba", 2, 6,
|
||||
@@ -93,10 +102,10 @@ func TestFuzzyMatch(t *testing.T) {
|
||||
|
||||
func TestFuzzyMatchBackward(t *testing.T) {
|
||||
assertMatch(t, FuzzyMatchV1, false, true, "foobar fb", "fb", 0, 4,
|
||||
scoreMatch*2+bonusBoundary*bonusFirstCharMultiplier+
|
||||
scoreGapStart+scoreGapExtention)
|
||||
scoreMatch*2+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||
scoreGapStart+scoreGapExtension)
|
||||
assertMatch(t, FuzzyMatchV1, false, false, "foobar fb", "fb", 7, 9,
|
||||
scoreMatch*2+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary)
|
||||
scoreMatch*2+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite))
|
||||
}
|
||||
|
||||
func TestExactMatchNaive(t *testing.T) {
|
||||
@@ -109,9 +118,9 @@ func TestExactMatchNaive(t *testing.T) {
|
||||
assertMatch(t, ExactMatchNaive, false, dir, "/AutomatorDocument.icns", "rdoc", 9, 13,
|
||||
scoreMatch*4+bonusCamel123+bonusConsecutive*2)
|
||||
assertMatch(t, ExactMatchNaive, false, dir, "/man1/zshcompctl.1", "zshc", 6, 10,
|
||||
scoreMatch*4+bonusBoundary*(bonusFirstCharMultiplier+3))
|
||||
scoreMatch*4+int(bonusBoundaryDelimiter)*(bonusFirstCharMultiplier+3))
|
||||
assertMatch(t, ExactMatchNaive, false, dir, "/.oh-my-zsh/cache", "zsh/c", 8, 13,
|
||||
scoreMatch*5+bonusBoundary*(bonusFirstCharMultiplier+4))
|
||||
scoreMatch*5+bonusBoundary*(bonusFirstCharMultiplier+3)+int(bonusBoundaryDelimiter))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -123,7 +132,7 @@ func TestExactMatchNaiveBackward(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPrefixMatch(t *testing.T) {
|
||||
score := (scoreMatch+bonusBoundary)*3 + bonusBoundary*(bonusFirstCharMultiplier-1)
|
||||
score := scoreMatch*3 + int(bonusBoundaryWhite)*bonusFirstCharMultiplier + int(bonusBoundaryWhite)*2
|
||||
|
||||
for _, dir := range []bool{true, false} {
|
||||
assertMatch(t, PrefixMatch, true, dir, "fooBarbaz", "Foo", -1, -1, 0)
|
||||
@@ -131,6 +140,10 @@ func TestPrefixMatch(t *testing.T) {
|
||||
assertMatch(t, PrefixMatch, false, dir, "fooBarbaz", "Foo", 0, 3, score)
|
||||
assertMatch(t, PrefixMatch, false, dir, "foOBarBaZ", "foo", 0, 3, score)
|
||||
assertMatch(t, PrefixMatch, false, dir, "f-oBarbaz", "f-o", 0, 3, score)
|
||||
|
||||
assertMatch(t, PrefixMatch, false, dir, " fooBar", "foo", 1, 4, score)
|
||||
assertMatch(t, PrefixMatch, false, dir, " fooBar", " fo", 0, 3, score)
|
||||
assertMatch(t, PrefixMatch, false, dir, " fo", "foo", -1, -1, 0)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,6 +156,14 @@ func TestSuffixMatch(t *testing.T) {
|
||||
scoreMatch*3+bonusConsecutive*2)
|
||||
assertMatch(t, SuffixMatch, false, dir, "fooBarBaZ", "baz", 6, 9,
|
||||
(scoreMatch+bonusCamel123)*3+bonusCamel123*(bonusFirstCharMultiplier-1))
|
||||
|
||||
// Strip trailing white space from the string
|
||||
assertMatch(t, SuffixMatch, false, dir, "fooBarbaz ", "baz", 6, 9,
|
||||
scoreMatch*3+bonusConsecutive*2)
|
||||
|
||||
// Only when the pattern doesn't end with a space
|
||||
assertMatch(t, SuffixMatch, false, dir, "fooBarbaz ", "baz ", 6, 10,
|
||||
scoreMatch*4+bonusConsecutive*2+int(bonusBoundaryWhite))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,6 +177,21 @@ func TestEmptyPattern(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalize(t *testing.T) {
|
||||
caseSensitive := false
|
||||
normalize := true
|
||||
forward := true
|
||||
test := func(input, pattern string, sidx, eidx, score int, funs ...Algo) {
|
||||
for _, fun := range funs {
|
||||
assertMatch2(t, fun, caseSensitive, normalize, forward,
|
||||
input, pattern, sidx, eidx, score)
|
||||
}
|
||||
}
|
||||
test("Só Danço Samba", "So", 0, 2, 62, FuzzyMatchV1, FuzzyMatchV2, PrefixMatch, ExactMatchNaive)
|
||||
test("Só Danço Samba", "sodc", 0, 7, 97, FuzzyMatchV1, FuzzyMatchV2)
|
||||
test("Danço", "danco", 0, 5, 140, FuzzyMatchV1, FuzzyMatchV2, PrefixMatch, SuffixMatch, ExactMatchNaive, EqualMatch)
|
||||
}
|
||||
|
||||
func TestLongString(t *testing.T) {
|
||||
bytes := make([]byte, math.MaxUint16*2)
|
||||
for i := range bytes {
|
||||
|
||||
492  src/algo/normalize.go  (new file)
@@ -0,0 +1,492 @@
|
||||
// Normalization of latin script letters
// Reference: http://www.unicode.org/Public/UCD/latest/ucd/Index.txt

package algo
|
||||
|
||||
var normalized = map[rune]rune{
|
||||
0x00E1: 'a', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0103: 'a', // WITH BREVE, LATIN SMALL LETTER
|
||||
0x01CE: 'a', // WITH CARON, LATIN SMALL LETTER
|
||||
0x00E2: 'a', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x00E4: 'a', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x0227: 'a', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1EA1: 'a', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0201: 'a', // WITH DOUBLE GRAVE, LATIN SMALL LETTER
|
||||
0x00E0: 'a', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1EA3: 'a', // WITH HOOK ABOVE, LATIN SMALL LETTER
|
||||
0x0203: 'a', // WITH INVERTED BREVE, LATIN SMALL LETTER
|
||||
0x0101: 'a', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x0105: 'a', // WITH OGONEK, LATIN SMALL LETTER
|
||||
0x1E9A: 'a', // WITH RIGHT HALF RING, LATIN SMALL LETTER
|
||||
0x00E5: 'a', // WITH RING ABOVE, LATIN SMALL LETTER
|
||||
0x1E01: 'a', // WITH RING BELOW, LATIN SMALL LETTER
|
||||
0x00E3: 'a', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x0363: 'a', // , COMBINING LATIN SMALL LETTER
|
||||
0x0250: 'a', // , LATIN SMALL LETTER TURNED
|
||||
0x1E03: 'b', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E05: 'b', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0253: 'b', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E07: 'b', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x0180: 'b', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x0183: 'b', // WITH TOPBAR, LATIN SMALL LETTER
|
||||
0x0107: 'c', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x010D: 'c', // WITH CARON, LATIN SMALL LETTER
|
||||
0x00E7: 'c', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x0109: 'c', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x0255: 'c', // WITH CURL, LATIN SMALL LETTER
|
||||
0x010B: 'c', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x0188: 'c', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x023C: 'c', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x0368: 'c', // , COMBINING LATIN SMALL LETTER
|
||||
0x0297: 'c', // , LATIN LETTER STRETCHED
|
||||
0x2184: 'c', // , LATIN SMALL LETTER REVERSED
|
||||
0x010F: 'd', // WITH CARON, LATIN SMALL LETTER
|
||||
0x1E11: 'd', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E13: 'd', // WITH CIRCUMFLEX BELOW, LATIN SMALL LETTER
|
||||
0x0221: 'd', // WITH CURL, LATIN SMALL LETTER
|
||||
0x1E0B: 'd', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E0D: 'd', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0257: 'd', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E0F: 'd', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x0111: 'd', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x0256: 'd', // WITH TAIL, LATIN SMALL LETTER
|
||||
0x018C: 'd', // WITH TOPBAR, LATIN SMALL LETTER
|
||||
0x0369: 'd', // , COMBINING LATIN SMALL LETTER
|
||||
0x00E9: 'e', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0115: 'e', // WITH BREVE, LATIN SMALL LETTER
|
||||
0x011B: 'e', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0229: 'e', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E19: 'e', // WITH CIRCUMFLEX BELOW, LATIN SMALL LETTER
|
||||
0x00EA: 'e', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x00EB: 'e', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x0117: 'e', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1EB9: 'e', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0205: 'e', // WITH DOUBLE GRAVE, LATIN SMALL LETTER
|
||||
0x00E8: 'e', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1EBB: 'e', // WITH HOOK ABOVE, LATIN SMALL LETTER
|
||||
0x025D: 'e', // WITH HOOK, LATIN SMALL LETTER REVERSED OPEN
|
||||
0x0207: 'e', // WITH INVERTED BREVE, LATIN SMALL LETTER
|
||||
0x0113: 'e', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x0119: 'e', // WITH OGONEK, LATIN SMALL LETTER
|
||||
0x0247: 'e', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x1E1B: 'e', // WITH TILDE BELOW, LATIN SMALL LETTER
|
||||
0x1EBD: 'e', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x0364: 'e', // , COMBINING LATIN SMALL LETTER
|
||||
0x029A: 'e', // , LATIN SMALL LETTER CLOSED OPEN
|
||||
0x025E: 'e', // , LATIN SMALL LETTER CLOSED REVERSED OPEN
|
||||
0x025B: 'e', // , LATIN SMALL LETTER OPEN
|
||||
0x0258: 'e', // , LATIN SMALL LETTER REVERSED
|
||||
0x025C: 'e', // , LATIN SMALL LETTER REVERSED OPEN
|
||||
0x01DD: 'e', // , LATIN SMALL LETTER TURNED
|
||||
0x1D08: 'e', // , LATIN SMALL LETTER TURNED OPEN
|
||||
0x1E1F: 'f', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x0192: 'f', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x01F5: 'g', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x011F: 'g', // WITH BREVE, LATIN SMALL LETTER
|
||||
0x01E7: 'g', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0123: 'g', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x011D: 'g', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x0121: 'g', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x0260: 'g', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E21: 'g', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x01E5: 'g', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x0261: 'g', // , LATIN SMALL LETTER SCRIPT
|
||||
0x1E2B: 'h', // WITH BREVE BELOW, LATIN SMALL LETTER
|
||||
0x021F: 'h', // WITH CARON, LATIN SMALL LETTER
|
||||
0x1E29: 'h', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x0125: 'h', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x1E27: 'h', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1E23: 'h', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E25: 'h', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x02AE: 'h', // WITH FISHHOOK, LATIN SMALL LETTER TURNED
|
||||
0x0266: 'h', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E96: 'h', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x0127: 'h', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x036A: 'h', // , COMBINING LATIN SMALL LETTER
|
||||
0x0265: 'h', // , LATIN SMALL LETTER TURNED
|
||||
0x2095: 'h', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x00ED: 'i', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x012D: 'i', // WITH BREVE, LATIN SMALL LETTER
|
||||
0x01D0: 'i', // WITH CARON, LATIN SMALL LETTER
|
||||
0x00EE: 'i', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x00EF: 'i', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1ECB: 'i', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0209: 'i', // WITH DOUBLE GRAVE, LATIN SMALL LETTER
|
||||
0x00EC: 'i', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1EC9: 'i', // WITH HOOK ABOVE, LATIN SMALL LETTER
|
||||
0x020B: 'i', // WITH INVERTED BREVE, LATIN SMALL LETTER
|
||||
0x012B: 'i', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x012F: 'i', // WITH OGONEK, LATIN SMALL LETTER
|
||||
0x0268: 'i', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x1E2D: 'i', // WITH TILDE BELOW, LATIN SMALL LETTER
|
||||
0x0129: 'i', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x0365: 'i', // , COMBINING LATIN SMALL LETTER
|
||||
0x0131: 'i', // , LATIN SMALL LETTER DOTLESS
|
||||
0x1D09: 'i', // , LATIN SMALL LETTER TURNED
|
||||
0x1D62: 'i', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x2071: 'i', // , SUPERSCRIPT LATIN SMALL LETTER
|
||||
0x01F0: 'j', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0135: 'j', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x029D: 'j', // WITH CROSSED-TAIL, LATIN SMALL LETTER
|
||||
0x0249: 'j', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x025F: 'j', // WITH STROKE, LATIN SMALL LETTER DOTLESS
|
||||
0x0237: 'j', // , LATIN SMALL LETTER DOTLESS
|
||||
0x1E31: 'k', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x01E9: 'k', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0137: 'k', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E33: 'k', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0199: 'k', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E35: 'k', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x029E: 'k', // , LATIN SMALL LETTER TURNED
|
||||
0x2096: 'k', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x013A: 'l', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x019A: 'l', // WITH BAR, LATIN SMALL LETTER
|
||||
0x026C: 'l', // WITH BELT, LATIN SMALL LETTER
|
||||
0x013E: 'l', // WITH CARON, LATIN SMALL LETTER
|
||||
0x013C: 'l', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E3D: 'l', // WITH CIRCUMFLEX BELOW, LATIN SMALL LETTER
|
||||
0x0234: 'l', // WITH CURL, LATIN SMALL LETTER
|
||||
0x1E37: 'l', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x1E3B: 'l', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x0140: 'l', // WITH MIDDLE DOT, LATIN SMALL LETTER
|
||||
0x026B: 'l', // WITH MIDDLE TILDE, LATIN SMALL LETTER
|
||||
0x026D: 'l', // WITH RETROFLEX HOOK, LATIN SMALL LETTER
|
||||
0x0142: 'l', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x2097: 'l', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x1E3F: 'm', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x1E41: 'm', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E43: 'm', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0271: 'm', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x0270: 'm', // WITH LONG LEG, LATIN SMALL LETTER TURNED
|
||||
0x036B: 'm', // , COMBINING LATIN SMALL LETTER
|
||||
0x1D1F: 'm', // , LATIN SMALL LETTER SIDEWAYS TURNED
|
||||
0x026F: 'm', // , LATIN SMALL LETTER TURNED
|
||||
0x2098: 'm', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x0144: 'n', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0148: 'n', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0146: 'n', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E4B: 'n', // WITH CIRCUMFLEX BELOW, LATIN SMALL LETTER
|
||||
0x0235: 'n', // WITH CURL, LATIN SMALL LETTER
|
||||
0x1E45: 'n', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E47: 'n', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x01F9: 'n', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x0272: 'n', // WITH LEFT HOOK, LATIN SMALL LETTER
|
||||
0x1E49: 'n', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x019E: 'n', // WITH LONG RIGHT LEG, LATIN SMALL LETTER
|
||||
0x0273: 'n', // WITH RETROFLEX HOOK, LATIN SMALL LETTER
|
||||
0x00F1: 'n', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x2099: 'n', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x00F3: 'o', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x014F: 'o', // WITH BREVE, LATIN SMALL LETTER
|
||||
0x01D2: 'o', // WITH CARON, LATIN SMALL LETTER
|
||||
0x00F4: 'o', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x00F6: 'o', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x022F: 'o', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1ECD: 'o', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0151: 'o', // WITH DOUBLE ACUTE, LATIN SMALL LETTER
|
||||
0x020D: 'o', // WITH DOUBLE GRAVE, LATIN SMALL LETTER
|
||||
0x00F2: 'o', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1ECF: 'o', // WITH HOOK ABOVE, LATIN SMALL LETTER
|
||||
0x01A1: 'o', // WITH HORN, LATIN SMALL LETTER
|
||||
0x020F: 'o', // WITH INVERTED BREVE, LATIN SMALL LETTER
|
||||
0x014D: 'o', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x01EB: 'o', // WITH OGONEK, LATIN SMALL LETTER
|
||||
0x00F8: 'o', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x1D13: 'o', // WITH STROKE, LATIN SMALL LETTER SIDEWAYS
|
||||
0x00F5: 'o', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x0366: 'o', // , COMBINING LATIN SMALL LETTER
|
||||
0x0275: 'o', // , LATIN SMALL LETTER BARRED
|
||||
0x1D17: 'o', // , LATIN SMALL LETTER BOTTOM HALF
|
||||
0x0254: 'o', // , LATIN SMALL LETTER OPEN
|
||||
0x1D11: 'o', // , LATIN SMALL LETTER SIDEWAYS
|
||||
0x1D12: 'o', // , LATIN SMALL LETTER SIDEWAYS OPEN
|
||||
0x1D16: 'o', // , LATIN SMALL LETTER TOP HALF
|
||||
0x1E55: 'p', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x1E57: 'p', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x01A5: 'p', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x209A: 'p', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x024B: 'q', // WITH HOOK TAIL, LATIN SMALL LETTER
|
||||
0x02A0: 'q', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x0155: 'r', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0159: 'r', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0157: 'r', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E59: 'r', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E5B: 'r', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0211: 'r', // WITH DOUBLE GRAVE, LATIN SMALL LETTER
|
||||
0x027E: 'r', // WITH FISHHOOK, LATIN SMALL LETTER
|
||||
0x027F: 'r', // WITH FISHHOOK, LATIN SMALL LETTER REVERSED
|
||||
0x027B: 'r', // WITH HOOK, LATIN SMALL LETTER TURNED
|
||||
0x0213: 'r', // WITH INVERTED BREVE, LATIN SMALL LETTER
|
||||
0x1E5F: 'r', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x027C: 'r', // WITH LONG LEG, LATIN SMALL LETTER
|
||||
0x027A: 'r', // WITH LONG LEG, LATIN SMALL LETTER TURNED
|
||||
0x024D: 'r', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x027D: 'r', // WITH TAIL, LATIN SMALL LETTER
|
||||
0x036C: 'r', // , COMBINING LATIN SMALL LETTER
|
||||
0x0279: 'r', // , LATIN SMALL LETTER TURNED
|
||||
0x1D63: 'r', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x015B: 's', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0161: 's', // WITH CARON, LATIN SMALL LETTER
|
||||
0x015F: 's', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x015D: 's', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x0219: 's', // WITH COMMA BELOW, LATIN SMALL LETTER
|
||||
0x1E61: 's', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E9B: 's', // WITH DOT ABOVE, LATIN SMALL LETTER LONG
|
||||
0x1E63: 's', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0282: 's', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x023F: 's', // WITH SWASH TAIL, LATIN SMALL LETTER
|
||||
0x017F: 's', // , LATIN SMALL LETTER LONG
|
||||
0x00DF: 's', // , LATIN SMALL LETTER SHARP
|
||||
0x209B: 's', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x0165: 't', // WITH CARON, LATIN SMALL LETTER
|
||||
0x0163: 't', // WITH CEDILLA, LATIN SMALL LETTER
|
||||
0x1E71: 't', // WITH CIRCUMFLEX BELOW, LATIN SMALL LETTER
|
||||
0x021B: 't', // WITH COMMA BELOW, LATIN SMALL LETTER
|
||||
0x0236: 't', // WITH CURL, LATIN SMALL LETTER
|
||||
0x1E97: 't', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1E6B: 't', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E6D: 't', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x01AD: 't', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E6F: 't', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x01AB: 't', // WITH PALATAL HOOK, LATIN SMALL LETTER
|
||||
0x0288: 't', // WITH RETROFLEX HOOK, LATIN SMALL LETTER
|
||||
0x0167: 't', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x036D: 't', // , COMBINING LATIN SMALL LETTER
|
||||
0x0287: 't', // , LATIN SMALL LETTER TURNED
|
||||
0x209C: 't', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x0289: 'u', // BAR, LATIN SMALL LETTER
|
||||
0x00FA: 'u', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x016D: 'u', // WITH BREVE, LATIN SMALL LETTER
|
||||
0x01D4: 'u', // WITH CARON, LATIN SMALL LETTER
|
||||
0x1E77: 'u', // WITH CIRCUMFLEX BELOW, LATIN SMALL LETTER
|
||||
0x00FB: 'u', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x1E73: 'u', // WITH DIAERESIS BELOW, LATIN SMALL LETTER
|
||||
0x00FC: 'u', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1EE5: 'u', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0171: 'u', // WITH DOUBLE ACUTE, LATIN SMALL LETTER
|
||||
0x0215: 'u', // WITH DOUBLE GRAVE, LATIN SMALL LETTER
|
||||
0x00F9: 'u', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1EE7: 'u', // WITH HOOK ABOVE, LATIN SMALL LETTER
|
||||
0x01B0: 'u', // WITH HORN, LATIN SMALL LETTER
|
||||
0x0217: 'u', // WITH INVERTED BREVE, LATIN SMALL LETTER
|
||||
0x016B: 'u', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x0173: 'u', // WITH OGONEK, LATIN SMALL LETTER
|
||||
0x016F: 'u', // WITH RING ABOVE, LATIN SMALL LETTER
|
||||
0x1E75: 'u', // WITH TILDE BELOW, LATIN SMALL LETTER
|
||||
0x0169: 'u', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x0367: 'u', // , COMBINING LATIN SMALL LETTER
|
||||
0x1D1D: 'u', // , LATIN SMALL LETTER SIDEWAYS
|
||||
0x1D1E: 'u', // , LATIN SMALL LETTER SIDEWAYS DIAERESIZED
|
||||
0x1D64: 'u', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x1E7F: 'v', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x028B: 'v', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E7D: 'v', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x036E: 'v', // , COMBINING LATIN SMALL LETTER
|
||||
0x028C: 'v', // , LATIN SMALL LETTER TURNED
|
||||
0x1D65: 'v', // , LATIN SUBSCRIPT SMALL LETTER
|
||||
0x1E83: 'w', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0175: 'w', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x1E85: 'w', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1E87: 'w', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E89: 'w', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x1E81: 'w', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1E98: 'w', // WITH RING ABOVE, LATIN SMALL LETTER
|
||||
0x028D: 'w', // , LATIN SMALL LETTER TURNED
|
||||
0x1E8D: 'x', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1E8B: 'x', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x036F: 'x', // , COMBINING LATIN SMALL LETTER
|
||||
0x00FD: 'y', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x0177: 'y', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x00FF: 'y', // WITH DIAERESIS, LATIN SMALL LETTER
|
||||
0x1E8F: 'y', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1EF5: 'y', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x1EF3: 'y', // WITH GRAVE, LATIN SMALL LETTER
|
||||
0x1EF7: 'y', // WITH HOOK ABOVE, LATIN SMALL LETTER
|
||||
0x01B4: 'y', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x0233: 'y', // WITH MACRON, LATIN SMALL LETTER
|
||||
0x1E99: 'y', // WITH RING ABOVE, LATIN SMALL LETTER
|
||||
0x024F: 'y', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x1EF9: 'y', // WITH TILDE, LATIN SMALL LETTER
|
||||
0x028E: 'y', // , LATIN SMALL LETTER TURNED
|
||||
0x017A: 'z', // WITH ACUTE, LATIN SMALL LETTER
|
||||
0x017E: 'z', // WITH CARON, LATIN SMALL LETTER
|
||||
0x1E91: 'z', // WITH CIRCUMFLEX, LATIN SMALL LETTER
|
||||
0x0291: 'z', // WITH CURL, LATIN SMALL LETTER
|
||||
0x017C: 'z', // WITH DOT ABOVE, LATIN SMALL LETTER
|
||||
0x1E93: 'z', // WITH DOT BELOW, LATIN SMALL LETTER
|
||||
0x0225: 'z', // WITH HOOK, LATIN SMALL LETTER
|
||||
0x1E95: 'z', // WITH LINE BELOW, LATIN SMALL LETTER
|
||||
0x0290: 'z', // WITH RETROFLEX HOOK, LATIN SMALL LETTER
|
||||
0x01B6: 'z', // WITH STROKE, LATIN SMALL LETTER
|
||||
0x0240: 'z', // WITH SWASH TAIL, LATIN SMALL LETTER
|
||||
0x0251: 'a', // , latin small letter script
|
||||
0x00C1: 'A', // WITH ACUTE, LATIN CAPITAL LETTER
|
||||
0x00C2: 'A', // WITH CIRCUMFLEX, LATIN CAPITAL LETTER
|
||||
0x00C4: 'A', // WITH DIAERESIS, LATIN CAPITAL LETTER
|
||||
0x00C0: 'A', // WITH GRAVE, LATIN CAPITAL LETTER
|
||||
0x00C5: 'A', // WITH RING ABOVE, LATIN CAPITAL LETTER
|
||||
0x023A: 'A', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x00C3: 'A', // WITH TILDE, LATIN CAPITAL LETTER
|
||||
0x1D00: 'A', // , LATIN LETTER SMALL CAPITAL
|
||||
0x0181: 'B', // WITH HOOK, LATIN CAPITAL LETTER
|
||||
0x0243: 'B', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x0299: 'B', // , LATIN LETTER SMALL CAPITAL
|
||||
0x1D03: 'B', // , LATIN LETTER SMALL CAPITAL BARRED
|
||||
0x00C7: 'C', // WITH CEDILLA, LATIN CAPITAL LETTER
|
||||
0x023B: 'C', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x1D04: 'C', // , LATIN LETTER SMALL CAPITAL
|
||||
0x018A: 'D', // WITH HOOK, LATIN CAPITAL LETTER
|
||||
0x0189: 'D', // , LATIN CAPITAL LETTER AFRICAN
|
||||
0x1D05: 'D', // , LATIN LETTER SMALL CAPITAL
|
||||
0x00C9: 'E', // WITH ACUTE, LATIN CAPITAL LETTER
|
||||
0x00CA: 'E', // WITH CIRCUMFLEX, LATIN CAPITAL LETTER
|
||||
0x00CB: 'E', // WITH DIAERESIS, LATIN CAPITAL LETTER
|
||||
0x00C8: 'E', // WITH GRAVE, LATIN CAPITAL LETTER
|
||||
0x0246: 'E', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x0190: 'E', // , LATIN CAPITAL LETTER OPEN
|
||||
0x018E: 'E', // , LATIN CAPITAL LETTER REVERSED
|
||||
0x1D07: 'E', // , LATIN LETTER SMALL CAPITAL
|
||||
0x0193: 'G', // WITH HOOK, LATIN CAPITAL LETTER
|
||||
0x029B: 'G', // WITH HOOK, LATIN LETTER SMALL CAPITAL
|
||||
0x0262: 'G', // , LATIN LETTER SMALL CAPITAL
|
||||
0x029C: 'H', // , LATIN LETTER SMALL CAPITAL
|
||||
0x00CD: 'I', // WITH ACUTE, LATIN CAPITAL LETTER
|
||||
0x00CE: 'I', // WITH CIRCUMFLEX, LATIN CAPITAL LETTER
|
||||
0x00CF: 'I', // WITH DIAERESIS, LATIN CAPITAL LETTER
|
||||
0x0130: 'I', // WITH DOT ABOVE, LATIN CAPITAL LETTER
|
||||
0x00CC: 'I', // WITH GRAVE, LATIN CAPITAL LETTER
|
||||
0x0197: 'I', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x026A: 'I', // , LATIN LETTER SMALL CAPITAL
|
||||
0x0248: 'J', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x1D0A: 'J', // , LATIN LETTER SMALL CAPITAL
|
||||
0x1D0B: 'K', // , LATIN LETTER SMALL CAPITAL
|
||||
0x023D: 'L', // WITH BAR, LATIN CAPITAL LETTER
|
||||
0x1D0C: 'L', // WITH STROKE, LATIN LETTER SMALL CAPITAL
|
||||
0x029F: 'L', // , LATIN LETTER SMALL CAPITAL
|
||||
0x019C: 'M', // , LATIN CAPITAL LETTER TURNED
|
||||
0x1D0D: 'M', // , LATIN LETTER SMALL CAPITAL
|
||||
0x019D: 'N', // WITH LEFT HOOK, LATIN CAPITAL LETTER
|
||||
0x0220: 'N', // WITH LONG RIGHT LEG, LATIN CAPITAL LETTER
|
||||
0x00D1: 'N', // WITH TILDE, LATIN CAPITAL LETTER
|
||||
0x0274: 'N', // , LATIN LETTER SMALL CAPITAL
|
||||
0x1D0E: 'N', // , LATIN LETTER SMALL CAPITAL REVERSED
|
||||
0x00D3: 'O', // WITH ACUTE, LATIN CAPITAL LETTER
|
||||
0x00D4: 'O', // WITH CIRCUMFLEX, LATIN CAPITAL LETTER
|
||||
0x00D6: 'O', // WITH DIAERESIS, LATIN CAPITAL LETTER
|
||||
0x00D2: 'O', // WITH GRAVE, LATIN CAPITAL LETTER
|
||||
0x019F: 'O', // WITH MIDDLE TILDE, LATIN CAPITAL LETTER
|
||||
0x00D8: 'O', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x00D5: 'O', // WITH TILDE, LATIN CAPITAL LETTER
|
||||
0x0186: 'O', // , LATIN CAPITAL LETTER OPEN
|
||||
0x1D0F: 'O', // , LATIN LETTER SMALL CAPITAL
|
||||
0x1D10: 'O', // , LATIN LETTER SMALL CAPITAL OPEN
|
||||
0x1D18: 'P', // , LATIN LETTER SMALL CAPITAL
|
||||
0x024A: 'Q', // WITH HOOK TAIL, LATIN CAPITAL LETTER SMALL
|
||||
0x024C: 'R', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x0280: 'R', // , LATIN LETTER SMALL CAPITAL
|
||||
0x0281: 'R', // , LATIN LETTER SMALL CAPITAL INVERTED
|
||||
0x1D19: 'R', // , LATIN LETTER SMALL CAPITAL REVERSED
|
||||
0x1D1A: 'R', // , LATIN LETTER SMALL CAPITAL TURNED
|
||||
0x023E: 'T', // WITH DIAGONAL STROKE, LATIN CAPITAL LETTER
|
||||
0x01AE: 'T', // WITH RETROFLEX HOOK, LATIN CAPITAL LETTER
|
||||
0x1D1B: 'T', // , LATIN LETTER SMALL CAPITAL
|
||||
0x0244: 'U', // BAR, LATIN CAPITAL LETTER
|
||||
0x00DA: 'U', // WITH ACUTE, LATIN CAPITAL LETTER
|
||||
0x00DB: 'U', // WITH CIRCUMFLEX, LATIN CAPITAL LETTER
|
||||
0x00DC: 'U', // WITH DIAERESIS, LATIN CAPITAL LETTER
|
||||
0x00D9: 'U', // WITH GRAVE, LATIN CAPITAL LETTER
|
||||
0x1D1C: 'U', // , LATIN LETTER SMALL CAPITAL
|
||||
0x01B2: 'V', // WITH HOOK, LATIN CAPITAL LETTER
|
||||
0x0245: 'V', // , LATIN CAPITAL LETTER TURNED
|
||||
0x1D20: 'V', // , LATIN LETTER SMALL CAPITAL
|
||||
0x1D21: 'W', // , LATIN LETTER SMALL CAPITAL
|
||||
0x00DD: 'Y', // WITH ACUTE, LATIN CAPITAL LETTER
|
||||
0x0178: 'Y', // WITH DIAERESIS, LATIN CAPITAL LETTER
|
||||
0x024E: 'Y', // WITH STROKE, LATIN CAPITAL LETTER
|
||||
0x028F: 'Y', // , LATIN LETTER SMALL CAPITAL
|
||||
0x1D22: 'Z', // , LATIN LETTER SMALL CAPITAL
|
||||
|
||||
'Ắ': 'A',
|
||||
'Ấ': 'A',
|
||||
'Ằ': 'A',
|
||||
'Ầ': 'A',
|
||||
'Ẳ': 'A',
|
||||
'Ẩ': 'A',
|
||||
'Ẵ': 'A',
|
||||
'Ẫ': 'A',
|
||||
'Ặ': 'A',
|
||||
'Ậ': 'A',
|
||||
|
||||
'ắ': 'a',
|
||||
'ấ': 'a',
|
||||
'ằ': 'a',
|
||||
'ầ': 'a',
|
||||
'ẳ': 'a',
|
||||
'ẩ': 'a',
|
||||
'ẵ': 'a',
|
||||
'ẫ': 'a',
|
||||
'ặ': 'a',
|
||||
'ậ': 'a',
|
||||
|
||||
'Ế': 'E',
|
||||
'Ề': 'E',
|
||||
'Ể': 'E',
|
||||
'Ễ': 'E',
|
||||
'Ệ': 'E',
|
||||
|
||||
'ế': 'e',
|
||||
'ề': 'e',
|
||||
'ể': 'e',
|
||||
'ễ': 'e',
|
||||
'ệ': 'e',
|
||||
|
||||
'Ố': 'O',
|
||||
'Ớ': 'O',
|
||||
'Ồ': 'O',
|
||||
'Ờ': 'O',
|
||||
'Ổ': 'O',
|
||||
'Ở': 'O',
|
||||
'Ỗ': 'O',
|
||||
'Ỡ': 'O',
|
||||
'Ộ': 'O',
|
||||
'Ợ': 'O',
|
||||
|
||||
'ố': 'o',
|
||||
'ớ': 'o',
|
||||
'ồ': 'o',
|
||||
'ờ': 'o',
|
||||
'ổ': 'o',
|
||||
'ở': 'o',
|
||||
'ỗ': 'o',
|
||||
'ỡ': 'o',
|
||||
'ộ': 'o',
|
||||
'ợ': 'o',
|
||||
|
||||
'Ứ': 'U',
|
||||
'Ừ': 'U',
|
||||
'Ử': 'U',
|
||||
'Ữ': 'U',
|
||||
'Ự': 'U',
|
||||
|
||||
'ứ': 'u',
|
||||
'ừ': 'u',
|
||||
'ử': 'u',
|
||||
'ữ': 'u',
|
||||
'ự': 'u',
|
||||
}
|
||||
|
||||
// NormalizeRunes normalizes latin script letters
func NormalizeRunes(runes []rune) []rune {
	ret := make([]rune, len(runes))
	copy(ret, runes)
	for idx, r := range runes {
		if r < 0x00C0 || r > 0x2184 {
			continue
		}
		n := normalized[r]
		if n > 0 {
			ret[idx] = normalized[r]
		}
	}
	return ret
}
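// A short usage illustration: runes outside the 0x00C0..0x2184 range are
// passed through untouched, and mapped runes are replaced by their plain
// latin counterparts.
//
//	NormalizeRunes([]rune("Só Danço"))  // -> []rune("So Danco")
//	NormalizeRunes([]rune("plain fzf")) // -> []rune("plain fzf")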
|
||||
412  src/ansi.go
@@ -1,8 +1,7 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
@@ -15,111 +14,376 @@ type ansiOffset struct {
|
||||
color ansiState
|
||||
}
|
||||
|
||||
type url struct {
|
||||
uri string
|
||||
params string
|
||||
}
|
||||
|
||||
type ansiState struct {
|
||||
fg tui.Color
|
||||
bg tui.Color
|
||||
attr tui.Attr
|
||||
lbg tui.Color
|
||||
url *url
|
||||
}
|
||||
|
||||
func (s *ansiState) colored() bool {
|
||||
return s.fg != -1 || s.bg != -1 || s.attr > 0
|
||||
return s.fg != -1 || s.bg != -1 || s.attr > 0 || s.lbg >= 0 || s.url != nil
|
||||
}
|
||||
|
||||
func (s *ansiState) equals(t *ansiState) bool {
|
||||
if t == nil {
|
||||
return !s.colored()
|
||||
}
|
||||
return s.fg == t.fg && s.bg == t.bg && s.attr == t.attr
|
||||
return s.fg == t.fg && s.bg == t.bg && s.attr == t.attr && s.lbg == t.lbg && s.url == t.url
|
||||
}
|
||||
|
||||
var ansiRegex *regexp.Regexp
|
||||
func (s *ansiState) ToString() string {
|
||||
if !s.colored() {
|
||||
return ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
ansiRegex = regexp.MustCompile("\x1b\\[[0-9;]*.|[\x0e\x0f]")
|
||||
ret := ""
|
||||
if s.attr&tui.Bold > 0 {
|
||||
ret += "1;"
|
||||
}
|
||||
if s.attr&tui.Dim > 0 {
|
||||
ret += "2;"
|
||||
}
|
||||
if s.attr&tui.Italic > 0 {
|
||||
ret += "3;"
|
||||
}
|
||||
if s.attr&tui.Underline > 0 {
|
||||
ret += "4;"
|
||||
}
|
||||
if s.attr&tui.Blink > 0 {
|
||||
ret += "5;"
|
||||
}
|
||||
if s.attr&tui.Reverse > 0 {
|
||||
ret += "7;"
|
||||
}
|
||||
if s.attr&tui.StrikeThrough > 0 {
|
||||
ret += "9;"
|
||||
}
|
||||
ret += toAnsiString(s.fg, 30) + toAnsiString(s.bg, 40)
|
||||
|
||||
ret = "\x1b[" + strings.TrimSuffix(ret, ";") + "m"
|
||||
if s.url != nil {
|
||||
ret = fmt.Sprintf("\x1b]8;%s;%s\x1b\\%s\x1b]8;;\x1b", s.url.params, s.url.uri, ret)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
func toAnsiString(color tui.Color, offset int) string {
|
||||
col := int(color)
|
||||
ret := ""
|
||||
if col == -1 {
|
||||
ret += strconv.Itoa(offset + 9)
|
||||
} else if col < 8 {
|
||||
ret += strconv.Itoa(offset + col)
|
||||
} else if col < 16 {
|
||||
ret += strconv.Itoa(offset - 30 + 90 + col - 8)
|
||||
} else if col < 256 {
|
||||
ret += strconv.Itoa(offset+8) + ";5;" + strconv.Itoa(col)
|
||||
} else if col >= (1 << 24) {
|
||||
r := strconv.Itoa((col >> 16) & 0xff)
|
||||
g := strconv.Itoa((col >> 8) & 0xff)
|
||||
b := strconv.Itoa(col & 0xff)
|
||||
ret += strconv.Itoa(offset+8) + ";2;" + r + ";" + g + ";" + b
|
||||
}
|
||||
return ret + ";"
|
||||
}
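// Worked examples for the encoding above (foreground, offset 30):
//   color -1        -> "39;"            (terminal default)
//   color 3         -> "33;"            (basic palette)
//   color 160       -> "38;5;160;"      (256-color palette)
//   color 0x1ff0000 -> "38;2;255;0;0;"  (24-bit color, here red)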
|
||||
|
||||
func isPrint(c uint8) bool {
|
||||
return '\x20' <= c && c <= '\x7e'
|
||||
}
|
||||
|
||||
func matchOperatingSystemCommand(s string) int {
|
||||
// `\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)`
|
||||
// ^ match starting here
|
||||
//
|
||||
i := 5 // prefix matched in nextAnsiEscapeSequence()
|
||||
for ; i < len(s) && isPrint(s[i]); i++ {
|
||||
}
|
||||
if i < len(s) {
|
||||
if s[i] == '\x07' {
|
||||
return i + 1
|
||||
}
|
||||
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
||||
// ------
|
||||
if s[i] == '\x1b' && i < len(s)-1 && s[i+1] == '\\' {
|
||||
return i + 2
|
||||
}
|
||||
}
|
||||
|
||||
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
||||
// ------------
|
||||
if i < len(s) && s[:i+1] == "\x1b]8;;\x1b" {
|
||||
return i + 1
|
||||
}
|
||||
|
||||
return -1
|
||||
}
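// For instance, an OSC 8 hyperlink such as "\x1b]8;;https://example.com\x07"
// (terminated by BEL) is consumed here, as is the "\x1b]8;id=1;uri\x1b\\"
// form terminated by ESC-backslash.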
|
||||
|
||||
func matchControlSequence(s string) int {
|
||||
// `\x1b[\\[()][0-9;:?]*[a-zA-Z@]`
|
||||
// ^ match starting here
|
||||
//
|
||||
i := 2 // prefix matched in nextAnsiEscapeSequence()
|
||||
for ; i < len(s); i++ {
|
||||
c := s[i]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ';', ':', '?':
|
||||
// ok
|
||||
default:
|
||||
if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '@' {
|
||||
return i + 1
|
||||
}
|
||||
return -1
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func isCtrlSeqStart(c uint8) bool {
|
||||
switch c {
|
||||
case '\\', '[', '(', ')':
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// nextAnsiEscapeSequence returns the ANSI escape sequence and is equivalent to
// calling FindStringIndex() on the below regex (which was originally used):
//
// "(?:\x1b[\\[()][0-9;:?]*[a-zA-Z@]|\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)|\x1b.|[\x0e\x0f]|.\x08)"
func nextAnsiEscapeSequence(s string) (int, int) {
|
||||
// fast check for ANSI escape sequences
|
||||
i := 0
|
||||
for ; i < len(s); i++ {
|
||||
switch s[i] {
|
||||
case '\x0e', '\x0f', '\x1b', '\x08':
|
||||
// We ignore the fact that '\x08' cannot be the first char
|
||||
// in the string and be an escape sequence for the sake of
|
||||
// speed and simplicity.
|
||||
goto Loop
|
||||
}
|
||||
}
|
||||
return -1, -1
|
||||
|
||||
Loop:
|
||||
for ; i < len(s); i++ {
|
||||
switch s[i] {
|
||||
case '\x08':
|
||||
// backtrack to match: `.\x08`
|
||||
if i > 0 && s[i-1] != '\n' {
|
||||
if s[i-1] < utf8.RuneSelf {
|
||||
return i - 1, i + 1
|
||||
}
|
||||
_, n := utf8.DecodeLastRuneInString(s[:i])
|
||||
return i - n, i + 1
|
||||
}
|
||||
case '\x1b':
|
||||
// match: `\x1b[\\[()][0-9;:?]*[a-zA-Z@]`
|
||||
if i+2 < len(s) && isCtrlSeqStart(s[i+1]) {
|
||||
if j := matchControlSequence(s[i:]); j != -1 {
|
||||
return i, i + j
|
||||
}
|
||||
}
|
||||
|
||||
// match: `\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)`
|
||||
if i+5 < len(s) && s[i+1] == ']' && isNumeric(s[i+2]) &&
|
||||
(s[i+3] == ';' || s[i+3] == ':') && isPrint(s[i+4]) {
|
||||
|
||||
if j := matchOperatingSystemCommand(s[i:]); j != -1 {
|
||||
return i, i + j
|
||||
}
|
||||
}
|
||||
|
||||
// match: `\x1b.`
|
||||
if i+1 < len(s) && s[i+1] != '\n' {
|
||||
if s[i+1] < utf8.RuneSelf {
|
||||
return i, i + 2
|
||||
}
|
||||
_, n := utf8.DecodeRuneInString(s[i+1:])
|
||||
return i, i + n + 1
|
||||
}
|
||||
case '\x0e', '\x0f':
|
||||
// match: `[\x0e\x0f]`
|
||||
return i, i + 1
|
||||
}
|
||||
}
|
||||
return -1, -1
|
||||
}
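// A minimal sketch (illustrative only, assuming it lives in the same package)
// of how nextAnsiEscapeSequence can be used on its own to strip ANSI escape
// sequences from a string; extractColor below does the same thing while also
// recording color offsets.
func stripAnsiSketch(s string) string {
	var out strings.Builder
	for len(s) > 0 {
		// Find the next escape sequence; copy everything before it verbatim.
		start, end := nextAnsiEscapeSequence(s)
		if start < 0 {
			out.WriteString(s)
			break
		}
		out.WriteString(s[:start])
		s = s[end:]
	}
	return out.String()
}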
|
||||
|
||||
func extractColor(str string, state *ansiState, proc func(string, *ansiState) bool) (string, *[]ansiOffset, *ansiState) {
	var offsets []ansiOffset
	var output bytes.Buffer
	// We append to a stack allocated variable that we'll
	// later copy and return, to save on allocations.
	offsets := make([]ansiOffset, 0, 32)

	if state != nil {
		offsets = append(offsets, ansiOffset{[2]int32{0, 0}, *state})
	}

	idx := 0
	for _, offset := range ansiRegex.FindAllStringIndex(str, -1) {
		prev := str[idx:offset[0]]
		output.WriteString(prev)
	var (
		pstate    *ansiState // lazily allocated
		output    strings.Builder
		prevIdx   int
		runeCount int
	)
	for idx := 0; idx < len(str); {
		// Make sure that we found an ANSI code
		start, end := nextAnsiEscapeSequence(str[idx:])
		if start == -1 {
			break
		}
		start += idx
		idx += end

		// Check if we should continue
		prev := str[prevIdx:start]
		if proc != nil && !proc(prev, state) {
			return "", nil, nil
		}
		newState := interpretCode(str[offset[0]:offset[1]], state)
		prevIdx = idx

		if len(prev) != 0 {
			runeCount += utf8.RuneCountInString(prev)
			// Grow the buffer size to the maximum possible length (string length
			// containing ansi codes) to avoid repetitive allocation
			if output.Cap() == 0 {
				output.Grow(len(str))
			}
			output.WriteString(prev)
		}

		newState := interpretCode(str[start:idx], state)
		if !newState.equals(state) {
			if state != nil {
				// Update last offset
				(&offsets[len(offsets)-1]).offset[1] = int32(utf8.RuneCount(output.Bytes()))
				(&offsets[len(offsets)-1]).offset[1] = int32(runeCount)
			}

			if newState.colored() {
				// Append new offset
				state = newState
				newLen := int32(utf8.RuneCount(output.Bytes()))
				offsets = append(offsets, ansiOffset{[2]int32{newLen, newLen}, *state})
				if pstate == nil {
					pstate = &ansiState{}
				}
				*pstate = newState
				state = pstate
				offsets = append(offsets, ansiOffset{
					[2]int32{int32(runeCount), int32(runeCount)},
					newState,
				})
			} else {
				// Discard state
				state = nil
			}
		}

		idx = offset[1]
	}

	rest := str[idx:]
	if len(rest) > 0 {
	var rest string
	var trimmed string
	if prevIdx == 0 {
		// No ANSI code found
		rest = str
		trimmed = str
	} else {
		rest = str[prevIdx:]
		output.WriteString(rest)
		if state != nil {
			// Update last offset
			(&offsets[len(offsets)-1]).offset[1] = int32(utf8.RuneCount(output.Bytes()))
		}
		trimmed = output.String()
	}
	if proc != nil {
		proc(rest, state)
	}
	if len(offsets) == 0 {
		return output.String(), nil, state
	if len(offsets) > 0 {
		if len(rest) > 0 && state != nil {
			// Update last offset
			runeCount += utf8.RuneCountInString(rest)
			(&offsets[len(offsets)-1]).offset[1] = int32(runeCount)
		}
		// Return a copy of the offsets slice
		a := make([]ansiOffset, len(offsets))
		copy(a, offsets)
		return trimmed, &a, state
	}
	return output.String(), &offsets, state
	return trimmed, nil, state
}

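As a usage sketch (illustrative, not part of the diff; it assumes the signature shown above), extractColor strips the codes from a line and reports which rune ranges each color state covers:

line := "plain \x1b[31mred\x1b[0m text"
trimmed, offsets, _ := extractColor(line, nil, nil)
// trimmed is the line with the escape sequences removed
if offsets != nil {
	for _, ofs := range *offsets {
		_ = ofs.offset // [2]int32 rune range covered by one ansiState
	}
}
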
func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
	// State
	var state *ansiState
	if prevState == nil {
		state = &ansiState{-1, -1, 0}
func parseAnsiCode(s string, delimiter byte) (int, byte, string) {
	var remaining string
	var i int
	if delimiter == 0 {
		// Faster than strings.IndexAny(";:")
		i = strings.IndexByte(s, ';')
		if i < 0 {
			i = strings.IndexByte(s, ':')
		}
	} else {
		state = &ansiState{prevState.fg, prevState.bg, prevState.attr}
		i = strings.IndexByte(s, delimiter)
	}
	if ansiCode[0] != '\x1b' || ansiCode[len(ansiCode)-1] != 'm' {
	if i >= 0 {
		delimiter = s[i]
		remaining = s[i+1:]
		s = s[:i]
	}

	if len(s) > 0 {
		// Inlined version of strconv.Atoi() that only handles positive
		// integers and does not allocate on error.
		code := 0
		for _, ch := range stringBytes(s) {
			ch -= '0'
			if ch > 9 {
				return -1, delimiter, remaining
			}
			code = code*10 + int(ch)
		}
		return code, delimiter, remaining
	}

	return -1, delimiter, remaining
}

func interpretCode(ansiCode string, prevState *ansiState) ansiState {
	var state ansiState
	if prevState == nil {
		state = ansiState{-1, -1, 0, -1, nil}
	} else {
		state = ansiState{prevState.fg, prevState.bg, prevState.attr, prevState.lbg, prevState.url}
	}
	if ansiCode[0] != '\x1b' || ansiCode[1] != '[' || ansiCode[len(ansiCode)-1] != 'm' {
		if prevState != nil && strings.HasSuffix(ansiCode, "0K") {
			state.lbg = prevState.bg
		} else if ansiCode == "\x1b]8;;\x1b\\" { // End of a hyperlink
			state.url = nil
		} else if strings.HasPrefix(ansiCode, "\x1b]8;") && strings.HasSuffix(ansiCode, "\x1b\\") {
			if paramsEnd := strings.IndexRune(ansiCode[4:], ';'); paramsEnd >= 0 {
				params := ansiCode[4 : 4+paramsEnd]
				uri := ansiCode[5+paramsEnd : len(ansiCode)-2]
				state.url = &url{uri: uri, params: params}
			}
		}
		return state
	}

	ptr := &state.fg
	state256 := 0

	init := func() {
	if len(ansiCode) <= 3 {
		state.fg = -1
		state.bg = -1
		state.attr = 0
		state256 = 0
		return state
	}

	ansiCode = ansiCode[2 : len(ansiCode)-1]
	if len(ansiCode) == 0 {
		init()
	}
	for _, code := range strings.Split(ansiCode, ";") {
		if num, err := strconv.Atoi(code); err == nil {

	state256 := 0
	ptr := &state.fg

	var delimiter byte
	count := 0
	for len(ansiCode) != 0 {
		var num int
		if num, delimiter, ansiCode = parseAnsiCode(ansiCode, delimiter); num != -1 {
			count++
			switch state256 {
			case 0:
				switch num {
@@ -134,17 +398,37 @@ func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
				case 49:
					state.bg = -1
				case 1:
					state.attr = tui.Bold
					state.attr = state.attr | tui.Bold
				case 2:
					state.attr = tui.Dim
					state.attr = state.attr | tui.Dim
				case 3:
					state.attr = state.attr | tui.Italic
				case 4:
					state.attr = tui.Underline
					state.attr = state.attr | tui.Underline
				case 5:
					state.attr = tui.Blink
					state.attr = state.attr | tui.Blink
				case 7:
					state.attr = tui.Reverse
					state.attr = state.attr | tui.Reverse
				case 9:
					state.attr = state.attr | tui.StrikeThrough
				case 22:
					state.attr = state.attr &^ tui.Bold
					state.attr = state.attr &^ tui.Dim
				case 23: // tput rmso
					state.attr = state.attr &^ tui.Italic
				case 24: // tput rmul
					state.attr = state.attr &^ tui.Underline
				case 25:
					state.attr = state.attr &^ tui.Blink
				case 27:
					state.attr = state.attr &^ tui.Reverse
				case 29:
					state.attr = state.attr &^ tui.StrikeThrough
				case 0:
					init()
					state.fg = -1
					state.bg = -1
					state.attr = 0
					state256 = 0
				default:
					if num >= 30 && num <= 37 {
						state.fg = tui.Color(num - 30)
@@ -158,6 +442,8 @@ func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
					}
			case 1:
				switch num {
				case 2:
					state256 = 10 // MAGIC
				case 5:
					state256++
				default:
@@ -166,8 +452,28 @@ func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
			case 2:
				*ptr = tui.Color(num)
				state256 = 0
			case 10:
				*ptr = tui.Color(1<<24) | tui.Color(num<<16)
				state256++
			case 11:
				*ptr = *ptr | tui.Color(num<<8)
				state256++
			case 12:
				*ptr = *ptr | tui.Color(num)
				state256 = 0
			}
		}
	}

	// Empty sequence: reset
	if count == 0 {
		state.fg = -1
		state.bg = -1
		state.attr = 0
	}

	if state256 > 0 {
		*ptr = -1
	}
	return state
}
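
To illustrate the loop above (a sketch, not part of the diff): parseAnsiCode consumes the SGR parameter list one number at a time, remembering which delimiter (';' or ':') the sequence uses:

params := "38;5;161" // "\x1b[38;5;161m" with the "\x1b[" prefix and the trailing 'm' removed
var delim byte
for len(params) != 0 {
	var num int
	num, delim, params = parseAnsiCode(params, delim)
	_ = num // yields 38, then 5, then 161; the state256 machine above maps this to a 256-color foreground
}
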
281 src/ansi_test.go
@@ -1,12 +1,192 @@
package fzf

import (
	"fmt"
	"math/rand"
	"regexp"
	"strings"
	"testing"
	"unicode/utf8"

	"github.com/junegunn/fzf/src/tui"
)

// The following regular expression will include not all but most of the
// frequently used ANSI sequences. This regex is used as a reference for
// testing nextAnsiEscapeSequence().
//
// References:
// - https://github.com/gnachman/iTerm2
// - https://web.archive.org/web/20090204053813/http://ascii-table.com/ansi-escape-sequences.php
//   (archived from http://ascii-table.com/ansi-escape-sequences.php)
// - https://web.archive.org/web/20090227051140/http://ascii-table.com/ansi-escape-sequences-vt-100.php
//   (archived from http://ascii-table.com/ansi-escape-sequences-vt-100.php)
// - http://tldp.org/HOWTO/Bash-Prompt-HOWTO/x405.html
// - https://invisible-island.net/xterm/ctlseqs/ctlseqs.html
var ansiRegexReference = regexp.MustCompile("(?:\x1b[\\[()][0-9;:]*[a-zA-Z@]|\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)|\x1b.|[\x0e\x0f]|.\x08)")

func testParserReference(t testing.TB, str string) {
	t.Helper()

	toSlice := func(start, end int) []int {
		if start == -1 {
			return nil
		}
		return []int{start, end}
	}

	s := str
	for i := 0; ; i++ {
		got := toSlice(nextAnsiEscapeSequence(s))
		exp := ansiRegexReference.FindStringIndex(s)

		equal := len(got) == len(exp)
		if equal {
			for i := 0; i < len(got); i++ {
				if got[i] != exp[i] {
					equal = false
					break
				}
			}
		}
		if !equal {
			var exps, gots []rune
			if len(got) == 2 {
				gots = []rune(s[got[0]:got[1]])
			}
			if len(exp) == 2 {
				exps = []rune(s[exp[0]:exp[1]])
			}
			t.Errorf("%d: %q: got: %v (%q) want: %v (%q)", i, s, got, gots, exp, exps)
			return
		}
		if len(exp) == 0 {
			return
		}
		s = s[exp[1]:]
	}
}

func TestNextAnsiEscapeSequence(t *testing.T) {
	testStrs := []string{
		"\x1b[0mhello world",
		"\x1b[1mhello world",
		"椙\x1b[1m椙",
		"椙\x1b[1椙m椙",
		"\x1b[1mhello \x1b[mw\x1b7o\x1b8r\x1b(Bl\x1b[2@d",
		"\x1b[1mhello \x1b[Kworld",
		"hello \x1b[34;45;1mworld",
		"hello \x1b[34;45;1mwor\x1b[34;45;1mld",
		"hello \x1b[34;45;1mwor\x1b[0mld",
		"hello \x1b[34;48;5;233;1mwo\x1b[38;5;161mr\x1b[0ml\x1b[38;5;161md",
		"hello \x1b[38;5;38;48;5;48;1mwor\x1b[38;5;48;48;5;38ml\x1b[0md",
		"hello \x1b[32;1mworld",
		"hello world",
		"hello \x1b[0;38;5;200;48;5;100mworld",
		"\x1b椙",
		"椙\x08",
		"\n\x08",
		"X\x08",
		"",
		"\x1b]4;3;rgb:aa/bb/cc\x07 ",
		"\x1b]4;3;rgb:aa/bb/cc\x1b\\ ",
		ansiBenchmarkString,
	}

	for _, s := range testStrs {
		testParserReference(t, s)
	}
}

func TestNextAnsiEscapeSequence_Fuzz_Modified(t *testing.T) {
|
||||
t.Parallel()
|
||||
if testing.Short() {
|
||||
t.Skip("short test")
|
||||
}
|
||||
|
||||
testStrs := []string{
|
||||
"\x1b[0mhello world",
|
||||
"\x1b[1mhello world",
|
||||
"椙\x1b[1m椙",
|
||||
"椙\x1b[1椙m椙",
|
||||
"\x1b[1mhello \x1b[mw\x1b7o\x1b8r\x1b(Bl\x1b[2@d",
|
||||
"\x1b[1mhello \x1b[Kworld",
|
||||
"hello \x1b[34;45;1mworld",
|
||||
"hello \x1b[34;45;1mwor\x1b[34;45;1mld",
|
||||
"hello \x1b[34;45;1mwor\x1b[0mld",
|
||||
"hello \x1b[34;48;5;233;1mwo\x1b[38;5;161mr\x1b[0ml\x1b[38;5;161md",
|
||||
"hello \x1b[38;5;38;48;5;48;1mwor\x1b[38;5;48;48;5;38ml\x1b[0md",
|
||||
"hello \x1b[32;1mworld",
|
||||
"hello world",
|
||||
"hello \x1b[0;38;5;200;48;5;100mworld",
|
||||
ansiBenchmarkString,
|
||||
}
|
||||
|
||||
replacementBytes := [...]rune{'\x0e', '\x0f', '\x1b', '\x08'}
|
||||
|
||||
modifyString := func(s string, rr *rand.Rand) string {
|
||||
n := rr.Intn(len(s))
|
||||
b := []rune(s)
|
||||
for ; n >= 0 && len(b) != 0; n-- {
|
||||
i := rr.Intn(len(b))
|
||||
switch x := rr.Intn(4); x {
|
||||
case 0:
|
||||
b = append(b[:i], b[i+1:]...)
|
||||
case 1:
|
||||
j := rr.Intn(len(replacementBytes) - 1)
|
||||
b[i] = replacementBytes[j]
|
||||
case 2:
|
||||
x := rune(rr.Intn(utf8.MaxRune))
|
||||
for !utf8.ValidRune(x) {
|
||||
x = rune(rr.Intn(utf8.MaxRune))
|
||||
}
|
||||
b[i] = x
|
||||
case 3:
|
||||
b[i] = rune(rr.Intn(utf8.MaxRune)) // potentially invalid
|
||||
default:
|
||||
t.Fatalf("unsupported value: %d", x)
|
||||
}
|
||||
}
|
||||
return string(b)
|
||||
}
|
||||
|
||||
rr := rand.New(rand.NewSource(1))
|
||||
for _, s := range testStrs {
|
||||
for i := 1_000; i >= 0; i-- {
|
||||
testParserReference(t, modifyString(s, rr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNextAnsiEscapeSequence_Fuzz_Random(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
if testing.Short() {
|
||||
t.Skip("short test")
|
||||
}
|
||||
|
||||
randomString := func(rr *rand.Rand) string {
|
||||
numChars := rand.Intn(50)
|
||||
codePoints := make([]rune, numChars)
|
||||
for i := 0; i < len(codePoints); i++ {
|
||||
var r rune
|
||||
for n := 0; n < 1000; n++ {
|
||||
r = rune(rr.Intn(utf8.MaxRune))
|
||||
// Allow 10% of runes to be invalid
|
||||
if utf8.ValidRune(r) || rr.Float64() < 0.10 {
|
||||
break
|
||||
}
|
||||
}
|
||||
codePoints[i] = r
|
||||
}
|
||||
return string(codePoints)
|
||||
}
|
||||
|
||||
rr := rand.New(rand.NewSource(1))
|
||||
for i := 0; i < 100_000; i++ {
|
||||
testParserReference(t, randomString(rr))
|
||||
}
|
||||
}
|
||||
|
||||
func TestExtractColor(t *testing.T) {
|
||||
assert := func(offset ansiOffset, b int32, e int32, fg tui.Color, bg tui.Color, bold bool) {
|
||||
var attr tui.Attr
|
||||
@@ -26,9 +206,9 @@ func TestExtractColor(t *testing.T) {
|
||||
output, ansiOffsets, newState := extractColor(src, state, nil)
|
||||
state = newState
|
||||
if output != "hello world" {
|
||||
t.Errorf("Invalid output: {}", output)
|
||||
t.Errorf("Invalid output: %s %v", output, []rune(output))
|
||||
}
|
||||
fmt.Println(src, ansiOffsets, clean)
|
||||
t.Log(src, ansiOffsets, clean)
|
||||
assertion(ansiOffsets, state)
|
||||
}
|
||||
|
||||
@@ -56,7 +236,7 @@ func TestExtractColor(t *testing.T) {
|
||||
})
|
||||
|
||||
state = nil
|
||||
src = "\x1b[1mhello \x1b[mworld"
|
||||
src = "\x1b[1mhello \x1b[mw\x1b7o\x1b8r\x1b(Bl\x1b[2@d"
|
||||
check(func(offsets *[]ansiOffset, state *ansiState) {
|
||||
if len(*offsets) != 1 {
|
||||
t.Fail()
|
||||
@@ -156,3 +336,96 @@ func TestExtractColor(t *testing.T) {
|
||||
assert((*offsets)[1], 6, 11, 200, 100, false)
|
||||
})
|
||||
}
|
||||
|
||||
func TestAnsiCodeStringConversion(t *testing.T) {
|
||||
assert := func(code string, prevState *ansiState, expected string) {
|
||||
state := interpretCode(code, prevState)
|
||||
if expected != state.ToString() {
|
||||
t.Errorf("expected: %s, actual: %s",
|
||||
strings.ReplaceAll(expected, "\x1b[", "\\x1b["),
|
||||
strings.ReplaceAll(state.ToString(), "\x1b[", "\\x1b["))
|
||||
}
|
||||
}
|
||||
assert("\x1b[m", nil, "")
|
||||
assert("\x1b[m", &ansiState{attr: tui.Blink, lbg: -1}, "")
|
||||
assert("\x1b[0m", &ansiState{fg: 4, bg: 4, lbg: -1}, "")
|
||||
assert("\x1b[;m", &ansiState{fg: 4, bg: 4, lbg: -1}, "")
|
||||
assert("\x1b[;;m", &ansiState{fg: 4, bg: 4, lbg: -1}, "")
|
||||
|
||||
assert("\x1b[31m", nil, "\x1b[31;49m")
|
||||
assert("\x1b[41m", nil, "\x1b[39;41m")
|
||||
|
||||
assert("\x1b[92m", nil, "\x1b[92;49m")
|
||||
assert("\x1b[102m", nil, "\x1b[39;102m")
|
||||
|
||||
assert("\x1b[31m", &ansiState{fg: 4, bg: 4, lbg: -1}, "\x1b[31;44m")
|
||||
assert("\x1b[1;2;31m", &ansiState{fg: 2, bg: -1, attr: tui.Reverse, lbg: -1}, "\x1b[1;2;7;31;49m")
|
||||
assert("\x1b[38;5;100;48;5;200m", nil, "\x1b[38;5;100;48;5;200m")
|
||||
assert("\x1b[38:5:100:48:5:200m", nil, "\x1b[38;5;100;48;5;200m")
|
||||
assert("\x1b[48;5;100;38;5;200m", nil, "\x1b[38;5;200;48;5;100m")
|
||||
assert("\x1b[48;5;100;38;2;10;20;30;1m", nil, "\x1b[1;38;2;10;20;30;48;5;100m")
|
||||
assert("\x1b[48;5;100;38;2;10;20;30;7m",
|
||||
&ansiState{attr: tui.Dim | tui.Italic, fg: 1, bg: 1},
|
||||
"\x1b[2;3;7;38;2;10;20;30;48;5;100m")
|
||||
}
|
||||
|
||||
func TestParseAnsiCode(t *testing.T) {
|
||||
tests := []struct {
|
||||
In, Exp string
|
||||
N int
|
||||
}{
|
||||
{"123", "", 123},
|
||||
{"1a", "", -1},
|
||||
{"1a;12", "12", -1},
|
||||
{"12;a", "a", 12},
|
||||
{"-2", "", -1},
|
||||
}
|
||||
for _, x := range tests {
|
||||
n, _, s := parseAnsiCode(x.In, 0)
|
||||
if n != x.N || s != x.Exp {
|
||||
t.Fatalf("%q: got: (%d %q) want: (%d %q)", x.In, n, s, x.N, x.Exp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// kernel/bpf/preload/iterators/README
|
||||
const ansiBenchmarkString = "\x1b[38;5;81m\x1b[01;31m\x1b[Kkernel/\x1b[0m\x1b[38:5:81mbpf/" +
|
||||
"\x1b[0m\x1b[38:5:81mpreload/\x1b[0m\x1b[38;5;81miterators/" +
|
||||
"\x1b[0m\x1b[38:5:149mMakefile\x1b[m\x1b[K\x1b[0m"
|
||||
|
||||
func BenchmarkNextAnsiEscapeSequence(b *testing.B) {
|
||||
b.SetBytes(int64(len(ansiBenchmarkString)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
s := ansiBenchmarkString
|
||||
for {
|
||||
_, o := nextAnsiEscapeSequence(s)
|
||||
if o == -1 {
|
||||
break
|
||||
}
|
||||
s = s[o:]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Baseline test to compare the speed of nextAnsiEscapeSequence() to the
|
||||
// previously used regex based implementation.
|
||||
func BenchmarkNextAnsiEscapeSequence_Regex(b *testing.B) {
|
||||
b.SetBytes(int64(len(ansiBenchmarkString)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
s := ansiBenchmarkString
|
||||
for {
|
||||
a := ansiRegexReference.FindStringIndex(s)
|
||||
if len(a) == 0 {
|
||||
break
|
||||
}
|
||||
s = s[a[1]:]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkExtractColor(b *testing.B) {
|
||||
b.SetBytes(int64(len(ansiBenchmarkString)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
extractColor(ansiBenchmarkString, nil, nil)
|
||||
}
|
||||
}
|
||||
|
||||
60 src/cache.go
@@ -3,7 +3,7 @@ package fzf

import "sync"

// queryCache associates strings to lists of items
type queryCache map[string][]*Result
type queryCache map[string][]Result

// ChunkCache associates Chunk and query string to lists of items
type ChunkCache struct {
@@ -12,12 +12,26 @@ type ChunkCache struct {
}

// NewChunkCache returns a new ChunkCache
func NewChunkCache() ChunkCache {
	return ChunkCache{sync.Mutex{}, make(map[*Chunk]*queryCache)}
func NewChunkCache() *ChunkCache {
	return &ChunkCache{sync.Mutex{}, make(map[*Chunk]*queryCache)}
}

func (cc *ChunkCache) Clear() {
	cc.mutex.Lock()
	cc.cache = make(map[*Chunk]*queryCache)
	cc.mutex.Unlock()
}

func (cc *ChunkCache) retire(chunk ...*Chunk) {
	cc.mutex.Lock()
	for _, c := range chunk {
		delete(cc.cache, c)
	}
	cc.mutex.Unlock()
}

// Add adds the list to the cache
func (cc *ChunkCache) Add(chunk *Chunk, key string, list []*Result) {
func (cc *ChunkCache) Add(chunk *Chunk, key string, list []Result) {
	if len(key) == 0 || !chunk.IsFull() || len(list) > queryCacheMax {
		return
	}
@@ -33,10 +47,10 @@ func (cc *ChunkCache) Add(chunk *Chunk, key string, list []*Result) {
	(*qc)[key] = list
}

// Find is called to lookup ChunkCache
func (cc *ChunkCache) Find(chunk *Chunk, key string) ([]*Result, bool) {
// Lookup is called to lookup ChunkCache
func (cc *ChunkCache) Lookup(chunk *Chunk, key string) []Result {
	if len(key) == 0 || !chunk.IsFull() {
		return nil, false
		return nil
	}

	cc.mutex.Lock()
@@ -46,8 +60,36 @@ func (cc *ChunkCache) Find(chunk *Chunk, key string) ([]*Result, bool) {
	if ok {
		list, ok := (*qc)[key]
		if ok {
			return list, true
			return list
		}
	}
	return nil, false
	return nil
}

func (cc *ChunkCache) Search(chunk *Chunk, key string) []Result {
	if len(key) == 0 || !chunk.IsFull() {
		return nil
	}

	cc.mutex.Lock()
	defer cc.mutex.Unlock()

	qc, ok := cc.cache[chunk]
	if !ok {
		return nil
	}

	for idx := 1; idx < len(key); idx++ {
		// [---------| ] | [ |---------]
		// [--------| ] | [ |--------]
		// [-------| ] | [ |-------]
		prefix := key[:len(key)-idx]
		suffix := key[idx:]
		for _, substr := range [2]string{prefix, suffix} {
			if cached, found := (*qc)[substr]; found {
				return cached
			}
		}
	}
	return nil
}

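A small sketch of how the reworked cache API fits together (illustrative only; Add requires a full chunk and a non-empty key, as enforced above):

cache := NewChunkCache()
full := &Chunk{count: chunkSize}
cache.Add(full, "foo", []Result{{}})

if hit := cache.Lookup(full, "foob"); hit == nil {
	// exact miss: Search may still return the cached list for the prefix
	// "foo" (or a suffix), which the matcher can narrow down further
	partial := cache.Search(full, "foob")
	_ = partial
}
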
@@ -4,37 +4,36 @@ import "testing"

func TestChunkCache(t *testing.T) {
	cache := NewChunkCache()
	chunk2 := make(Chunk, chunkSize)
	chunk1p := &Chunk{}
	chunk2p := &chunk2
	items1 := []*Result{&Result{}}
	items2 := []*Result{&Result{}, &Result{}}
	chunk2p := &Chunk{count: chunkSize}
	items1 := []Result{{}}
	items2 := []Result{{}, {}}
	cache.Add(chunk1p, "foo", items1)
	cache.Add(chunk2p, "foo", items1)
	cache.Add(chunk2p, "bar", items2)

	{ // chunk1 is not full
		cached, found := cache.Find(chunk1p, "foo")
		if found {
			t.Error("Cached disabled for non-empty chunks", found, cached)
		cached := cache.Lookup(chunk1p, "foo")
		if cached != nil {
			t.Error("Cached disabled for non-empty chunks", cached)
		}
	}
	{
		cached, found := cache.Find(chunk2p, "foo")
		if !found || len(cached) != 1 {
			t.Error("Expected 1 item cached", found, cached)
		cached := cache.Lookup(chunk2p, "foo")
		if cached == nil || len(cached) != 1 {
			t.Error("Expected 1 item cached", cached)
		}
	}
	{
		cached, found := cache.Find(chunk2p, "bar")
		if !found || len(cached) != 2 {
			t.Error("Expected 2 items cached", found, cached)
		cached := cache.Lookup(chunk2p, "bar")
		if cached == nil || len(cached) != 2 {
			t.Error("Expected 2 items cached", cached)
		}
	}
	{
		cached, found := cache.Find(chunk1p, "foobar")
		if found {
			t.Error("Expected 0 item cached", found, cached)
		cached := cache.Lookup(chunk1p, "foobar")
		if cached != nil {
			t.Error("Expected 0 item cached", cached)
		}
	}
}

111 src/chunklist.go
@@ -2,34 +2,35 @@ package fzf
|
||||
|
||||
import "sync"
|
||||
|
||||
// Chunk is a list of Item pointers whose size has the upper limit of chunkSize
|
||||
type Chunk []*Item // >>> []Item
|
||||
// Chunk is a list of Items whose size has the upper limit of chunkSize
|
||||
type Chunk struct {
|
||||
items [chunkSize]Item
|
||||
count int
|
||||
}
|
||||
|
||||
// ItemBuilder is a closure type that builds Item object from a pointer to a
|
||||
// string and an integer
|
||||
type ItemBuilder func([]byte, int) *Item
|
||||
// ItemBuilder is a closure type that builds Item object from byte array
|
||||
type ItemBuilder func(*Item, []byte) bool
|
||||
|
||||
// ChunkList is a list of Chunks
|
||||
type ChunkList struct {
|
||||
chunks []*Chunk
|
||||
count int
|
||||
mutex sync.Mutex
|
||||
trans ItemBuilder
|
||||
cache *ChunkCache
|
||||
}
|
||||
|
||||
// NewChunkList returns a new ChunkList
|
||||
func NewChunkList(trans ItemBuilder) *ChunkList {
|
||||
func NewChunkList(cache *ChunkCache, trans ItemBuilder) *ChunkList {
|
||||
return &ChunkList{
|
||||
chunks: []*Chunk{},
|
||||
count: 0,
|
||||
mutex: sync.Mutex{},
|
||||
trans: trans}
|
||||
trans: trans,
|
||||
cache: cache}
|
||||
}
|
||||
|
||||
func (c *Chunk) push(trans ItemBuilder, data []byte, index int) bool {
|
||||
item := trans(data, index)
|
||||
if item != nil {
|
||||
*c = append(*c, item)
|
||||
func (c *Chunk) push(trans ItemBuilder, data []byte) bool {
|
||||
if trans(&c.items[c.count], data) {
|
||||
c.count++
|
||||
return true
|
||||
}
|
||||
return false
|
||||
@@ -37,7 +38,7 @@ func (c *Chunk) push(trans ItemBuilder, data []byte, index int) bool {
|
||||
|
||||
// IsFull returns true if the Chunk is full
|
||||
func (c *Chunk) IsFull() bool {
|
||||
return len(*c) == chunkSize
|
||||
return c.count == chunkSize
|
||||
}
|
||||
|
||||
func (cl *ChunkList) lastChunk() *Chunk {
|
||||
@@ -49,45 +50,85 @@ func CountItems(cs []*Chunk) int {
|
||||
if len(cs) == 0 {
|
||||
return 0
|
||||
}
|
||||
return chunkSize*(len(cs)-1) + len(*(cs[len(cs)-1]))
|
||||
if len(cs) == 1 {
|
||||
return cs[0].count
|
||||
}
|
||||
|
||||
// First chunk might not be full due to --tail=N
|
||||
return cs[0].count + chunkSize*(len(cs)-2) + cs[len(cs)-1].count
|
||||
}
|
||||
|
||||
// Push adds the item to the list
|
||||
func (cl *ChunkList) Push(data []byte) bool {
|
||||
cl.mutex.Lock()
|
||||
defer cl.mutex.Unlock()
|
||||
|
||||
if len(cl.chunks) == 0 || cl.lastChunk().IsFull() {
|
||||
newChunk := Chunk(make([]*Item, 0, chunkSize))
|
||||
cl.chunks = append(cl.chunks, &newChunk)
|
||||
cl.chunks = append(cl.chunks, &Chunk{})
|
||||
}
|
||||
|
||||
if cl.lastChunk().push(cl.trans, data, cl.count) {
|
||||
cl.count++
|
||||
return true
|
||||
}
|
||||
return false
|
||||
ret := cl.lastChunk().push(cl.trans, data)
|
||||
cl.mutex.Unlock()
|
||||
return ret
|
||||
}
|
||||
|
||||
// Clear clears the data
|
||||
func (cl *ChunkList) Clear() {
|
||||
cl.mutex.Lock()
|
||||
cl.chunks = nil
|
||||
cl.mutex.Unlock()
|
||||
}
|
||||
|
||||
// Snapshot returns immutable snapshot of the ChunkList
|
||||
func (cl *ChunkList) Snapshot() ([]*Chunk, int) {
|
||||
func (cl *ChunkList) Snapshot(tail int) ([]*Chunk, int, bool) {
|
||||
cl.mutex.Lock()
|
||||
defer cl.mutex.Unlock()
|
||||
|
||||
changed := false
|
||||
if tail > 0 && CountItems(cl.chunks) > tail {
|
||||
changed = true
|
||||
// Find the number of chunks to keep
|
||||
numChunks := 0
|
||||
for left, i := tail, len(cl.chunks)-1; left > 0 && i >= 0; i-- {
|
||||
numChunks++
|
||||
left -= cl.chunks[i].count
|
||||
}
|
||||
|
||||
// Copy the chunks to keep
|
||||
ret := make([]*Chunk, numChunks)
|
||||
minIndex := len(cl.chunks) - numChunks
|
||||
cl.cache.retire(cl.chunks[:minIndex]...)
|
||||
copy(ret, cl.chunks[minIndex:])
|
||||
|
||||
for left, i := tail, len(ret)-1; i >= 0; i-- {
|
||||
chunk := ret[i]
|
||||
if chunk.count > left {
|
||||
newChunk := *chunk
|
||||
newChunk.count = left
|
||||
oldCount := chunk.count
|
||||
for i := 0; i < left; i++ {
|
||||
newChunk.items[i] = chunk.items[oldCount-left+i]
|
||||
}
|
||||
ret[i] = &newChunk
|
||||
cl.cache.retire(chunk)
|
||||
break
|
||||
}
|
||||
left -= chunk.count
|
||||
}
|
||||
cl.chunks = ret
|
||||
}
|
||||
|
||||
ret := make([]*Chunk, len(cl.chunks))
|
||||
copy(ret, cl.chunks)
|
||||
|
||||
// Duplicate the last chunk
|
||||
// Duplicate the first and the last chunk
|
||||
if cnt := len(ret); cnt > 0 {
|
||||
ret[cnt-1] = ret[cnt-1].dupe()
|
||||
if tail > 0 && cnt > 1 {
|
||||
newChunk := *ret[0]
|
||||
ret[0] = &newChunk
|
||||
}
|
||||
newChunk := *ret[cnt-1]
|
||||
ret[cnt-1] = &newChunk
|
||||
}
|
||||
return ret, cl.count
|
||||
}
|
||||
|
||||
func (c *Chunk) dupe() *Chunk {
|
||||
newChunk := make(Chunk, len(*c))
|
||||
for idx, ptr := range *c {
|
||||
newChunk[idx] = ptr
|
||||
}
|
||||
return &newChunk
|
||||
cl.mutex.Unlock()
|
||||
return ret, CountItems(ret), changed
|
||||
}
|
||||
|
||||
@@ -11,12 +11,13 @@ func TestChunkList(t *testing.T) {
|
||||
// FIXME global
|
||||
sortCriteria = []criterion{byScore, byLength}
|
||||
|
||||
cl := NewChunkList(func(s []byte, i int) *Item {
|
||||
return &Item{text: util.ToChars(s), index: int32(i * 2)}
|
||||
cl := NewChunkList(NewChunkCache(), func(item *Item, s []byte) bool {
|
||||
item.text = util.ToChars(s)
|
||||
return true
|
||||
})
|
||||
|
||||
// Snapshot
|
||||
snapshot, count := cl.Snapshot()
|
||||
snapshot, count, _ := cl.Snapshot(0)
|
||||
if len(snapshot) > 0 || count > 0 {
|
||||
t.Error("Snapshot should be empty now")
|
||||
}
|
||||
@@ -31,18 +32,18 @@ func TestChunkList(t *testing.T) {
|
||||
}
|
||||
|
||||
// But the new snapshot should contain the added items
|
||||
snapshot, count = cl.Snapshot()
|
||||
snapshot, count, _ = cl.Snapshot(0)
|
||||
if len(snapshot) != 1 && count != 2 {
|
||||
t.Error("Snapshot should not be empty now")
|
||||
}
|
||||
|
||||
// Check the content of the ChunkList
|
||||
chunk1 := snapshot[0]
|
||||
if len(*chunk1) != 2 {
|
||||
if chunk1.count != 2 {
|
||||
t.Error("Snapshot should contain only two items")
|
||||
}
|
||||
if (*chunk1)[0].text.ToString() != "hello" || (*chunk1)[0].index != 0 ||
|
||||
(*chunk1)[1].text.ToString() != "world" || (*chunk1)[1].index != 2 {
|
||||
if chunk1.items[0].text.ToString() != "hello" ||
|
||||
chunk1.items[1].text.ToString() != "world" {
|
||||
t.Error("Invalid data")
|
||||
}
|
||||
if chunk1.IsFull() {
|
||||
@@ -60,20 +61,56 @@ func TestChunkList(t *testing.T) {
|
||||
}
|
||||
|
||||
// New snapshot
|
||||
snapshot, count = cl.Snapshot()
|
||||
snapshot, count, _ = cl.Snapshot(0)
|
||||
if len(snapshot) != 3 || !snapshot[0].IsFull() ||
|
||||
!snapshot[1].IsFull() || snapshot[2].IsFull() || count != chunkSize*2+2 {
|
||||
t.Error("Expected two full chunks and one more chunk")
|
||||
}
|
||||
if len(*snapshot[2]) != 2 {
|
||||
if snapshot[2].count != 2 {
|
||||
t.Error("Unexpected number of items")
|
||||
}
|
||||
|
||||
cl.Push([]byte("hello"))
|
||||
cl.Push([]byte("world"))
|
||||
|
||||
lastChunkCount := len(*snapshot[len(snapshot)-1])
|
||||
lastChunkCount := snapshot[len(snapshot)-1].count
|
||||
if lastChunkCount != 2 {
|
||||
t.Error("Unexpected number of items:", lastChunkCount)
|
||||
}
|
||||
}
|
||||
|
||||
func TestChunkListTail(t *testing.T) {
|
||||
cl := NewChunkList(NewChunkCache(), func(item *Item, s []byte) bool {
|
||||
item.text = util.ToChars(s)
|
||||
return true
|
||||
})
|
||||
total := chunkSize*2 + chunkSize/2
|
||||
for i := 0; i < total; i++ {
|
||||
cl.Push([]byte(fmt.Sprintf("item %d", i)))
|
||||
}
|
||||
|
||||
snapshot, count, changed := cl.Snapshot(0)
|
||||
assertCount := func(expected int, shouldChange bool) {
|
||||
if count != expected || CountItems(snapshot) != expected {
|
||||
t.Errorf("Unexpected count: %d (expected: %d)", count, expected)
|
||||
}
|
||||
if changed != shouldChange {
|
||||
t.Error("Unexpected change status")
|
||||
}
|
||||
}
|
||||
assertCount(total, false)
|
||||
|
||||
tail := chunkSize + chunkSize/2
|
||||
snapshot, count, changed = cl.Snapshot(tail)
|
||||
assertCount(tail, true)
|
||||
|
||||
snapshot, count, changed = cl.Snapshot(tail)
|
||||
assertCount(tail, false)
|
||||
|
||||
snapshot, count, changed = cl.Snapshot(0)
|
||||
assertCount(tail, false)
|
||||
|
||||
tail = chunkSize / 2
|
||||
snapshot, count, changed = cl.Snapshot(tail)
|
||||
assertCount(tail, true)
|
||||
}
|
||||
|
||||
@@ -1,27 +1,33 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"math"
|
||||
"time"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
const (
|
||||
// Current version
|
||||
version = "0.15.6"
|
||||
|
||||
// Core
|
||||
coordinatorDelayMax time.Duration = 100 * time.Millisecond
|
||||
coordinatorDelayStep time.Duration = 10 * time.Millisecond
|
||||
|
||||
// Reader
|
||||
readerBufferSize = 64 * 1024
|
||||
readerBufferSize = 64 * 1024
|
||||
readerSlabSize = 128 * 1024
|
||||
readerPollIntervalMin = 10 * time.Millisecond
|
||||
readerPollIntervalStep = 5 * time.Millisecond
|
||||
readerPollIntervalMax = 50 * time.Millisecond
|
||||
|
||||
// Terminal
|
||||
initialDelay = 20 * time.Millisecond
|
||||
initialDelayTac = 100 * time.Millisecond
|
||||
spinnerDuration = 200 * time.Millisecond
|
||||
maxPatternLength = 100
|
||||
initialDelay = 20 * time.Millisecond
|
||||
initialDelayTac = 100 * time.Millisecond
|
||||
spinnerDuration = 100 * time.Millisecond
|
||||
previewCancelWait = 500 * time.Millisecond
|
||||
previewChunkDelay = 100 * time.Millisecond
|
||||
previewDelayed = 500 * time.Millisecond
|
||||
maxPatternLength = 300
|
||||
maxMulti = math.MaxInt32
|
||||
|
||||
// Matcher
|
||||
numPartitionsMultiplier = 8
|
||||
@@ -56,12 +62,14 @@ const (
|
||||
EvtSearchProgress
|
||||
EvtSearchFin
|
||||
EvtHeader
|
||||
EvtClose
|
||||
EvtReady
|
||||
EvtQuit
|
||||
)
|
||||
|
||||
const (
|
||||
exitOk = 0
|
||||
exitNoMatch = 1
|
||||
exitError = 2
|
||||
exitInterrupt = 130
|
||||
ExitOk = 0
|
||||
ExitNoMatch = 1
|
||||
ExitError = 2
|
||||
ExitBecome = 126
|
||||
ExitInterrupt = 130
|
||||
)
|
||||
|
||||
@@ -1,8 +0,0 @@
// +build !windows

package fzf

const (
	// Reader
	defaultCommand = `find . -path '*/\.*' -prune -o -type f -print -o -type l -print 2> /dev/null | sed s/^..//`
)
@@ -1,8 +0,0 @@
// +build windows

package fzf

const (
	// Reader
	defaultCommand = `dir /s/b`
)

403 src/core.go
@@ -1,33 +1,9 @@
|
||||
/*
|
||||
Package fzf implements fzf, a command-line fuzzy finder.
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Junegunn Choi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
*/
|
||||
// Package fzf implements fzf, a command-line fuzzy finder.
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
@@ -42,16 +18,52 @@ Matcher -> EvtSearchFin -> Terminal (update list)
|
||||
Matcher -> EvtHeader -> Terminal (update header)
|
||||
*/
|
||||
|
||||
type revision struct {
|
||||
major int
|
||||
minor int
|
||||
}
|
||||
|
||||
func (r *revision) bumpMajor() {
|
||||
r.major++
|
||||
r.minor = 0
|
||||
}
|
||||
|
||||
func (r *revision) bumpMinor() {
|
||||
r.minor++
|
||||
}
|
||||
|
||||
func (r revision) compatible(other revision) bool {
|
||||
return r.major == other.major
|
||||
}
|
||||
|
||||
// Run starts fzf
|
||||
func Run(opts *Options) {
|
||||
func Run(opts *Options) (int, error) {
|
||||
if opts.Filter == nil {
|
||||
if opts.Tmux != nil && len(os.Getenv("TMUX")) > 0 && opts.Tmux.index >= opts.Height.index {
|
||||
return runTmux(os.Args, opts)
|
||||
}
|
||||
|
||||
if needWinpty(opts) {
|
||||
return runWinpty(os.Args, opts)
|
||||
}
|
||||
}
|
||||
|
||||
if err := postProcessOptions(opts); err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
|
||||
defer util.RunAtExitFuncs()
|
||||
|
||||
// Output channel given
|
||||
if opts.Output != nil {
|
||||
opts.Printer = func(str string) {
|
||||
opts.Output <- str
|
||||
}
|
||||
}
|
||||
|
||||
sort := opts.Sort > 0
|
||||
sortCriteria = opts.Criteria
|
||||
|
||||
if opts.Version {
|
||||
fmt.Println(version)
|
||||
os.Exit(exitOk)
|
||||
}
|
||||
|
||||
// Event channel
|
||||
eventBox := util.NewEventBox()
|
||||
|
||||
@@ -59,94 +71,132 @@ func Run(opts *Options) {
|
||||
ansiProcessor := func(data []byte) (util.Chars, *[]ansiOffset) {
|
||||
return util.ToChars(data), nil
|
||||
}
|
||||
ansiProcessorRunes := func(data []rune) (util.Chars, *[]ansiOffset) {
|
||||
return util.RunesToChars(data), nil
|
||||
}
|
||||
|
||||
var lineAnsiState, prevLineAnsiState *ansiState
|
||||
if opts.Ansi {
|
||||
if opts.Theme != nil {
|
||||
var state *ansiState
|
||||
if opts.Theme.Colored {
|
||||
ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
|
||||
trimmed, offsets, newState := extractColor(string(data), state, nil)
|
||||
state = newState
|
||||
return util.RunesToChars([]rune(trimmed)), offsets
|
||||
prevLineAnsiState = lineAnsiState
|
||||
trimmed, offsets, newState := extractColor(byteString(data), lineAnsiState, nil)
|
||||
lineAnsiState = newState
|
||||
return util.ToChars(stringBytes(trimmed)), offsets
|
||||
}
|
||||
} else {
|
||||
// When color is disabled but ansi option is given,
|
||||
// we simply strip out ANSI codes from the input
|
||||
ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
|
||||
trimmed, _, _ := extractColor(string(data), nil, nil)
|
||||
return util.RunesToChars([]rune(trimmed)), nil
|
||||
trimmed, _, _ := extractColor(byteString(data), nil, nil)
|
||||
return util.ToChars(stringBytes(trimmed)), nil
|
||||
}
|
||||
}
|
||||
ansiProcessorRunes = func(data []rune) (util.Chars, *[]ansiOffset) {
|
||||
return ansiProcessor([]byte(string(data)))
|
||||
}
|
||||
}
|
||||
|
||||
// Chunk list
|
||||
cache := NewChunkCache()
|
||||
var chunkList *ChunkList
|
||||
var itemIndex int32
|
||||
header := make([]string, 0, opts.HeaderLines)
|
||||
if len(opts.WithNth) == 0 {
|
||||
chunkList = NewChunkList(func(data []byte, index int) *Item {
|
||||
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
||||
if len(header) < opts.HeaderLines {
|
||||
header = append(header, string(data))
|
||||
header = append(header, byteString(data))
|
||||
eventBox.Set(EvtHeader, header)
|
||||
return nil
|
||||
return false
|
||||
}
|
||||
chars, colors := ansiProcessor(data)
|
||||
return &Item{
|
||||
index: int32(index),
|
||||
text: chars,
|
||||
colors: colors}
|
||||
item.text, item.colors = ansiProcessor(data)
|
||||
item.text.Index = itemIndex
|
||||
itemIndex++
|
||||
return true
|
||||
})
|
||||
} else {
|
||||
chunkList = NewChunkList(func(data []byte, index int) *Item {
|
||||
tokens := Tokenize(util.ToChars(data), opts.Delimiter)
|
||||
trans := Transform(tokens, opts.WithNth)
|
||||
if len(header) < opts.HeaderLines {
|
||||
header = append(header, string(joinTokens(trans)))
|
||||
eventBox.Set(EvtHeader, header)
|
||||
return nil
|
||||
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
||||
tokens := Tokenize(byteString(data), opts.Delimiter)
|
||||
if opts.Ansi && opts.Theme.Colored && len(tokens) > 1 {
|
||||
var ansiState *ansiState
|
||||
if prevLineAnsiState != nil {
|
||||
ansiStateDup := *prevLineAnsiState
|
||||
ansiState = &ansiStateDup
|
||||
}
|
||||
for _, token := range tokens {
|
||||
prevAnsiState := ansiState
|
||||
_, _, ansiState = extractColor(token.text.ToString(), ansiState, nil)
|
||||
if prevAnsiState != nil {
|
||||
token.text.Prepend("\x1b[m" + prevAnsiState.ToString())
|
||||
} else {
|
||||
token.text.Prepend("\x1b[m")
|
||||
}
|
||||
}
|
||||
}
|
||||
textRunes := joinTokens(trans)
|
||||
item := Item{
|
||||
index: int32(index),
|
||||
origText: &data,
|
||||
colors: nil}
|
||||
|
||||
trimmed, colors := ansiProcessorRunes(textRunes)
|
||||
item.text = trimmed
|
||||
item.colors = colors
|
||||
return &item
|
||||
trans := Transform(tokens, opts.WithNth)
|
||||
transformed := joinTokens(trans)
|
||||
if len(header) < opts.HeaderLines {
|
||||
header = append(header, transformed)
|
||||
eventBox.Set(EvtHeader, header)
|
||||
return false
|
||||
}
|
||||
item.text, item.colors = ansiProcessor(stringBytes(transformed))
|
||||
item.text.TrimTrailingWhitespaces()
|
||||
item.text.Index = itemIndex
|
||||
item.origText = &data
|
||||
itemIndex++
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// Process executor
|
||||
executor := util.NewExecutor(opts.WithShell)
|
||||
|
||||
// Terminal I/O
|
||||
var terminal *Terminal
|
||||
var err error
|
||||
var initialEnv []string
|
||||
initialReload := opts.extractReloadOnStart()
|
||||
if opts.Filter == nil {
|
||||
terminal, err = NewTerminal(opts, eventBox, executor)
|
||||
if err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
if len(initialReload) > 0 {
|
||||
var temps []string
|
||||
initialReload, temps = terminal.replacePlaceholderInInitialCommand(initialReload)
|
||||
initialEnv = terminal.environ()
|
||||
defer removeFiles(temps)
|
||||
}
|
||||
}
|
||||
|
||||
// Reader
|
||||
streamingFilter := opts.Filter != nil && !sort && !opts.Tac && !opts.Sync
|
||||
var reader *Reader
|
||||
if !streamingFilter {
|
||||
reader := Reader{func(data []byte) bool {
|
||||
reader = NewReader(func(data []byte) bool {
|
||||
return chunkList.Push(data)
|
||||
}, eventBox, opts.ReadZero}
|
||||
go reader.ReadSource()
|
||||
}, eventBox, executor, opts.ReadZero, opts.Filter == nil)
|
||||
|
||||
go reader.ReadSource(opts.Input, opts.WalkerRoot, opts.WalkerOpts, opts.WalkerSkip, initialReload, initialEnv)
|
||||
}
|
||||
|
||||
// Matcher
|
||||
forward := true
|
||||
for _, cri := range opts.Criteria[1:] {
|
||||
if cri == byEnd {
|
||||
withPos := false
|
||||
for idx := len(opts.Criteria) - 1; idx > 0; idx-- {
|
||||
switch opts.Criteria[idx] {
|
||||
case byChunk:
|
||||
withPos = true
|
||||
case byEnd:
|
||||
forward = false
|
||||
break
|
||||
}
|
||||
if cri == byBegin {
|
||||
break
|
||||
case byBegin:
|
||||
forward = true
|
||||
}
|
||||
}
|
||||
patternCache := make(map[string]*Pattern)
|
||||
patternBuilder := func(runes []rune) *Pattern {
|
||||
return BuildPattern(
|
||||
opts.Fuzzy, opts.FuzzyAlgo, opts.Extended, opts.Case, forward,
|
||||
return BuildPattern(cache, patternCache,
|
||||
opts.Fuzzy, opts.FuzzyAlgo, opts.Extended, opts.Case, opts.Normalize, forward, withPos,
|
||||
opts.Filter == nil, opts.Nth, opts.Delimiter, runes)
|
||||
}
|
||||
matcher := NewMatcher(patternBuilder, sort, opts.Tac, eventBox)
|
||||
inputRevision := revision{}
|
||||
snapshotRevision := revision{}
|
||||
matcher := NewMatcher(cache, patternBuilder, sort, opts.Tac, eventBox, inputRevision)
|
||||
|
||||
// Filtering mode
|
||||
if opts.Filter != nil {
|
||||
@@ -155,27 +205,32 @@ func Run(opts *Options) {
|
||||
}
|
||||
|
||||
pattern := patternBuilder([]rune(*opts.Filter))
|
||||
matcher.sort = pattern.sortable
|
||||
|
||||
found := false
|
||||
if streamingFilter {
|
||||
slab := util.MakeSlab(slab16Size, slab32Size)
|
||||
reader := Reader{
|
||||
mutex := sync.Mutex{}
|
||||
reader := NewReader(
|
||||
func(runes []byte) bool {
|
||||
item := chunkList.trans(runes, 0)
|
||||
if item != nil {
|
||||
if result, _, _ := pattern.MatchItem(item, false, slab); result != nil {
|
||||
item := Item{}
|
||||
if chunkList.trans(&item, runes) {
|
||||
mutex.Lock()
|
||||
if result, _, _ := pattern.MatchItem(&item, false, slab); result != nil {
|
||||
opts.Printer(item.text.ToString())
|
||||
found = true
|
||||
}
|
||||
mutex.Unlock()
|
||||
}
|
||||
return false
|
||||
}, eventBox, opts.ReadZero}
|
||||
reader.ReadSource()
|
||||
}, eventBox, executor, opts.ReadZero, false)
|
||||
reader.ReadSource(opts.Input, opts.WalkerRoot, opts.WalkerOpts, opts.WalkerSkip, initialReload, initialEnv)
|
||||
} else {
|
||||
eventBox.Unwatch(EvtReadNew)
|
||||
eventBox.WaitFor(EvtReadFin)
|
||||
|
||||
snapshot, _ := chunkList.Snapshot()
|
||||
// NOTE: Streaming filter is inherently not compatible with --tail
|
||||
snapshot, _, _ := chunkList.Snapshot(opts.Tail)
|
||||
merger, _ := matcher.scan(MatchRequest{
|
||||
chunks: snapshot,
|
||||
pattern: pattern})
|
||||
@@ -185,9 +240,9 @@ func Run(opts *Options) {
|
||||
}
|
||||
}
|
||||
if found {
|
||||
os.Exit(exitOk)
|
||||
return ExitOk, nil
|
||||
}
|
||||
os.Exit(exitNoMatch)
|
||||
return ExitNoMatch, nil
|
||||
}
|
||||
|
||||
// Synchronous search
|
||||
@@ -198,40 +253,154 @@ func Run(opts *Options) {
|
||||
|
||||
// Go interactive
|
||||
go matcher.Loop()
|
||||
defer matcher.Stop()
|
||||
|
||||
// Terminal I/O
|
||||
terminal := NewTerminal(opts, eventBox)
|
||||
deferred := opts.Select1 || opts.Exit0
|
||||
// Handling adaptive height
|
||||
maxFit := 0 // Maximum number of items that can fit on screen
|
||||
padHeight := 0
|
||||
heightUnknown := opts.Height.auto
|
||||
if heightUnknown {
|
||||
maxFit, padHeight = terminal.MaxFitAndPad()
|
||||
}
|
||||
deferred := opts.Select1 || opts.Exit0 || opts.Sync
|
||||
go terminal.Loop()
|
||||
if !deferred {
|
||||
terminal.startChan <- true
|
||||
if !deferred && !heightUnknown {
|
||||
// Start right away
|
||||
terminal.startChan <- fitpad{-1, -1}
|
||||
}
|
||||
|
||||
// Event coordination
|
||||
reading := true
|
||||
ticks := 0
|
||||
var nextCommand *commandSpec
|
||||
var nextEnviron []string
|
||||
eventBox.Watch(EvtReadNew)
|
||||
total := 0
|
||||
query := []rune{}
|
||||
determine := func(final bool) {
|
||||
if heightUnknown {
|
||||
if total >= maxFit || final {
|
||||
deferred = false
|
||||
heightUnknown = false
|
||||
terminal.startChan <- fitpad{util.Min(total, maxFit), padHeight}
|
||||
}
|
||||
} else if deferred {
|
||||
deferred = false
|
||||
terminal.startChan <- fitpad{-1, -1}
|
||||
}
|
||||
}
|
||||
|
||||
useSnapshot := false
|
||||
var snapshot []*Chunk
|
||||
var count int
|
||||
restart := func(command commandSpec, environ []string) {
|
||||
reading = true
|
||||
chunkList.Clear()
|
||||
itemIndex = 0
|
||||
inputRevision.bumpMajor()
|
||||
header = make([]string, 0, opts.HeaderLines)
|
||||
go reader.restart(command, environ)
|
||||
}
|
||||
|
||||
exitCode := ExitOk
|
||||
stop := false
|
||||
for {
|
||||
delay := true
|
||||
ticks++
|
||||
input := func() []rune {
|
||||
paused, input := terminal.Input()
|
||||
if !paused {
|
||||
query = input
|
||||
}
|
||||
return query
|
||||
}
|
||||
eventBox.Wait(func(events *util.Events) {
|
||||
defer events.Clear()
|
||||
if _, fin := (*events)[EvtReadFin]; fin {
|
||||
delete(*events, EvtReadNew)
|
||||
}
|
||||
for evt, value := range *events {
|
||||
switch evt {
|
||||
|
||||
case EvtQuit:
|
||||
if reading {
|
||||
reader.terminate()
|
||||
}
|
||||
quitSignal := value.(quitSignal)
|
||||
exitCode = quitSignal.code
|
||||
err = quitSignal.err
|
||||
stop = true
|
||||
return
|
||||
case EvtReadNew, EvtReadFin:
|
||||
reading = reading && evt == EvtReadNew
|
||||
snapshot, count := chunkList.Snapshot()
|
||||
terminal.UpdateCount(count, !reading)
|
||||
matcher.Reset(snapshot, terminal.Input(), false, !reading, sort)
|
||||
if evt == EvtReadFin && nextCommand != nil {
|
||||
restart(*nextCommand, nextEnviron)
|
||||
nextCommand = nil
|
||||
nextEnviron = nil
|
||||
break
|
||||
} else {
|
||||
reading = reading && evt == EvtReadNew
|
||||
}
|
||||
if useSnapshot && evt == EvtReadFin {
|
||||
useSnapshot = false
|
||||
}
|
||||
if !useSnapshot {
|
||||
if !snapshotRevision.compatible(inputRevision) {
|
||||
query = []rune{}
|
||||
}
|
||||
var changed bool
|
||||
snapshot, count, changed = chunkList.Snapshot(opts.Tail)
|
||||
if changed {
|
||||
inputRevision.bumpMinor()
|
||||
}
|
||||
snapshotRevision = inputRevision
|
||||
}
|
||||
total = count
|
||||
terminal.UpdateCount(total, !reading, value.(*string))
|
||||
if heightUnknown && !deferred {
|
||||
determine(!reading)
|
||||
}
|
||||
matcher.Reset(snapshot, input(), false, !reading, sort, snapshotRevision)
|
||||
|
||||
case EvtSearchNew:
|
||||
var command *commandSpec
|
||||
var environ []string
|
||||
var changed bool
|
||||
switch val := value.(type) {
|
||||
case bool:
|
||||
sort = val
|
||||
case searchRequest:
|
||||
sort = val.sort
|
||||
command = val.command
|
||||
environ = val.environ
|
||||
changed = val.changed
|
||||
if command != nil {
|
||||
useSnapshot = val.sync
|
||||
}
|
||||
}
|
||||
snapshot, _ := chunkList.Snapshot()
|
||||
matcher.Reset(snapshot, terminal.Input(), true, !reading, sort)
|
||||
if command != nil {
|
||||
if reading {
|
||||
reader.terminate()
|
||||
nextCommand = command
|
||||
nextEnviron = environ
|
||||
} else {
|
||||
restart(*command, environ)
|
||||
}
|
||||
}
|
||||
if !changed {
|
||||
break
|
||||
}
|
||||
if !useSnapshot {
|
||||
newSnapshot, newCount, changed := chunkList.Snapshot(opts.Tail)
|
||||
if changed {
|
||||
inputRevision.bumpMinor()
|
||||
}
|
||||
// We want to avoid showing empty list when reload is triggered
|
||||
// and the query string is changed at the same time i.e. command != nil && changed
|
||||
if command == nil || newCount > 0 {
|
||||
if snapshotRevision != inputRevision {
|
||||
query = []rune{}
|
||||
}
|
||||
snapshot = newSnapshot
|
||||
snapshotRevision = inputRevision
|
||||
}
|
||||
}
|
||||
matcher.Reset(snapshot, input(), true, !reading, sort, snapshotRevision)
|
||||
delay = false
|
||||
|
||||
case EvtSearchProgress:
|
||||
@@ -241,7 +410,9 @@ func Run(opts *Options) {
|
||||
}
|
||||
|
||||
case EvtHeader:
|
||||
terminal.UpdateHeader(value.([]string))
|
||||
headerPadded := make([]string, opts.HeaderLines)
|
||||
copy(headerPadded, value.([]string))
|
||||
terminal.UpdateHeader(headerPadded)
|
||||
|
||||
case EvtSearchFin:
|
||||
switch val := value.(type) {
|
||||
@@ -249,8 +420,7 @@ func Run(opts *Options) {
|
||||
if deferred {
|
||||
count := val.Length()
|
||||
if opts.Select1 && count > 1 || opts.Exit0 && !opts.Select1 && count > 0 {
|
||||
deferred = false
|
||||
terminal.startChan <- true
|
||||
determine(val.final)
|
||||
} else if val.final {
|
||||
if opts.Exit0 && count == 0 || opts.Select1 && count == 1 {
|
||||
if opts.PrintQuery {
|
||||
@@ -262,20 +432,24 @@ func Run(opts *Options) {
|
||||
for i := 0; i < count; i++ {
|
||||
opts.Printer(val.Get(i).item.AsString(opts.Ansi))
|
||||
}
|
||||
if count > 0 {
|
||||
os.Exit(exitOk)
|
||||
if count == 0 {
|
||||
exitCode = ExitNoMatch
|
||||
}
|
||||
os.Exit(exitNoMatch)
|
||||
stop = true
|
||||
return
|
||||
}
|
||||
deferred = false
|
||||
terminal.startChan <- true
|
||||
determine(val.final)
|
||||
}
|
||||
}
|
||||
terminal.UpdateList(val)
|
||||
}
|
||||
}
|
||||
}
|
||||
events.Clear()
|
||||
})
|
||||
if stop {
|
||||
break
|
||||
}
|
||||
if delay && reading {
|
||||
dur := util.DurWithin(
|
||||
time.Duration(ticks)*coordinatorDelayStep,
|
||||
@@ -283,4 +457,5 @@ func Run(opts *Options) {
|
||||
time.Sleep(dur)
|
||||
}
|
||||
}
|
||||
return exitCode, err
|
||||
}
|
||||
|
||||
35 src/functions.go (new file)
@@ -0,0 +1,35 @@
package fzf

import (
	"os"
	"strings"
	"unsafe"
)

func WriteTemporaryFile(data []string, printSep string) string {
	f, err := os.CreateTemp("", "fzf-temp-*")
	if err != nil {
		// Unable to create temporary file
		// FIXME: Should we terminate the program?
		return ""
	}
	defer f.Close()

	f.WriteString(strings.Join(data, printSep))
	f.WriteString(printSep)
	return f.Name()
}

func removeFiles(files []string) {
	for _, filename := range files {
		os.Remove(filename)
	}
}

func stringBytes(data string) []byte {
	return unsafe.Slice(unsafe.StringData(data), len(data))
}

func byteString(data []byte) string {
	return unsafe.String(unsafe.SliceData(data), len(data))
}

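The two unsafe helpers above convert between string and []byte without copying; a usage sketch (illustrative; the resulting views must be treated as read-only and must not outlive the data they alias):

data := []byte("hello")
s := byteString(data)     // shares data's backing array; no allocation
b := stringBytes("world") // read-only view of the string's bytes
_, _ = s, b
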
@@ -1,7 +0,0 @@
package main

import "github.com/junegunn/fzf/src"

func main() {
	fzf.Run(fzf.ParseOptions())
}

@@ -2,7 +2,6 @@ package fzf

import (
	"errors"
	"io/ioutil"
	"os"
	"strings"
)
@@ -26,12 +25,12 @@ func NewHistory(path string, maxSize int) (*History, error) {
	}

	// Read history file
	data, err := ioutil.ReadFile(path)
	data, err := os.ReadFile(path)
	if err != nil {
		// If it doesn't exist, check if we can create a file with the name
		if os.IsNotExist(err) {
			data = []byte{}
			if err := ioutil.WriteFile(path, data, 0600); err != nil {
			if err := os.WriteFile(path, data, 0600); err != nil {
				return nil, fmtError(err)
			}
		} else {
@@ -59,14 +58,14 @@ func (h *History) append(line string) error {

	lines := append(h.lines[:len(h.lines)-1], line)
	if len(lines) > h.maxSize {
		lines = lines[len(lines)-h.maxSize : len(lines)]
		lines = lines[len(lines)-h.maxSize:]
	}
	h.lines = append(lines, "")
	return ioutil.WriteFile(h.path, []byte(strings.Join(h.lines, "\n")), 0600)
	return os.WriteFile(h.path, []byte(strings.Join(h.lines, "\n")), 0600)
}

func (h *History) override(str string) {
	// You can update the history but they're not written to the file
	// You can update the history, but they're not written to the file
	if h.cursor == len(h.lines)-1 {
		h.lines[h.cursor] = str
	} else if h.cursor < len(h.lines)-1 {

@@ -1,9 +1,7 @@
package fzf

import (
	"io/ioutil"
	"os"
	"os/user"
	"runtime"
	"testing"
)
@@ -12,16 +10,12 @@ func TestHistory(t *testing.T) {
	maxHistory := 50

	// Invalid arguments
	user, _ := user.Current()
	var paths []string
	if runtime.GOOS == "windows" {
		// GOPATH should exist, so we shouldn't be able to override it
		paths = []string{os.Getenv("GOPATH")}
	} else {
		paths = []string{"/etc", "/proc"}
		if user.Name != "root" {
			paths = append(paths, "/etc/sudoers")
		}
	}

	for _, path := range paths {
@@ -30,7 +24,7 @@ func TestHistory(t *testing.T) {
		}
	}

	f, _ := ioutil.TempFile("", "fzf-history")
	f, _ := os.CreateTemp("", "fzf-history")
	f.Close()

	{ // Append lines

21 src/item.go
@@ -1,21 +1,28 @@
package fzf

import (
	"math"

	"github.com/junegunn/fzf/src/util"
)

// Item represents each input line
// Item represents each input line. 56 bytes.
type Item struct {
	index       int32
	text        util.Chars
	origText    *[]byte
	colors      *[]ansiOffset
	transformed []Token
	text        util.Chars    // 32 = 24 + 1 + 1 + 2 + 4
	transformed *[]Token      // 8
	origText    *[]byte       // 8
	colors      *[]ansiOffset // 8
}

// Index returns ordinal index of the Item
func (item *Item) Index() int32 {
	return item.index
	return item.text.Index
}

var minItem = Item{text: util.Chars{Index: math.MinInt32}}

func (item *Item) TrimLength() uint16 {
	return item.text.TrimLength()
}

// Colors returns ansiOffsets of the Item

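The size noted in the struct comment can be checked directly; a sketch (assumes a 64-bit platform, where the annotated field sizes add up to 56 bytes):

// import "unsafe"
println(unsafe.Sizeof(Item{})) // expected to print 56
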
@@ -12,14 +12,16 @@ import (
|
||||
|
||||
// MatchRequest represents a search request
|
||||
type MatchRequest struct {
|
||||
chunks []*Chunk
|
||||
pattern *Pattern
|
||||
final bool
|
||||
sort bool
|
||||
chunks []*Chunk
|
||||
pattern *Pattern
|
||||
final bool
|
||||
sort bool
|
||||
revision revision
|
||||
}
|
||||
|
||||
// Matcher is responsible for performing search
|
||||
type Matcher struct {
|
||||
cache *ChunkCache
|
||||
patternBuilder func([]rune) *Pattern
|
||||
sort bool
|
||||
tac bool
|
||||
@@ -28,6 +30,7 @@ type Matcher struct {
|
||||
partitions int
|
||||
slab []*util.Slab
|
||||
mergerCache map[string]*Merger
|
||||
revision revision
|
||||
}
|
||||
|
||||
const (
|
||||
@@ -36,10 +39,11 @@ const (
|
||||
)
|
||||
|
||||
// NewMatcher returns a new Matcher
|
||||
func NewMatcher(patternBuilder func([]rune) *Pattern,
|
||||
sort bool, tac bool, eventBox *util.EventBox) *Matcher {
|
||||
func NewMatcher(cache *ChunkCache, patternBuilder func([]rune) *Pattern,
|
||||
sort bool, tac bool, eventBox *util.EventBox, revision revision) *Matcher {
|
||||
partitions := util.Min(numPartitionsMultiplier*runtime.NumCPU(), maxPartitions)
|
||||
return &Matcher{
|
||||
cache: cache,
|
||||
patternBuilder: patternBuilder,
|
||||
sort: sort,
|
||||
tac: tac,
|
||||
@@ -47,7 +51,8 @@ func NewMatcher(patternBuilder func([]rune) *Pattern,
|
||||
reqBox: util.NewEventBox(),
|
||||
partitions: partitions,
|
||||
slab: make([]*util.Slab, partitions),
|
||||
mergerCache: make(map[string]*Merger)}
|
||||
mergerCache: make(map[string]*Merger),
|
||||
revision: revision}
|
||||
}
|
||||
|
||||
// Loop puts Matcher in action
|
||||
@@ -57,8 +62,13 @@ func (m *Matcher) Loop() {
|
||||
for {
|
||||
var request MatchRequest
|
||||
|
||||
stop := false
|
||||
m.reqBox.Wait(func(events *util.Events) {
|
||||
for _, val := range *events {
|
||||
for t, val := range *events {
|
||||
if t == reqQuit {
|
||||
stop = true
|
||||
return
|
||||
}
|
||||
switch val := val.(type) {
|
||||
case MatchRequest:
|
||||
request = val
|
||||
@@ -68,11 +78,19 @@ func (m *Matcher) Loop() {
|
||||
}
|
||||
events.Clear()
|
||||
})
|
||||
if stop {
|
||||
break
|
||||
}
|
||||
|
||||
if request.sort != m.sort {
|
||||
cacheCleared := false
|
||||
if request.sort != m.sort || request.revision != m.revision {
|
||||
m.sort = request.sort
|
||||
m.revision = request.revision
|
||||
m.mergerCache = make(map[string]*Merger)
|
||||
clearChunkCache()
|
||||
if !request.revision.compatible(m.revision) {
|
||||
m.cache.Clear()
|
||||
}
|
||||
cacheCleared = true
|
||||
}
|
||||
|
||||
// Restart search
|
||||
@@ -81,20 +99,20 @@ func (m *Matcher) Loop() {
|
||||
cancelled := false
|
||||
count := CountItems(request.chunks)
|
||||
|
||||
foundCache := false
|
||||
if count == prevCount {
|
||||
// Look up mergerCache
|
||||
if cached, found := m.mergerCache[patternString]; found {
|
||||
foundCache = true
|
||||
merger = cached
|
||||
if !cacheCleared {
|
||||
if count == prevCount {
|
||||
// Look up mergerCache
|
||||
if cached, found := m.mergerCache[patternString]; found {
|
||||
merger = cached
|
||||
}
|
||||
} else {
|
||||
// Invalidate mergerCache
|
||||
prevCount = count
|
||||
m.mergerCache = make(map[string]*Merger)
|
||||
}
|
||||
} else {
|
||||
// Invalidate mergerCache
|
||||
prevCount = count
|
||||
m.mergerCache = make(map[string]*Merger)
|
||||
}
|
||||
|
||||
if !foundCache {
|
||||
if merger == nil {
|
||||
merger, cancelled = m.scan(request)
|
||||
}
|
||||
|
||||
@@ -131,7 +149,7 @@ func (m *Matcher) sliceChunks(chunks []*Chunk) [][]*Chunk {
|
||||
|
||||
type partialResult struct {
|
||||
index int
|
||||
matches []*Result
|
||||
matches []Result
|
||||
}
|
||||
|
||||
func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
||||
@@ -139,13 +157,14 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
||||
|
||||
numChunks := len(request.chunks)
|
||||
if numChunks == 0 {
|
||||
return EmptyMerger, false
|
||||
return EmptyMerger(request.revision), false
|
||||
}
|
||||
pattern := request.pattern
|
||||
if pattern.IsEmpty() {
|
||||
return PassMerger(&request.chunks, m.tac), false
|
||||
return PassMerger(&request.chunks, m.tac, request.revision), false
|
||||
}
|
||||
|
||||
minIndex := request.chunks[0].items[0].Index()
|
||||
cancelled := util.NewAtomicBool(false)
|
||||
|
||||
slices := m.sliceChunks(request.chunks)
|
||||
@@ -162,7 +181,7 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
||||
go func(idx int, slab *util.Slab, chunks []*Chunk) {
|
||||
defer func() { waitGroup.Done() }()
|
||||
count := 0
|
||||
allMatches := make([][]*Result, len(chunks))
|
||||
allMatches := make([][]Result, len(chunks))
|
||||
for idx, chunk := range chunks {
|
||||
matches := request.pattern.Match(chunk, slab)
|
||||
allMatches[idx] = matches
|
||||
@@ -172,11 +191,11 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
||||
}
|
||||
countChan <- len(matches)
|
||||
}
|
||||
sliceMatches := make([]*Result, 0, count)
|
||||
sliceMatches := make([]Result, 0, count)
|
||||
for _, matches := range allMatches {
|
||||
sliceMatches = append(sliceMatches, matches...)
|
||||
}
|
||||
if m.sort {
|
||||
if m.sort && request.pattern.sortable {
|
||||
if m.tac {
|
||||
sort.Sort(ByRelevanceTac(sliceMatches))
|
||||
} else {
|
||||
@@ -207,21 +226,21 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
||||
return nil, wait()
|
||||
}
|
||||
|
||||
if time.Now().Sub(startedAt) > progressMinDuration {
|
||||
if time.Since(startedAt) > progressMinDuration {
|
||||
m.eventBox.Set(EvtSearchProgress, float32(count)/float32(numChunks))
|
||||
}
|
||||
}
|
||||
|
||||
partialResults := make([][]*Result, numSlices)
|
||||
for _ = range slices {
|
||||
partialResults := make([][]Result, numSlices)
|
||||
for range slices {
|
||||
partialResult := <-resultChan
|
||||
partialResults[partialResult.index] = partialResult.matches
|
||||
}
|
||||
return NewMerger(pattern, partialResults, m.sort, m.tac), false
|
||||
return NewMerger(pattern, partialResults, m.sort && request.pattern.sortable, m.tac, request.revision, minIndex), false
|
||||
}
|
||||
|
||||
// Reset is called to interrupt/signal the ongoing search
|
||||
func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final bool, sort bool) {
|
||||
func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final bool, sort bool, revision revision) {
|
||||
pattern := m.patternBuilder(patternRunes)
|
||||
|
||||
var event util.EventType
|
||||
@@ -230,5 +249,9 @@ func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final
|
||||
} else {
|
||||
event = reqRetry
|
||||
}
|
||||
m.reqBox.Set(event, MatchRequest{chunks, pattern, final, sort})
|
||||
m.reqBox.Set(event, MatchRequest{chunks, pattern, final, sort, revision})
|
||||
}
|
||||
|
||||
func (m *Matcher) Stop() {
|
||||
m.reqBox.Set(reqQuit, nil)
|
||||
}
|
||||
|
||||
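The new revision value is threaded through MatchRequest, Matcher and Merger, and Matcher.Loop rebuilds the merger cache on any revision change but clears the chunk cache only when the revisions are not compatible. The revision type itself is not part of this diff; the sketch below assumes a major/minor pair where only a major bump means existing items may have changed.

package main

import "fmt"

// revisionSketch is a hypothetical stand-in for the unexported revision type
// referenced by MatchRequest and Matcher; the real definition is elsewhere
// in the fzf sources.
type revisionSketch struct {
    major int // bumped when existing items may have changed
    minor int // bumped when items were merely appended
}

func (r revisionSketch) compatible(other revisionSketch) bool {
    return r.major == other.major
}

func main() {
    old := revisionSketch{major: 1, minor: 3}
    appended := revisionSketch{major: 1, minor: 4} // more input arrived
    changed := revisionSketch{major: 2, minor: 0}  // existing items changed

    // Mirrors Matcher.Loop: the merger cache is always rebuilt on a revision
    // change, but the chunk cache is cleared only when incompatible.
    fmt.Println(appended.compatible(old)) // true  -> keep m.cache
    fmt.Println(changed.compatible(old))  // false -> m.cache.Clear()
}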
113 src/merger.go
@@ -3,49 +3,63 @@ package fzf
|
||||
import "fmt"
|
||||
|
||||
// EmptyMerger is a Merger with no data
|
||||
var EmptyMerger = NewMerger(nil, [][]*Result{}, false, false)
|
||||
func EmptyMerger(revision revision) *Merger {
|
||||
return NewMerger(nil, [][]Result{}, false, false, revision, 0)
|
||||
}
|
||||
|
||||
// Merger holds a set of locally sorted lists of items and provides the view of
|
||||
// a single, globally-sorted list
|
||||
type Merger struct {
|
||||
pattern *Pattern
|
||||
lists [][]*Result
|
||||
merged []*Result
|
||||
chunks *[]*Chunk
|
||||
cursors []int
|
||||
sorted bool
|
||||
tac bool
|
||||
final bool
|
||||
count int
|
||||
pattern *Pattern
|
||||
lists [][]Result
|
||||
merged []Result
|
||||
chunks *[]*Chunk
|
||||
cursors []int
|
||||
sorted bool
|
||||
tac bool
|
||||
final bool
|
||||
count int
|
||||
pass bool
|
||||
revision revision
|
||||
minIndex int32
|
||||
}
|
||||
|
||||
// PassMerger returns a new Merger that simply returns the items in the
|
||||
// original order
|
||||
func PassMerger(chunks *[]*Chunk, tac bool) *Merger {
|
||||
func PassMerger(chunks *[]*Chunk, tac bool, revision revision) *Merger {
|
||||
var minIndex int32
|
||||
if len(*chunks) > 0 {
|
||||
minIndex = (*chunks)[0].items[0].Index()
|
||||
}
|
||||
mg := Merger{
|
||||
pattern: nil,
|
||||
chunks: chunks,
|
||||
tac: tac,
|
||||
count: 0}
|
||||
pattern: nil,
|
||||
chunks: chunks,
|
||||
tac: tac,
|
||||
count: 0,
|
||||
pass: true,
|
||||
revision: revision,
|
||||
minIndex: minIndex}
|
||||
|
||||
for _, chunk := range *mg.chunks {
|
||||
mg.count += len(*chunk)
|
||||
mg.count += chunk.count
|
||||
}
|
||||
return &mg
|
||||
}
|
||||
|
||||
// NewMerger returns a new Merger
|
||||
func NewMerger(pattern *Pattern, lists [][]*Result, sorted bool, tac bool) *Merger {
|
||||
func NewMerger(pattern *Pattern, lists [][]Result, sorted bool, tac bool, revision revision, minIndex int32) *Merger {
|
||||
mg := Merger{
|
||||
pattern: pattern,
|
||||
lists: lists,
|
||||
merged: []*Result{},
|
||||
chunks: nil,
|
||||
cursors: make([]int, len(lists)),
|
||||
sorted: sorted,
|
||||
tac: tac,
|
||||
final: false,
|
||||
count: 0}
|
||||
pattern: pattern,
|
||||
lists: lists,
|
||||
merged: []Result{},
|
||||
chunks: nil,
|
||||
cursors: make([]int, len(lists)),
|
||||
sorted: sorted,
|
||||
tac: tac,
|
||||
final: false,
|
||||
count: 0,
|
||||
revision: revision,
|
||||
minIndex: minIndex}
|
||||
|
||||
for _, list := range mg.lists {
|
||||
mg.count += len(list)
|
||||
@@ -53,19 +67,57 @@ func NewMerger(pattern *Pattern, lists [][]*Result, sorted bool, tac bool) *Merg
|
||||
return &mg
|
||||
}
|
||||
|
||||
// Revision returns revision number
|
||||
func (mg *Merger) Revision() revision {
|
||||
return mg.revision
|
||||
}
|
||||
|
||||
// Length returns the number of items
|
||||
func (mg *Merger) Length() int {
|
||||
return mg.count
|
||||
}
|
||||
|
||||
func (mg *Merger) First() Result {
|
||||
if mg.tac && !mg.sorted {
|
||||
return mg.Get(mg.count - 1)
|
||||
}
|
||||
return mg.Get(0)
|
||||
}
|
||||
|
||||
// FindIndex returns the index of the item with the given item index
|
||||
func (mg *Merger) FindIndex(itemIndex int32) int {
|
||||
index := -1
|
||||
if mg.pass {
|
||||
index = int(itemIndex - mg.minIndex)
|
||||
if mg.tac {
|
||||
index = mg.count - index - 1
|
||||
}
|
||||
} else {
|
||||
for i := 0; i < mg.count; i++ {
|
||||
if mg.Get(i).item.Index() == itemIndex {
|
||||
index = i
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return index
|
||||
}
|
||||
|
||||
// Get returns the pointer to the Result object indexed by the given integer
|
||||
func (mg *Merger) Get(idx int) *Result {
|
||||
func (mg *Merger) Get(idx int) Result {
|
||||
if mg.chunks != nil {
|
||||
if mg.tac {
|
||||
idx = mg.count - idx - 1
|
||||
}
|
||||
firstChunk := (*mg.chunks)[0]
|
||||
if firstChunk.count < chunkSize && idx >= firstChunk.count {
|
||||
idx -= firstChunk.count
|
||||
|
||||
chunk := (*mg.chunks)[idx/chunkSize+1]
|
||||
return Result{item: &chunk.items[idx%chunkSize]}
|
||||
}
|
||||
chunk := (*mg.chunks)[idx/chunkSize]
|
||||
return &Result{item: (*chunk)[idx%chunkSize]}
|
||||
return Result{item: &chunk.items[idx%chunkSize]}
|
||||
}
|
||||
|
||||
if mg.sorted {
|
||||
@@ -89,7 +141,7 @@ func (mg *Merger) cacheable() bool {
|
||||
return mg.count < mergerCacheMax
|
||||
}
|
||||
|
||||
func (mg *Merger) mergedGet(idx int) *Result {
|
||||
func (mg *Merger) mergedGet(idx int) Result {
|
||||
for i := len(mg.merged); i <= idx; i++ {
|
||||
minRank := minRank()
|
||||
minIdx := -1
|
||||
@@ -100,13 +152,12 @@ func (mg *Merger) mergedGet(idx int) *Result {
|
||||
continue
|
||||
}
|
||||
if cursor >= 0 {
|
||||
rank := list[cursor].rank
|
||||
rank := list[cursor]
|
||||
if minIdx < 0 || compareRanks(rank, minRank, mg.tac) {
|
||||
minRank = rank
|
||||
minIdx = listIdx
|
||||
}
|
||||
}
|
||||
mg.cursors[listIdx] = cursor
|
||||
}
|
||||
|
||||
if minIdx >= 0 {
|
||||
|
||||
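When the merger merely passes chunks through (pass == true), FindIndex can compute an item's position arithmetically instead of scanning: the item index minus minIndex, mirrored for --tac. A worked example with made-up numbers:

package main

import "fmt"

func main() {
    // Hypothetical values: the merger holds items with indexes 100..104.
    minIndex, count := int32(100), 5
    itemIndex := int32(103)

    index := int(itemIndex - minIndex) // 3: fourth item in input order
    tac := true
    if tac {
        index = count - index - 1 // 1: positions are reversed under --tac
    }
    fmt.Println(index)
}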
@@ -15,28 +15,29 @@ func assert(t *testing.T, cond bool, msg ...string) {
|
||||
}
|
||||
}
|
||||
|
||||
func randResult() *Result {
|
||||
func randResult() Result {
|
||||
str := fmt.Sprintf("%d", rand.Uint32())
|
||||
return &Result{
|
||||
item: &Item{text: util.RunesToChars([]rune(str))},
|
||||
rank: rank{index: rand.Int31()}}
|
||||
chars := util.ToChars([]byte(str))
|
||||
chars.Index = rand.Int31()
|
||||
return Result{item: &Item{text: chars}}
|
||||
}
|
||||
|
||||
func TestEmptyMerger(t *testing.T) {
|
||||
assert(t, EmptyMerger.Length() == 0, "Not empty")
|
||||
assert(t, EmptyMerger.count == 0, "Invalid count")
|
||||
assert(t, len(EmptyMerger.lists) == 0, "Invalid lists")
|
||||
assert(t, len(EmptyMerger.merged) == 0, "Invalid merged list")
|
||||
r := revision{}
|
||||
assert(t, EmptyMerger(r).Length() == 0, "Not empty")
|
||||
assert(t, EmptyMerger(r).count == 0, "Invalid count")
|
||||
assert(t, len(EmptyMerger(r).lists) == 0, "Invalid lists")
|
||||
assert(t, len(EmptyMerger(r).merged) == 0, "Invalid merged list")
|
||||
}
|
||||
|
||||
func buildLists(partiallySorted bool) ([][]*Result, []*Result) {
|
||||
func buildLists(partiallySorted bool) ([][]Result, []Result) {
|
||||
numLists := 4
|
||||
lists := make([][]*Result, numLists)
|
||||
lists := make([][]Result, numLists)
|
||||
cnt := 0
|
||||
for i := 0; i < numLists; i++ {
|
||||
numResults := rand.Int() % 20
|
||||
cnt += numResults
|
||||
lists[i] = make([]*Result, numResults)
|
||||
lists[i] = make([]Result, numResults)
|
||||
for j := 0; j < numResults; j++ {
|
||||
item := randResult()
|
||||
lists[i][j] = item
|
||||
@@ -45,7 +46,7 @@ func buildLists(partiallySorted bool) ([][]*Result, []*Result) {
|
||||
sort.Sort(ByRelevance(lists[i]))
|
||||
}
|
||||
}
|
||||
items := []*Result{}
|
||||
items := []Result{}
|
||||
for _, list := range lists {
|
||||
items = append(items, list...)
|
||||
}
|
||||
@@ -57,7 +58,7 @@ func TestMergerUnsorted(t *testing.T) {
|
||||
cnt := len(items)
|
||||
|
||||
// Not sorted: same order
|
||||
mg := NewMerger(nil, lists, false, false)
|
||||
mg := NewMerger(nil, lists, false, false, revision{}, 0)
|
||||
assert(t, cnt == mg.Length(), "Invalid Length")
|
||||
for i := 0; i < cnt; i++ {
|
||||
assert(t, items[i] == mg.Get(i), "Invalid Get")
|
||||
@@ -69,7 +70,7 @@ func TestMergerSorted(t *testing.T) {
|
||||
cnt := len(items)
|
||||
|
||||
// Sorted sorted order
|
||||
mg := NewMerger(nil, lists, true, false)
|
||||
mg := NewMerger(nil, lists, true, false, revision{}, 0)
|
||||
assert(t, cnt == mg.Length(), "Invalid Length")
|
||||
sort.Sort(ByRelevance(items))
|
||||
for i := 0; i < cnt; i++ {
|
||||
@@ -79,7 +80,7 @@ func TestMergerSorted(t *testing.T) {
|
||||
}
|
||||
|
||||
// Inverse order
|
||||
mg2 := NewMerger(nil, lists, true, false)
|
||||
mg2 := NewMerger(nil, lists, true, false, revision{}, 0)
|
||||
for i := cnt - 1; i >= 0; i-- {
|
||||
if items[i] != mg2.Get(i) {
|
||||
t.Error("Not sorted", items[i], mg2.Get(i))
|
||||
|
||||
3107 src/options.go
File diff suppressed because it is too large
13 src/options_no_pprof.go Normal file
@@ -0,0 +1,13 @@
//go:build !pprof
// +build !pprof

package fzf

import "errors"

func (o *Options) initProfiling() error {
    if o.CPUProfile != "" || o.MEMProfile != "" || o.BlockProfile != "" || o.MutexProfile != "" {
        return errors.New("error: profiling not supported: FZF must be built with '-tags=pprof' to enable profiling")
    }
    return nil
}
73 src/options_pprof.go Normal file
@@ -0,0 +1,73 @@
//go:build pprof
// +build pprof

package fzf

import (
    "fmt"
    "os"
    "runtime"
    "runtime/pprof"

    "github.com/junegunn/fzf/src/util"
)

func (o *Options) initProfiling() error {
    if o.CPUProfile != "" {
        f, err := os.Create(o.CPUProfile)
        if err != nil {
            return fmt.Errorf("could not create CPU profile: %w", err)
        }

        if err := pprof.StartCPUProfile(f); err != nil {
            return fmt.Errorf("could not start CPU profile: %w", err)
        }

        util.AtExit(func() {
            pprof.StopCPUProfile()
            if err := f.Close(); err != nil {
                fmt.Fprintln(os.Stderr, "Error: closing cpu profile:", err)
            }
        })
    }

    stopProfile := func(name string, f *os.File) {
        if err := pprof.Lookup(name).WriteTo(f, 0); err != nil {
            fmt.Fprintf(os.Stderr, "Error: could not write %s profile: %v\n", name, err)
        }
        if err := f.Close(); err != nil {
            fmt.Fprintf(os.Stderr, "Error: closing %s profile: %v\n", name, err)
        }
    }

    if o.MEMProfile != "" {
        f, err := os.Create(o.MEMProfile)
        if err != nil {
            return fmt.Errorf("could not create MEM profile: %w", err)
        }
        util.AtExit(func() {
            runtime.GC()
            stopProfile("allocs", f)
        })
    }

    if o.BlockProfile != "" {
        runtime.SetBlockProfileRate(1)
        f, err := os.Create(o.BlockProfile)
        if err != nil {
            return fmt.Errorf("could not create BLOCK profile: %w", err)
        }
        util.AtExit(func() { stopProfile("block", f) })
    }

    if o.MutexProfile != "" {
        runtime.SetMutexProfileFraction(1)
        f, err := os.Create(o.MutexProfile)
        if err != nil {
            return fmt.Errorf("could not create MUTEX profile: %w", err)
        }
        util.AtExit(func() { stopProfile("mutex", f) })
    }

    return nil
}
89 src/options_pprof_test.go Normal file
@@ -0,0 +1,89 @@
|
||||
//go:build pprof
|
||||
// +build pprof
|
||||
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
// runInitProfileTests is an internal flag used by TestInitProfiling
|
||||
var runInitProfileTests = flag.Bool("test-init-profile", false, "run init profile tests")
|
||||
|
||||
func TestInitProfiling(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("short test")
|
||||
}
|
||||
|
||||
// Run this test in a separate process since it interferes with
|
||||
// profiling and modifies the global atexit state. Without this
|
||||
// running `go test -bench . -cpuprofile cpu.out` will fail.
|
||||
if !*runInitProfileTests {
|
||||
t.Parallel()
|
||||
|
||||
// Make sure we are not the child process.
|
||||
if os.Getenv("_FZF_CHILD_PROC") != "" {
|
||||
t.Fatal("already running as child process!")
|
||||
}
|
||||
|
||||
cmd := exec.Command(os.Args[0],
|
||||
"-test.timeout", "30s",
|
||||
"-test.run", "^"+t.Name()+"$",
|
||||
"-test-init-profile",
|
||||
)
|
||||
cmd.Env = append(os.Environ(), "_FZF_CHILD_PROC=1")
|
||||
|
||||
out, err := cmd.CombinedOutput()
|
||||
out = bytes.TrimSpace(out)
|
||||
if err != nil {
|
||||
t.Fatalf("Child test process failed: %v:\n%s", err, out)
|
||||
}
|
||||
// Make sure the test actually ran
|
||||
if bytes.Contains(out, []byte("no tests to run")) {
|
||||
t.Fatalf("Failed to run test %q:\n%s", t.Name(), out)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Child process
|
||||
|
||||
tempdir := t.TempDir()
|
||||
t.Cleanup(util.RunAtExitFuncs)
|
||||
|
||||
o := Options{
|
||||
CPUProfile: filepath.Join(tempdir, "cpu.prof"),
|
||||
MEMProfile: filepath.Join(tempdir, "mem.prof"),
|
||||
BlockProfile: filepath.Join(tempdir, "block.prof"),
|
||||
MutexProfile: filepath.Join(tempdir, "mutex.prof"),
|
||||
}
|
||||
if err := o.initProfiling(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
profiles := []string{
|
||||
o.CPUProfile,
|
||||
o.MEMProfile,
|
||||
o.BlockProfile,
|
||||
o.MutexProfile,
|
||||
}
|
||||
for _, name := range profiles {
|
||||
if _, err := os.Stat(name); err != nil {
|
||||
t.Errorf("Failed to create profile %s: %v", filepath.Base(name), err)
|
||||
}
|
||||
}
|
||||
|
||||
util.RunAtExitFuncs()
|
||||
|
||||
for _, name := range profiles {
|
||||
if _, err := os.Stat(name); err != nil {
|
||||
t.Errorf("Failed to write profile %s: %v", filepath.Base(name), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,11 +2,10 @@ package fzf
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/junegunn/fzf/src/tui"
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
func TestDelimiterRegex(t *testing.T) {
|
||||
@@ -44,20 +43,20 @@ func TestDelimiterRegex(t *testing.T) {
|
||||
|
||||
func TestDelimiterRegexString(t *testing.T) {
|
||||
delim := delimiterRegexp("*")
|
||||
tokens := Tokenize(util.RunesToChars([]rune("-*--*---**---")), delim)
|
||||
tokens := Tokenize("-*--*---**---", delim)
|
||||
if delim.regex != nil ||
|
||||
tokens[0].text.ToString() != "-*" ||
|
||||
tokens[1].text.ToString() != "--*" ||
|
||||
tokens[2].text.ToString() != "---*" ||
|
||||
tokens[3].text.ToString() != "*" ||
|
||||
tokens[4].text.ToString() != "---" {
|
||||
t.Errorf("%s %s %d", delim, tokens, len(tokens))
|
||||
t.Errorf("%s %v %d", delim, tokens, len(tokens))
|
||||
}
|
||||
}
|
||||
|
||||
func TestDelimiterRegexRegex(t *testing.T) {
|
||||
delim := delimiterRegexp("--\\*")
|
||||
tokens := Tokenize(util.RunesToChars([]rune("-*--*---**---")), delim)
|
||||
tokens := Tokenize("-*--*---**---", delim)
|
||||
if delim.str != nil ||
|
||||
tokens[0].text.ToString() != "-*--*" ||
|
||||
tokens[1].text.ToString() != "---*" ||
|
||||
@@ -66,17 +65,30 @@ func TestDelimiterRegexRegex(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestDelimiterRegexRegexCaret(t *testing.T) {
|
||||
delim := delimiterRegexp(`(^\s*|\s+)`)
|
||||
tokens := Tokenize("foo bar baz", delim)
|
||||
if delim.str != nil ||
|
||||
len(tokens) != 4 ||
|
||||
tokens[0].text.ToString() != "" ||
|
||||
tokens[1].text.ToString() != "foo " ||
|
||||
tokens[2].text.ToString() != "bar " ||
|
||||
tokens[3].text.ToString() != "baz" {
|
||||
t.Errorf("%s %d", tokens, len(tokens))
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitNth(t *testing.T) {
|
||||
{
|
||||
ranges := splitNth("..")
|
||||
ranges, _ := splitNth("..")
|
||||
if len(ranges) != 1 ||
|
||||
ranges[0].begin != rangeEllipsis ||
|
||||
ranges[0].end != rangeEllipsis {
|
||||
t.Errorf("%s", ranges)
|
||||
t.Errorf("%v", ranges)
|
||||
}
|
||||
}
|
||||
{
|
||||
ranges := splitNth("..3,1..,2..3,4..-1,-3..-2,..,2,-2,2..-2,1..-1")
|
||||
ranges, _ := splitNth("..3,1..,2..3,4..-1,-3..-2,..,2,-2,2..-2,1..-1")
|
||||
if len(ranges) != 10 ||
|
||||
ranges[0].begin != rangeEllipsis || ranges[0].end != 3 ||
|
||||
ranges[1].begin != rangeEllipsis || ranges[1].end != rangeEllipsis ||
|
||||
@@ -88,94 +100,99 @@ func TestSplitNth(t *testing.T) {
|
||||
ranges[7].begin != -2 || ranges[7].end != -2 ||
|
||||
ranges[8].begin != 2 || ranges[8].end != -2 ||
|
||||
ranges[9].begin != rangeEllipsis || ranges[9].end != rangeEllipsis {
|
||||
t.Errorf("%s", ranges)
|
||||
t.Errorf("%v", ranges)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIrrelevantNth(t *testing.T) {
|
||||
index := 0
|
||||
{
|
||||
opts := defaultOptions()
|
||||
words := []string{"--nth", "..", "-x"}
|
||||
parseOptions(opts, words)
|
||||
parseOptions(&index, opts, words)
|
||||
postProcessOptions(opts)
|
||||
if len(opts.Nth) != 0 {
|
||||
t.Errorf("nth should be empty: %s", opts.Nth)
|
||||
t.Errorf("nth should be empty: %v", opts.Nth)
|
||||
}
|
||||
}
|
||||
for _, words := range [][]string{[]string{"--nth", "..,3", "+x"}, []string{"--nth", "3,1..", "+x"}, []string{"--nth", "..-1,1", "+x"}} {
|
||||
for _, words := range [][]string{{"--nth", "..,3", "+x"}, {"--nth", "3,1..", "+x"}, {"--nth", "..-1,1", "+x"}} {
|
||||
{
|
||||
opts := defaultOptions()
|
||||
parseOptions(opts, words)
|
||||
parseOptions(&index, opts, words)
|
||||
postProcessOptions(opts)
|
||||
if len(opts.Nth) != 0 {
|
||||
t.Errorf("nth should be empty: %s", opts.Nth)
|
||||
t.Errorf("nth should be empty: %v", opts.Nth)
|
||||
}
|
||||
}
|
||||
{
|
||||
opts := defaultOptions()
|
||||
words = append(words, "-x")
|
||||
parseOptions(opts, words)
|
||||
parseOptions(&index, opts, words)
|
||||
postProcessOptions(opts)
|
||||
if len(opts.Nth) != 2 {
|
||||
t.Errorf("nth should not be empty: %s", opts.Nth)
|
||||
t.Errorf("nth should not be empty: %v", opts.Nth)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseKeys(t *testing.T) {
|
||||
pairs := parseKeyChords("ctrl-z,alt-z,f2,@,Alt-a,!,ctrl-G,J,g,ALT-enter,alt-SPACE", "")
|
||||
check := func(i int, s string) {
|
||||
if pairs[i] != s {
|
||||
t.Errorf("%s != %s", pairs[i], s)
|
||||
pairs, _ := parseKeyChords("ctrl-z,alt-z,f2,@,Alt-a,!,ctrl-G,J,g,ctrl-alt-a,ALT-enter,alt-SPACE", "")
|
||||
checkEvent := func(e tui.Event, s string) {
|
||||
if pairs[e] != s {
|
||||
t.Errorf("%s != %s", pairs[e], s)
|
||||
}
|
||||
}
|
||||
if len(pairs) != 11 {
|
||||
t.Error(11)
|
||||
check := func(et tui.EventType, s string) {
|
||||
checkEvent(et.AsEvent(), s)
|
||||
}
|
||||
if len(pairs) != 12 {
|
||||
t.Error(12)
|
||||
}
|
||||
check(tui.CtrlZ, "ctrl-z")
|
||||
check(tui.AltZ, "alt-z")
|
||||
check(tui.F2, "f2")
|
||||
check(tui.AltZ+'@', "@")
|
||||
check(tui.AltA, "Alt-a")
|
||||
check(tui.AltZ+'!', "!")
|
||||
check(tui.CtrlA+'g'-'a', "ctrl-G")
|
||||
check(tui.AltZ+'J', "J")
|
||||
check(tui.AltZ+'g', "g")
|
||||
check(tui.AltEnter, "ALT-enter")
|
||||
check(tui.AltSpace, "alt-SPACE")
|
||||
check(tui.CtrlG, "ctrl-G")
|
||||
checkEvent(tui.AltKey('z'), "alt-z")
|
||||
checkEvent(tui.Key('@'), "@")
|
||||
checkEvent(tui.AltKey('a'), "Alt-a")
|
||||
checkEvent(tui.Key('!'), "!")
|
||||
checkEvent(tui.Key('J'), "J")
|
||||
checkEvent(tui.Key('g'), "g")
|
||||
checkEvent(tui.CtrlAltKey('a'), "ctrl-alt-a")
|
||||
checkEvent(tui.CtrlAltKey('m'), "ALT-enter")
|
||||
checkEvent(tui.AltKey(' '), "alt-SPACE")
|
||||
|
||||
// Synonyms
|
||||
pairs = parseKeyChords("enter,Return,space,tab,btab,esc,up,down,left,right", "")
|
||||
pairs, _ = parseKeyChords("enter,Return,space,tab,btab,esc,up,down,left,right", "")
|
||||
if len(pairs) != 9 {
|
||||
t.Error(9)
|
||||
}
|
||||
check(tui.CtrlM, "Return")
|
||||
check(tui.AltZ+' ', "space")
|
||||
checkEvent(tui.Key(' '), "space")
|
||||
check(tui.Tab, "tab")
|
||||
check(tui.BTab, "btab")
|
||||
check(tui.ESC, "esc")
|
||||
check(tui.ShiftTab, "btab")
|
||||
check(tui.Esc, "esc")
|
||||
check(tui.Up, "up")
|
||||
check(tui.Down, "down")
|
||||
check(tui.Left, "left")
|
||||
check(tui.Right, "right")
|
||||
|
||||
pairs = parseKeyChords("Tab,Ctrl-I,PgUp,page-up,pgdn,Page-Down,Home,End,Alt-BS,Alt-BSpace,shift-left,shift-right,btab,shift-tab,return,Enter,bspace", "")
|
||||
pairs, _ = parseKeyChords("Tab,Ctrl-I,PgUp,page-up,pgdn,Page-Down,Home,End,Alt-BS,Alt-BSpace,shift-left,shift-right,btab,shift-tab,return,Enter,bspace", "")
|
||||
if len(pairs) != 11 {
|
||||
t.Error(11)
|
||||
}
|
||||
check(tui.Tab, "Ctrl-I")
|
||||
check(tui.PgUp, "page-up")
|
||||
check(tui.PgDn, "Page-Down")
|
||||
check(tui.PageUp, "page-up")
|
||||
check(tui.PageDown, "Page-Down")
|
||||
check(tui.Home, "Home")
|
||||
check(tui.End, "End")
|
||||
check(tui.AltBS, "Alt-BSpace")
|
||||
check(tui.SLeft, "shift-left")
|
||||
check(tui.SRight, "shift-right")
|
||||
check(tui.BTab, "shift-tab")
|
||||
check(tui.AltBackspace, "Alt-BSpace")
|
||||
check(tui.ShiftLeft, "shift-left")
|
||||
check(tui.ShiftRight, "shift-right")
|
||||
check(tui.ShiftTab, "shift-tab")
|
||||
check(tui.CtrlM, "Enter")
|
||||
check(tui.BSpace, "bspace")
|
||||
check(tui.Backspace, "bspace")
|
||||
}
|
||||
|
||||
func TestParseKeysWithComma(t *testing.T) {
|
||||
@@ -184,95 +201,103 @@ func TestParseKeysWithComma(t *testing.T) {
|
||||
t.Errorf("%d != %d", a, b)
|
||||
}
|
||||
}
|
||||
check := func(pairs map[int]string, i int, s string) {
|
||||
if pairs[i] != s {
|
||||
t.Errorf("%s != %s", pairs[i], s)
|
||||
check := func(pairs map[tui.Event]string, e tui.Event, s string) {
|
||||
if pairs[e] != s {
|
||||
t.Errorf("%s != %s", pairs[e], s)
|
||||
}
|
||||
}
|
||||
|
||||
pairs := parseKeyChords(",", "")
|
||||
pairs, _ := parseKeyChords(",", "")
|
||||
checkN(len(pairs), 1)
|
||||
check(pairs, tui.AltZ+',', ",")
|
||||
check(pairs, tui.Key(','), ",")
|
||||
|
||||
pairs = parseKeyChords(",,a,b", "")
|
||||
pairs, _ = parseKeyChords(",,a,b", "")
|
||||
checkN(len(pairs), 3)
|
||||
check(pairs, tui.AltZ+'a', "a")
|
||||
check(pairs, tui.AltZ+'b', "b")
|
||||
check(pairs, tui.AltZ+',', ",")
|
||||
check(pairs, tui.Key('a'), "a")
|
||||
check(pairs, tui.Key('b'), "b")
|
||||
check(pairs, tui.Key(','), ",")
|
||||
|
||||
pairs = parseKeyChords("a,b,,", "")
|
||||
pairs, _ = parseKeyChords("a,b,,", "")
|
||||
checkN(len(pairs), 3)
|
||||
check(pairs, tui.AltZ+'a', "a")
|
||||
check(pairs, tui.AltZ+'b', "b")
|
||||
check(pairs, tui.AltZ+',', ",")
|
||||
check(pairs, tui.Key('a'), "a")
|
||||
check(pairs, tui.Key('b'), "b")
|
||||
check(pairs, tui.Key(','), ",")
|
||||
|
||||
pairs = parseKeyChords("a,,,b", "")
|
||||
pairs, _ = parseKeyChords("a,,,b", "")
|
||||
checkN(len(pairs), 3)
|
||||
check(pairs, tui.AltZ+'a', "a")
|
||||
check(pairs, tui.AltZ+'b', "b")
|
||||
check(pairs, tui.AltZ+',', ",")
|
||||
check(pairs, tui.Key('a'), "a")
|
||||
check(pairs, tui.Key('b'), "b")
|
||||
check(pairs, tui.Key(','), ",")
|
||||
|
||||
pairs = parseKeyChords("a,,,b,c", "")
|
||||
pairs, _ = parseKeyChords("a,,,b,c", "")
|
||||
checkN(len(pairs), 4)
|
||||
check(pairs, tui.AltZ+'a', "a")
|
||||
check(pairs, tui.AltZ+'b', "b")
|
||||
check(pairs, tui.AltZ+'c', "c")
|
||||
check(pairs, tui.AltZ+',', ",")
|
||||
check(pairs, tui.Key('a'), "a")
|
||||
check(pairs, tui.Key('b'), "b")
|
||||
check(pairs, tui.Key('c'), "c")
|
||||
check(pairs, tui.Key(','), ",")
|
||||
|
||||
pairs = parseKeyChords(",,,", "")
|
||||
pairs, _ = parseKeyChords(",,,", "")
|
||||
checkN(len(pairs), 1)
|
||||
check(pairs, tui.AltZ+',', ",")
|
||||
check(pairs, tui.Key(','), ",")
|
||||
|
||||
pairs, _ = parseKeyChords(",ALT-,,", "")
|
||||
checkN(len(pairs), 1)
|
||||
check(pairs, tui.AltKey(','), "ALT-,")
|
||||
}
|
||||
|
||||
func TestBind(t *testing.T) {
|
||||
check := func(action actionType, expected actionType) {
|
||||
if action != expected {
|
||||
t.Errorf("%d != %d", action, expected)
|
||||
}
|
||||
}
|
||||
checkString := func(action string, expected string) {
|
||||
if action != expected {
|
||||
t.Errorf("%d != %d", action, expected)
|
||||
}
|
||||
}
|
||||
keymap := defaultKeymap()
|
||||
execmap := make(map[int]string)
|
||||
check(actBeginningOfLine, keymap[tui.CtrlA])
|
||||
parseKeymap(keymap, execmap,
|
||||
"ctrl-a:kill-line,ctrl-b:toggle-sort,c:page-up,alt-z:page-down,"+
|
||||
"f1:execute(ls {}),f2:execute/echo {}, {}, {}/,f3:execute[echo '({})'],f4:execute;less {};,"+
|
||||
"alt-a:execute@echo (,),[,],/,:,;,%,{}@,alt-b:execute;echo (,),[,],/,:,@,%,{};"+
|
||||
",,:abort,::accept,X:execute:\nfoobar,Y:execute(baz)")
|
||||
check(actKillLine, keymap[tui.CtrlA])
|
||||
check(actToggleSort, keymap[tui.CtrlB])
|
||||
check(actPageUp, keymap[tui.AltZ+'c'])
|
||||
check(actAbort, keymap[tui.AltZ+','])
|
||||
check(actAccept, keymap[tui.AltZ+':'])
|
||||
check(actPageDown, keymap[tui.AltZ])
|
||||
check(actExecute, keymap[tui.F1])
|
||||
check(actExecute, keymap[tui.F2])
|
||||
check(actExecute, keymap[tui.F3])
|
||||
check(actExecute, keymap[tui.F4])
|
||||
checkString("ls {}", execmap[tui.F1])
|
||||
checkString("echo {}, {}, {}", execmap[tui.F2])
|
||||
checkString("echo '({})'", execmap[tui.F3])
|
||||
checkString("less {}", execmap[tui.F4])
|
||||
checkString("echo (,),[,],/,:,;,%,{}", execmap[tui.AltA])
|
||||
checkString("echo (,),[,],/,:,@,%,{}", execmap[tui.AltB])
|
||||
checkString("\nfoobar,Y:execute(baz)", execmap[tui.AltZ+'X'])
|
||||
check := func(event tui.Event, arg1 string, types ...actionType) {
|
||||
if len(keymap[event]) != len(types) {
|
||||
t.Errorf("invalid number of actions for %v (%d != %d)",
|
||||
event, len(types), len(keymap[event]))
|
||||
return
|
||||
}
|
||||
for idx, action := range keymap[event] {
|
||||
if types[idx] != action.t {
|
||||
t.Errorf("invalid action type (%d != %d)", types[idx], action.t)
|
||||
}
|
||||
}
|
||||
if len(arg1) > 0 && keymap[event][0].a != arg1 {
|
||||
t.Errorf("invalid action argument: (%s != %s)", arg1, keymap[event][0].a)
|
||||
}
|
||||
}
|
||||
check(tui.CtrlA.AsEvent(), "", actBeginningOfLine)
|
||||
parseKeymap(keymap,
|
||||
"ctrl-a:kill-line,ctrl-b:toggle-sort+up+down,c:page-up,alt-z:page-down,"+
|
||||
"f1:execute(ls {+})+abort+execute(echo \n{+})+select-all,f2:execute/echo {}, {}, {}/,f3:execute[echo '({})'],f4:execute;less {};,"+
|
||||
"alt-a:execute-Multi@echo (,),[,],/,:,;,%,{}@,alt-b:execute;echo (,),[,],/,:,@,%,{};,"+
|
||||
"x:Execute(foo+bar),X:execute/bar+baz/"+
|
||||
",f1:+first,f1:+top"+
|
||||
",,:abort,::accept,+:execute:++\nfoobar,Y:execute(baz)+up")
|
||||
check(tui.CtrlA.AsEvent(), "", actKillLine)
|
||||
check(tui.CtrlB.AsEvent(), "", actToggleSort, actUp, actDown)
|
||||
check(tui.Key('c'), "", actPageUp)
|
||||
check(tui.Key(','), "", actAbort)
|
||||
check(tui.Key(':'), "", actAccept)
|
||||
check(tui.AltKey('z'), "", actPageDown)
|
||||
check(tui.F1.AsEvent(), "ls {+}", actExecute, actAbort, actExecute, actSelectAll, actFirst, actFirst)
|
||||
check(tui.F2.AsEvent(), "echo {}, {}, {}", actExecute)
|
||||
check(tui.F3.AsEvent(), "echo '({})'", actExecute)
|
||||
check(tui.F4.AsEvent(), "less {}", actExecute)
|
||||
check(tui.Key('x'), "foo+bar", actExecute)
|
||||
check(tui.Key('X'), "bar+baz", actExecute)
|
||||
check(tui.AltKey('a'), "echo (,),[,],/,:,;,%,{}", actExecuteMulti)
|
||||
check(tui.AltKey('b'), "echo (,),[,],/,:,@,%,{}", actExecute)
|
||||
check(tui.Key('+'), "++\nfoobar,Y:execute(baz)+up", actExecute)
|
||||
|
||||
for idx, char := range []rune{'~', '!', '@', '#', '$', '%', '^', '&', '*', '|', ';', '/'} {
|
||||
parseKeymap(keymap, execmap, fmt.Sprintf("%d:execute%cfoobar%c", idx%10, char, char))
|
||||
checkString("foobar", execmap[tui.AltZ+int([]rune(fmt.Sprintf("%d", idx%10))[0])])
|
||||
parseKeymap(keymap, fmt.Sprintf("%d:execute%cfoobar%c", idx%10, char, char))
|
||||
check(tui.Key([]rune(fmt.Sprintf("%d", idx%10))[0]), "foobar", actExecute)
|
||||
}
|
||||
|
||||
parseKeymap(keymap, execmap, "f1:abort")
|
||||
check(actAbort, keymap[tui.F1])
|
||||
parseKeymap(keymap, "f1:abort")
|
||||
check(tui.F1.AsEvent(), "", actAbort)
|
||||
}
|
||||
|
||||
func TestColorSpec(t *testing.T) {
|
||||
theme := tui.Dark256
|
||||
dark := parseTheme(theme, "dark")
|
||||
dark, _ := parseTheme(theme, "dark")
|
||||
if *dark != *theme {
|
||||
t.Errorf("colors should be equivalent")
|
||||
}
|
||||
@@ -280,7 +305,7 @@ func TestColorSpec(t *testing.T) {
|
||||
t.Errorf("point should not be equivalent")
|
||||
}
|
||||
|
||||
light := parseTheme(theme, "dark,light")
|
||||
light, _ := parseTheme(theme, "dark,light")
|
||||
if *light == *theme {
|
||||
t.Errorf("should not be equivalent")
|
||||
}
|
||||
@@ -291,8 +316,8 @@ func TestColorSpec(t *testing.T) {
|
||||
t.Errorf("point should not be equivalent")
|
||||
}
|
||||
|
||||
customized := parseTheme(theme, "fg:231,bg:232")
|
||||
if customized.Fg != 231 || customized.Bg != 232 {
|
||||
customized, _ := parseTheme(theme, "fg:231,bg:232")
|
||||
if customized.Fg.Color != 231 || customized.Bg.Color != 232 {
|
||||
t.Errorf("color not customized")
|
||||
}
|
||||
if *tui.Dark256 == *customized {
|
||||
@@ -304,30 +329,20 @@ func TestColorSpec(t *testing.T) {
|
||||
t.Errorf("colors should now be equivalent: %v, %v", tui.Dark256, customized)
|
||||
}
|
||||
|
||||
customized = parseTheme(theme, "fg:231,dark,bg:232")
|
||||
customized, _ = parseTheme(theme, "fg:231,dark,bg:232")
|
||||
if customized.Fg != tui.Dark256.Fg || customized.Bg == tui.Dark256.Bg {
|
||||
t.Errorf("color not customized")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseNilTheme(t *testing.T) {
|
||||
var theme *tui.ColorTheme
|
||||
newTheme := parseTheme(theme, "prompt:12")
|
||||
if newTheme != nil {
|
||||
t.Errorf("color is disabled. keep it that way.")
|
||||
}
|
||||
newTheme = parseTheme(theme, "prompt:12,dark,prompt:13")
|
||||
if newTheme.Prompt != 13 {
|
||||
t.Errorf("color should now be enabled and customized")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDefaultCtrlNP(t *testing.T) {
|
||||
check := func(words []string, key int, expected actionType) {
|
||||
index := 0
|
||||
check := func(words []string, et tui.EventType, expected actionType) {
|
||||
e := et.AsEvent()
|
||||
opts := defaultOptions()
|
||||
parseOptions(opts, words)
|
||||
parseOptions(&index, opts, words)
|
||||
postProcessOptions(opts)
|
||||
if opts.Keymap[key] != expected {
|
||||
if opts.Keymap[e][0].t != expected {
|
||||
t.Error()
|
||||
}
|
||||
}
|
||||
@@ -337,22 +352,23 @@ func TestDefaultCtrlNP(t *testing.T) {
|
||||
check([]string{"--bind=ctrl-n:accept"}, tui.CtrlN, actAccept)
|
||||
check([]string{"--bind=ctrl-p:accept"}, tui.CtrlP, actAccept)
|
||||
|
||||
f, _ := ioutil.TempFile("", "fzf-history")
|
||||
f, _ := os.CreateTemp("", "fzf-history")
|
||||
f.Close()
|
||||
hist := "--history=" + f.Name()
|
||||
check([]string{hist}, tui.CtrlN, actNextHistory)
|
||||
check([]string{hist}, tui.CtrlP, actPreviousHistory)
|
||||
check([]string{hist}, tui.CtrlP, actPrevHistory)
|
||||
|
||||
check([]string{hist, "--bind=ctrl-n:accept"}, tui.CtrlN, actAccept)
|
||||
check([]string{hist, "--bind=ctrl-n:accept"}, tui.CtrlP, actPreviousHistory)
|
||||
check([]string{hist, "--bind=ctrl-n:accept"}, tui.CtrlP, actPrevHistory)
|
||||
|
||||
check([]string{hist, "--bind=ctrl-p:accept"}, tui.CtrlN, actNextHistory)
|
||||
check([]string{hist, "--bind=ctrl-p:accept"}, tui.CtrlP, actAccept)
|
||||
}
|
||||
|
||||
func optsFor(words ...string) *Options {
|
||||
index := 0
|
||||
opts := defaultOptions()
|
||||
parseOptions(opts, words)
|
||||
parseOptions(&index, opts, words)
|
||||
postProcessOptions(opts)
|
||||
return opts
|
||||
}
|
||||
@@ -378,26 +394,109 @@ func TestPreviewOpts(t *testing.T) {
|
||||
opts := optsFor()
|
||||
if !(opts.Preview.command == "" &&
|
||||
opts.Preview.hidden == false &&
|
||||
opts.Preview.wrap == false &&
|
||||
opts.Preview.position == posRight &&
|
||||
opts.Preview.size.percent == true &&
|
||||
opts.Preview.size.size == 50) {
|
||||
t.Error()
|
||||
}
|
||||
opts = optsFor("--preview", "cat {}", "--preview-window=left:15:hidden")
|
||||
opts = optsFor("--preview", "cat {}", "--preview-window=left:15,hidden,wrap:+{1}-/2")
|
||||
if !(opts.Preview.command == "cat {}" &&
|
||||
opts.Preview.hidden == true &&
|
||||
opts.Preview.wrap == true &&
|
||||
opts.Preview.position == posLeft &&
|
||||
opts.Preview.scroll == "+{1}-/2" &&
|
||||
opts.Preview.size.percent == false &&
|
||||
opts.Preview.size.size == 15+2) {
|
||||
opts.Preview.size.size == 15) {
|
||||
t.Error(opts.Preview)
|
||||
}
|
||||
|
||||
opts = optsFor("--preview-window=left:15:hidden", "--preview-window=down")
|
||||
opts = optsFor("--preview-window=up,15,wrap,hidden,+{1}+3-1-2/2", "--preview-window=down", "--preview-window=cycle")
|
||||
if !(opts.Preview.command == "" &&
|
||||
opts.Preview.hidden == false &&
|
||||
opts.Preview.hidden == true &&
|
||||
opts.Preview.wrap == true &&
|
||||
opts.Preview.cycle == true &&
|
||||
opts.Preview.position == posDown &&
|
||||
opts.Preview.scroll == "+{1}+3-1-2/2" &&
|
||||
opts.Preview.size.percent == false &&
|
||||
opts.Preview.size.size == 15) {
|
||||
t.Error(opts.Preview.size.size)
|
||||
}
|
||||
opts = optsFor("--preview-window=up:15:wrap:hidden")
|
||||
if !(opts.Preview.command == "" &&
|
||||
opts.Preview.hidden == true &&
|
||||
opts.Preview.wrap == true &&
|
||||
opts.Preview.position == posUp &&
|
||||
opts.Preview.size.percent == false &&
|
||||
opts.Preview.size.size == 15) {
|
||||
t.Error(opts.Preview)
|
||||
}
|
||||
opts = optsFor("--preview=foo", "--preview-window=up", "--preview-window=default:70%")
|
||||
if !(opts.Preview.command == "foo" &&
|
||||
opts.Preview.position == posRight &&
|
||||
opts.Preview.size.percent == true &&
|
||||
opts.Preview.size.size == 50) {
|
||||
opts.Preview.size.size == 70) {
|
||||
t.Error(opts.Preview)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAdditiveExpect(t *testing.T) {
|
||||
opts := optsFor("--expect=a", "--expect", "b", "--expect=c")
|
||||
if len(opts.Expect) != 3 {
|
||||
t.Error(opts.Expect)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateSign(t *testing.T) {
|
||||
testCases := []struct {
|
||||
inputSign string
|
||||
isValid bool
|
||||
}{
|
||||
{"> ", true},
|
||||
{"아", true},
|
||||
{"😀", true},
|
||||
{">>>", false},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
err := validateSign(testCase.inputSign, "")
|
||||
if testCase.isValid && err != nil {
|
||||
t.Errorf("Input sign `%s` caused error", testCase.inputSign)
|
||||
}
|
||||
|
||||
if !testCase.isValid && err == nil {
|
||||
t.Errorf("Input sign `%s` did not cause error", testCase.inputSign)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseSingleActionList(t *testing.T) {
|
||||
actions, _ := parseSingleActionList("Execute@foo+bar,baz@+up+up+reload:down+down")
|
||||
if len(actions) != 4 {
|
||||
t.Errorf("Invalid number of actions parsed:%d", len(actions))
|
||||
}
|
||||
if actions[0].t != actExecute || actions[0].a != "foo+bar,baz" {
|
||||
t.Errorf("Invalid action parsed: %v", actions[0])
|
||||
}
|
||||
if actions[1].t != actUp || actions[2].t != actUp {
|
||||
t.Errorf("Invalid action parsed: %v / %v", actions[1], actions[2])
|
||||
}
|
||||
if actions[3].t != actReload || actions[3].a != "down+down" {
|
||||
t.Errorf("Invalid action parsed: %v", actions[3])
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseSingleActionListError(t *testing.T) {
|
||||
_, err := parseSingleActionList("change-query(foobar)baz")
|
||||
if err == nil {
|
||||
t.Errorf("Failed to detect error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMaskActionContents(t *testing.T) {
|
||||
original := ":execute((f)(o)(o)(b)(a)(r))+change-query@qu@ry@+up,x:reload:hello:world"
|
||||
expected := ":execute +change-query +up,x:reload "
|
||||
masked := maskActionContents(original)
|
||||
if masked != expected {
|
||||
t.Errorf("Not masked: %s", masked)
|
||||
}
|
||||
}
|
||||
|
||||
242 src/pattern.go
@@ -1,6 +1,7 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
@@ -10,18 +11,19 @@ import (
|
||||
|
||||
// fuzzy
|
||||
// 'exact
|
||||
// ^exact-prefix
|
||||
// exact-suffix$
|
||||
// !not-fuzzy
|
||||
// !'not-exact
|
||||
// !^not-exact-prefix
|
||||
// !not-exact-suffix$
|
||||
// ^prefix-exact
|
||||
// suffix-exact$
|
||||
// !inverse-exact
|
||||
// !'inverse-fuzzy
|
||||
// !^inverse-prefix-exact
|
||||
// !inverse-suffix-exact$
|
||||
|
||||
type termType int
|
||||
|
||||
const (
|
||||
termFuzzy termType = iota
|
||||
termExact
|
||||
termExactBoundary
|
||||
termPrefix
|
||||
termSuffix
|
||||
termEqual
|
||||
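For reference, a hedged illustration of how the term types listed above map onto a query. The query string is made up, and the snippet would have to live in package fzf (parseTerms is unexported), for example as a test; the expected types follow the syntax table in the comments.

package fzf

import "testing"

// Illustrative only: the exact query is invented to touch one term type each.
func TestParseTermsSketch(t *testing.T) {
    terms := parseTerms(true, CaseSmart, false, "foo 'bar ^baz qux$ ^quux$ 'corge'")
    // foo     -> termFuzzy
    // 'bar    -> termExact          (quote makes a fuzzy term exact)
    // ^baz    -> termPrefix
    // qux$    -> termSuffix
    // ^quux$  -> termEqual          (prefix and suffix anchors combined)
    // 'corge' -> termExactBoundary  (added in this diff: quoted on both ends)
    // A leading ! would set inv on the term, and | joins adjacent terms
    // into a single OR set.
    if len(terms) != 6 {
        t.Fatalf("expected 6 term sets, got %d", len(terms))
    }
}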
@@ -32,7 +34,12 @@ type term struct {
|
||||
inv bool
|
||||
text []rune
|
||||
caseSensitive bool
|
||||
origText []rune
|
||||
normalize bool
|
||||
}
|
||||
|
||||
// String returns the string representation of a term.
|
||||
func (t term) String() string {
|
||||
return fmt.Sprintf("term{typ: %d, inv: %v, text: []rune(%q), caseSensitive: %v}", t.typ, t.inv, string(t.text), t.caseSensitive)
|
||||
}
|
||||
|
||||
type termSet []term
|
||||
@@ -43,71 +50,76 @@ type Pattern struct {
|
||||
fuzzyAlgo algo.Algo
|
||||
extended bool
|
||||
caseSensitive bool
|
||||
normalize bool
|
||||
forward bool
|
||||
withPos bool
|
||||
text []rune
|
||||
termSets []termSet
|
||||
sortable bool
|
||||
cacheable bool
|
||||
cacheKey string
|
||||
delimiter Delimiter
|
||||
nth []Range
|
||||
procFun map[termType]algo.Algo
|
||||
cache *ChunkCache
|
||||
}
|
||||
|
||||
var (
|
||||
_patternCache map[string]*Pattern
|
||||
_splitRegex *regexp.Regexp
|
||||
_cache ChunkCache
|
||||
)
|
||||
var _splitRegex *regexp.Regexp
|
||||
|
||||
func init() {
|
||||
_splitRegex = regexp.MustCompile("\\s+")
|
||||
clearPatternCache()
|
||||
clearChunkCache()
|
||||
}
|
||||
|
||||
func clearPatternCache() {
|
||||
// We can uniquely identify the pattern for a given string since
|
||||
// search mode and caseMode do not change while the program is running
|
||||
_patternCache = make(map[string]*Pattern)
|
||||
}
|
||||
|
||||
func clearChunkCache() {
|
||||
_cache = NewChunkCache()
|
||||
_splitRegex = regexp.MustCompile(" +")
|
||||
}
|
||||
|
||||
// BuildPattern builds Pattern object from the given arguments
|
||||
func BuildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, forward bool,
|
||||
cacheable bool, nth []Range, delimiter Delimiter, runes []rune) *Pattern {
|
||||
func BuildPattern(cache *ChunkCache, patternCache map[string]*Pattern, fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, normalize bool, forward bool,
|
||||
withPos bool, cacheable bool, nth []Range, delimiter Delimiter, runes []rune) *Pattern {
|
||||
|
||||
var asString string
|
||||
if extended {
|
||||
asString = strings.Trim(string(runes), " ")
|
||||
asString = strings.TrimLeft(string(runes), " ")
|
||||
for strings.HasSuffix(asString, " ") && !strings.HasSuffix(asString, "\\ ") {
|
||||
asString = asString[:len(asString)-1]
|
||||
}
|
||||
} else {
|
||||
asString = string(runes)
|
||||
}
|
||||
|
||||
cached, found := _patternCache[asString]
|
||||
// We can uniquely identify the pattern for a given string since
|
||||
// search mode and caseMode do not change while the program is running
|
||||
cached, found := patternCache[asString]
|
||||
if found {
|
||||
return cached
|
||||
}
|
||||
|
||||
caseSensitive := true
|
||||
sortable := true
|
||||
termSets := []termSet{}
|
||||
|
||||
if extended {
|
||||
termSets = parseTerms(fuzzy, caseMode, asString)
|
||||
termSets = parseTerms(fuzzy, caseMode, normalize, asString)
|
||||
// We should not sort the result if there are only inverse search terms
|
||||
sortable = false
|
||||
Loop:
|
||||
for _, termSet := range termSets {
|
||||
for idx, term := range termSet {
|
||||
if !term.inv {
|
||||
sortable = true
|
||||
}
|
||||
// If the query contains inverse search terms or OR operators,
|
||||
// we cannot cache the search scope
|
||||
if !cacheable || idx > 0 || term.inv {
|
||||
if !cacheable || idx > 0 || term.inv || fuzzy && term.typ != termFuzzy || !fuzzy && term.typ != termExact {
|
||||
cacheable = false
|
||||
break Loop
|
||||
if sortable {
|
||||
// Can't break until we see at least one non-inverse term
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
lowerString := strings.ToLower(asString)
|
||||
normalize = normalize &&
|
||||
lowerString == string(algo.NormalizeRunes([]rune(lowerString)))
|
||||
caseSensitive = caseMode == CaseRespect ||
|
||||
caseMode == CaseSmart && lowerString != asString
|
||||
if !caseSensitive {
|
||||
@@ -120,46 +132,57 @@ func BuildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case,
|
||||
fuzzyAlgo: fuzzyAlgo,
|
||||
extended: extended,
|
||||
caseSensitive: caseSensitive,
|
||||
normalize: normalize,
|
||||
forward: forward,
|
||||
withPos: withPos,
|
||||
text: []rune(asString),
|
||||
termSets: termSets,
|
||||
sortable: sortable,
|
||||
cacheable: cacheable,
|
||||
nth: nth,
|
||||
delimiter: delimiter,
|
||||
cache: cache,
|
||||
procFun: make(map[termType]algo.Algo)}
|
||||
|
||||
ptr.cacheKey = ptr.buildCacheKey()
|
||||
ptr.procFun[termFuzzy] = fuzzyAlgo
|
||||
ptr.procFun[termEqual] = algo.EqualMatch
|
||||
ptr.procFun[termExact] = algo.ExactMatchNaive
|
||||
ptr.procFun[termExactBoundary] = algo.ExactMatchBoundary
|
||||
ptr.procFun[termPrefix] = algo.PrefixMatch
|
||||
ptr.procFun[termSuffix] = algo.SuffixMatch
|
||||
|
||||
_patternCache[asString] = ptr
|
||||
patternCache[asString] = ptr
|
||||
return ptr
|
||||
}
|
||||
|
||||
func parseTerms(fuzzy bool, caseMode Case, str string) []termSet {
|
||||
func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet {
|
||||
str = strings.ReplaceAll(str, "\\ ", "\t")
|
||||
tokens := _splitRegex.Split(str, -1)
|
||||
sets := []termSet{}
|
||||
set := termSet{}
|
||||
switchSet := false
|
||||
afterBar := false
|
||||
for _, token := range tokens {
|
||||
typ, inv, text := termFuzzy, false, token
|
||||
typ, inv, text := termFuzzy, false, strings.ReplaceAll(token, "\t", " ")
|
||||
lowerText := strings.ToLower(text)
|
||||
caseSensitive := caseMode == CaseRespect ||
|
||||
caseMode == CaseSmart && text != lowerText
|
||||
normalizeTerm := normalize &&
|
||||
lowerText == string(algo.NormalizeRunes([]rune(lowerText)))
|
||||
if !caseSensitive {
|
||||
text = lowerText
|
||||
}
|
||||
origText := []rune(text)
|
||||
if !fuzzy {
|
||||
typ = termExact
|
||||
}
|
||||
|
||||
if text == "|" {
|
||||
if len(set) > 0 && !afterBar && text == "|" {
|
||||
switchSet = false
|
||||
afterBar = true
|
||||
continue
|
||||
}
|
||||
afterBar = false
|
||||
|
||||
if strings.HasPrefix(text, "!") {
|
||||
inv = true
|
||||
@@ -167,26 +190,29 @@ func parseTerms(fuzzy bool, caseMode Case, str string) []termSet {
|
||||
text = text[1:]
|
||||
}
|
||||
|
||||
if strings.HasPrefix(text, "'") {
|
||||
if text != "$" && strings.HasSuffix(text, "$") {
|
||||
typ = termSuffix
|
||||
text = text[:len(text)-1]
|
||||
}
|
||||
|
||||
if len(text) > 2 && strings.HasPrefix(text, "'") && strings.HasSuffix(text, "'") {
|
||||
typ = termExactBoundary
|
||||
text = text[1 : len(text)-1]
|
||||
} else if strings.HasPrefix(text, "'") {
|
||||
// Flip exactness
|
||||
if fuzzy && !inv {
|
||||
typ = termExact
|
||||
text = text[1:]
|
||||
} else {
|
||||
typ = termFuzzy
|
||||
text = text[1:]
|
||||
}
|
||||
text = text[1:]
|
||||
} else if strings.HasPrefix(text, "^") {
|
||||
if strings.HasSuffix(text, "$") {
|
||||
if typ == termSuffix {
|
||||
typ = termEqual
|
||||
text = text[1 : len(text)-1]
|
||||
} else {
|
||||
typ = termPrefix
|
||||
text = text[1:]
|
||||
}
|
||||
} else if strings.HasSuffix(text, "$") {
|
||||
typ = termSuffix
|
||||
text = text[:len(text)-1]
|
||||
text = text[1:]
|
||||
}
|
||||
|
||||
if len(text) > 0 {
|
||||
@@ -194,12 +220,16 @@ func parseTerms(fuzzy bool, caseMode Case, str string) []termSet {
|
||||
sets = append(sets, set)
|
||||
set = termSet{}
|
||||
}
|
||||
textRunes := []rune(text)
|
||||
if normalizeTerm {
|
||||
textRunes = algo.NormalizeRunes(textRunes)
|
||||
}
|
||||
set = append(set, term{
|
||||
typ: typ,
|
||||
inv: inv,
|
||||
text: []rune(text),
|
||||
text: textRunes,
|
||||
caseSensitive: caseSensitive,
|
||||
origText: origText})
|
||||
normalize: normalizeTerm})
|
||||
switchSet = true
|
||||
}
|
||||
}
|
||||
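parseTerms hides escaped spaces ("\ ") behind a tab before splitting on the space-only regex, then restores them per token, so a single term can contain a literal space. A self-contained round trip of that trick, using only the standard library:

package main

import (
    "fmt"
    "regexp"
    "strings"
)

func main() {
    // Query with an escaped space: "foo\ bar" should stay one term.
    str := "foo\\ bar baz"

    str = strings.ReplaceAll(str, "\\ ", "\t")        // hide the escaped space
    tokens := regexp.MustCompile(" +").Split(str, -1) // split on plain spaces only
    for i, token := range tokens {
        tokens[i] = strings.ReplaceAll(token, "\t", " ") // restore the literal space
    }
    fmt.Printf("%q\n", tokens) // ["foo bar" "baz"]
}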
@@ -222,68 +252,58 @@ func (p *Pattern) AsString() string {
|
||||
return string(p.text)
|
||||
}
|
||||
|
||||
// CacheKey is used to build string to be used as the key of result cache
|
||||
func (p *Pattern) CacheKey() string {
|
||||
func (p *Pattern) buildCacheKey() string {
|
||||
if !p.extended {
|
||||
return p.AsString()
|
||||
}
|
||||
cacheableTerms := []string{}
|
||||
for _, termSet := range p.termSets {
|
||||
if len(termSet) == 1 && !termSet[0].inv && (p.fuzzy || termSet[0].typ == termExact) {
|
||||
cacheableTerms = append(cacheableTerms, string(termSet[0].origText))
|
||||
cacheableTerms = append(cacheableTerms, string(termSet[0].text))
|
||||
}
|
||||
}
|
||||
return strings.Join(cacheableTerms, " ")
|
||||
return strings.Join(cacheableTerms, "\t")
|
||||
}
|
||||
|
||||
// CacheKey is used to build string to be used as the key of result cache
|
||||
func (p *Pattern) CacheKey() string {
|
||||
return p.cacheKey
|
||||
}
|
||||
|
||||
// Match returns the list of matches Items in the given Chunk
|
||||
func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []*Result {
|
||||
func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []Result {
|
||||
// ChunkCache: Exact match
|
||||
cacheKey := p.CacheKey()
|
||||
if p.cacheable {
|
||||
if cached, found := _cache.Find(chunk, cacheKey); found {
|
||||
if cached := p.cache.Lookup(chunk, cacheKey); cached != nil {
|
||||
return cached
|
||||
}
|
||||
}
|
||||
|
||||
// Prefix/suffix cache
|
||||
var space []*Result
|
||||
Loop:
|
||||
for idx := 1; idx < len(cacheKey); idx++ {
|
||||
// [---------| ] | [ |---------]
|
||||
// [--------| ] | [ |--------]
|
||||
// [-------| ] | [ |-------]
|
||||
prefix := cacheKey[:len(cacheKey)-idx]
|
||||
suffix := cacheKey[idx:]
|
||||
for _, substr := range [2]*string{&prefix, &suffix} {
|
||||
if cached, found := _cache.Find(chunk, *substr); found {
|
||||
space = cached
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
}
|
||||
space := p.cache.Search(chunk, cacheKey)
|
||||
|
||||
matches := p.matchChunk(chunk, space, slab)
|
||||
|
||||
if p.cacheable {
|
||||
_cache.Add(chunk, cacheKey, matches)
|
||||
p.cache.Add(chunk, cacheKey, matches)
|
||||
}
|
||||
return matches
|
||||
}
|
||||
|
||||
func (p *Pattern) matchChunk(chunk *Chunk, space []*Result, slab *util.Slab) []*Result {
|
||||
matches := []*Result{}
|
||||
func (p *Pattern) matchChunk(chunk *Chunk, space []Result, slab *util.Slab) []Result {
|
||||
matches := []Result{}
|
||||
|
||||
if space == nil {
|
||||
for _, item := range *chunk {
|
||||
if match, _, _ := p.MatchItem(item, false, slab); match != nil {
|
||||
matches = append(matches, match)
|
||||
for idx := 0; idx < chunk.count; idx++ {
|
||||
if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match != nil {
|
||||
matches = append(matches, *match)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for _, result := range space {
|
||||
if match, _, _ := p.MatchItem(result.item, false, slab); match != nil {
|
||||
matches = append(matches, match)
|
||||
if match, _, _ := p.MatchItem(result.item, p.withPos, slab); match != nil {
|
||||
matches = append(matches, *match)
|
||||
}
|
||||
}
|
||||
}
|
||||
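The prefix/suffix lookup loop removed above now sits behind p.cache.Search, whose body is not part of this diff. The sketch below only restates the idea of the removed loop with a hypothetical lookup function and a stand-in Result type: when the query grows or shrinks by a character, the results cached for the nearest prefix or suffix of the cache key are a valid, smaller search space.

package main

import "fmt"

// Result is a stand-in for fzf's Result type, just for this sketch.
type Result struct{ text string }

// searchSketch mirrors the removed loop: try progressively shorter prefixes
// and suffixes of the cache key and reuse the first cached result set found.
func searchSketch(lookup func(key string) []Result, cacheKey string) []Result {
    for idx := 1; idx < len(cacheKey); idx++ {
        prefix := cacheKey[:len(cacheKey)-idx]
        suffix := cacheKey[idx:]
        if cached := lookup(prefix); cached != nil {
            return cached
        }
        if cached := lookup(suffix); cached != nil {
            return cached
        }
    }
    return nil
}

func main() {
    cache := map[string][]Result{"fo": {{"foo"}, {"food"}}}
    lookup := func(key string) []Result { return cache[key] }

    // Typing one more character: results cached for "fo" narrow the scan for "foo".
    fmt.Println(searchSketch(lookup, "foo"))
}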
@@ -293,32 +313,43 @@ func (p *Pattern) matchChunk(chunk *Chunk, space []*Result, slab *util.Slab) []*
|
||||
// MatchItem returns true if the Item is a match
|
||||
func (p *Pattern) MatchItem(item *Item, withPos bool, slab *util.Slab) (*Result, []Offset, *[]int) {
|
||||
if p.extended {
|
||||
if offsets, bonus, trimLen, pos := p.extendedMatch(item, withPos, slab); len(offsets) == len(p.termSets) {
|
||||
return buildResult(item, offsets, bonus, trimLen), offsets, pos
|
||||
if offsets, bonus, pos := p.extendedMatch(item, withPos, slab); len(offsets) == len(p.termSets) {
|
||||
result := buildResult(item, offsets, bonus)
|
||||
return &result, offsets, pos
|
||||
}
|
||||
return nil, nil, nil
|
||||
}
|
||||
offset, bonus, trimLen, pos := p.basicMatch(item, withPos, slab)
|
||||
offset, bonus, pos := p.basicMatch(item, withPos, slab)
|
||||
if sidx := offset[0]; sidx >= 0 {
|
||||
offsets := []Offset{offset}
|
||||
return buildResult(item, offsets, bonus, trimLen), offsets, pos
|
||||
result := buildResult(item, offsets, bonus)
|
||||
return &result, offsets, pos
|
||||
}
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
func (p *Pattern) basicMatch(item *Item, withPos bool, slab *util.Slab) (Offset, int, int, *[]int) {
|
||||
input := p.prepareInput(item)
|
||||
if p.fuzzy {
|
||||
return p.iter(p.fuzzyAlgo, input, p.caseSensitive, p.forward, p.text, withPos, slab)
|
||||
func (p *Pattern) basicMatch(item *Item, withPos bool, slab *util.Slab) (Offset, int, *[]int) {
|
||||
var input []Token
|
||||
if len(p.nth) == 0 {
|
||||
input = []Token{{text: &item.text, prefixLength: 0}}
|
||||
} else {
|
||||
input = p.transformInput(item)
|
||||
}
|
||||
return p.iter(algo.ExactMatchNaive, input, p.caseSensitive, p.forward, p.text, withPos, slab)
|
||||
if p.fuzzy {
|
||||
return p.iter(p.fuzzyAlgo, input, p.caseSensitive, p.normalize, p.forward, p.text, withPos, slab)
|
||||
}
|
||||
return p.iter(algo.ExactMatchNaive, input, p.caseSensitive, p.normalize, p.forward, p.text, withPos, slab)
|
||||
}
|
||||
|
||||
func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Offset, int, int, *[]int) {
|
||||
input := p.prepareInput(item)
|
||||
func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Offset, int, *[]int) {
|
||||
var input []Token
|
||||
if len(p.nth) == 0 {
|
||||
input = []Token{{text: &item.text, prefixLength: 0}}
|
||||
} else {
|
||||
input = p.transformInput(item)
|
||||
}
|
||||
offsets := []Offset{}
|
||||
var totalScore int
|
||||
var totalTrimLen int
|
||||
var allPos *[]int
|
||||
if withPos {
|
||||
allPos = &[]int{}
|
||||
@@ -326,16 +357,15 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
|
||||
for _, termSet := range p.termSets {
|
||||
var offset Offset
|
||||
var currentScore int
|
||||
var trimLen int
|
||||
matched := false
|
||||
for _, term := range termSet {
|
||||
pfun := p.procFun[term.typ]
|
||||
off, score, tLen, pos := p.iter(pfun, input, term.caseSensitive, p.forward, term.text, withPos, slab)
|
||||
off, score, pos := p.iter(pfun, input, term.caseSensitive, term.normalize, p.forward, term.text, withPos, slab)
|
||||
if sidx := off[0]; sidx >= 0 {
|
||||
if term.inv {
|
||||
continue
|
||||
}
|
||||
offset, currentScore, trimLen = off, score, tLen
|
||||
offset, currentScore = off, score
|
||||
matched = true
|
||||
if withPos {
|
||||
if pos != nil {
|
||||
@@ -348,7 +378,7 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
|
||||
}
|
||||
break
|
||||
} else if term.inv {
|
||||
offset, currentScore, trimLen = Offset{0, 0}, 0, 0
|
||||
offset, currentScore = Offset{0, 0}, 0
|
||||
matched = true
|
||||
continue
|
||||
}
|
||||
@@ -356,31 +386,25 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
|
||||
if matched {
|
||||
offsets = append(offsets, offset)
|
||||
totalScore += currentScore
|
||||
totalTrimLen += trimLen
|
||||
}
|
||||
}
|
||||
return offsets, totalScore, totalTrimLen, allPos
|
||||
return offsets, totalScore, allPos
|
||||
}
|
||||
|
||||
func (p *Pattern) prepareInput(item *Item) []Token {
func (p *Pattern) transformInput(item *Item) []Token {
if item.transformed != nil {
return item.transformed
return *item.transformed
}

var ret []Token
if len(p.nth) == 0 {
ret = []Token{Token{text: &item.text, prefixLength: 0, trimLength: int32(item.text.TrimLength())}}
} else {
tokens := Tokenize(item.text, p.delimiter)
ret = Transform(tokens, p.nth)
}
item.transformed = ret
tokens := Tokenize(item.text.ToString(), p.delimiter)
ret := Transform(tokens, p.nth)
item.transformed = &ret
return ret
}

func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, forward bool, pattern []rune, withPos bool, slab *util.Slab) (Offset, int, int, *[]int) {
func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, normalize bool, forward bool, pattern []rune, withPos bool, slab *util.Slab) (Offset, int, *[]int) {
for _, part := range tokens {
if res, pos := pfun(caseSensitive, forward, *part.text, pattern, withPos, slab); res.Start >= 0 {
if res, pos := pfun(caseSensitive, normalize, forward, part.text, pattern, withPos, slab); res.Start >= 0 {
sidx := int32(res.Start) + part.prefixLength
eidx := int32(res.End) + part.prefixLength
if pos != nil {
@@ -388,8 +412,8 @@ func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, forwa
(*pos)[idx] += int(part.prefixLength)
}
}
return Offset{sidx, eidx}, res.Score, int(part.trimLength), pos
return Offset{sidx, eidx}, res.Score, pos
}
}
return Offset{-1, -1}, 0, -1, nil
return Offset{-1, -1}, 0, nil
}

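The offset arithmetic in iter above can be checked in isolation: whichever token the match lands in, the reported positions are shifted by that token's prefixLength so that the offsets refer to the whole line. A minimal, self-contained sketch of that idea (token and findIn are illustrative stand-ins, not fzf's actual types):

package main

import (
	"fmt"
	"strings"
)

// token is a stand-in for fzf's Token: a slice of the original line plus the
// rune offset at which that slice begins.
type token struct {
	text         string
	prefixLength int32
}

// findIn mimics the shape of an algo.Algo call: it reports a [start, end)
// match within a single token, or -1 if the pattern is not found there.
func findIn(t token, pattern string) (int, int) {
	idx := strings.Index(t.text, pattern)
	if idx < 0 {
		return -1, -1
	}
	return idx, idx + len(pattern)
}

func main() {
	// "jan feb mar" split into fields, as --nth tokenization would produce.
	tokens := []token{
		{"jan ", 0},
		{"feb ", 4},
		{"mar", 8},
	}
	for _, part := range tokens {
		if s, e := findIn(part, "mar"); s >= 0 {
			// Shift by prefixLength so the offsets refer to the full line.
			fmt.Println(int32(s)+part.prefixLength, int32(e)+part.prefixLength) // 8 11
			break
		}
	}
}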
@@ -15,8 +15,8 @@ func init() {
|
||||
}
|
||||
|
||||
func TestParseTermsExtended(t *testing.T) {
|
||||
terms := parseTerms(true, CaseSmart,
|
||||
"| aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$ | ^iii$ ^xxx | 'yyy | | zzz$ | !ZZZ |")
|
||||
terms := parseTerms(true, CaseSmart, false,
|
||||
"aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$ | ^iii$ ^xxx | 'yyy | zzz$ | !ZZZ |")
|
||||
if len(terms) != 9 ||
|
||||
terms[0][0].typ != termFuzzy || terms[0][0].inv ||
|
||||
terms[1][0].typ != termExact || terms[1][0].inv ||
|
||||
@@ -31,26 +31,18 @@ func TestParseTermsExtended(t *testing.T) {
|
||||
terms[8][1].typ != termExact || terms[8][1].inv ||
|
||||
terms[8][2].typ != termSuffix || terms[8][2].inv ||
|
||||
terms[8][3].typ != termExact || !terms[8][3].inv {
|
||||
t.Errorf("%s", terms)
|
||||
t.Errorf("%v", terms)
|
||||
}
|
||||
for idx, termSet := range terms[:8] {
|
||||
for _, termSet := range terms[:8] {
|
||||
term := termSet[0]
|
||||
if len(term.text) != 3 {
|
||||
t.Errorf("%s", term)
|
||||
}
|
||||
if idx > 0 && len(term.origText) != 4+idx/5 {
|
||||
t.Errorf("%s", term)
|
||||
}
|
||||
}
|
||||
for _, term := range terms[8] {
|
||||
if len(term.origText) != 4 {
|
||||
t.Errorf("%s", term)
|
||||
t.Errorf("%v", term)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseTermsExtendedExact(t *testing.T) {
|
||||
terms := parseTerms(false, CaseSmart,
|
||||
terms := parseTerms(false, CaseSmart, false,
|
||||
"aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$")
|
||||
if len(terms) != 8 ||
|
||||
terms[0][0].typ != termExact || terms[0][0].inv || len(terms[0][0].text) != 3 ||
|
||||
@@ -61,26 +53,32 @@ func TestParseTermsExtendedExact(t *testing.T) {
|
||||
terms[5][0].typ != termFuzzy || !terms[5][0].inv || len(terms[5][0].text) != 3 ||
|
||||
terms[6][0].typ != termPrefix || !terms[6][0].inv || len(terms[6][0].text) != 3 ||
|
||||
terms[7][0].typ != termSuffix || !terms[7][0].inv || len(terms[7][0].text) != 3 {
|
||||
t.Errorf("%s", terms)
|
||||
t.Errorf("%v", terms)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseTermsEmpty(t *testing.T) {
|
||||
terms := parseTerms(true, CaseSmart, "' $ ^ !' !^ !$")
|
||||
terms := parseTerms(true, CaseSmart, false, "' ^ !' !^")
|
||||
if len(terms) != 0 {
|
||||
t.Errorf("%s", terms)
|
||||
t.Errorf("%v", terms)
|
||||
}
|
||||
}
|
||||
|
||||
func buildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, normalize bool, forward bool,
|
||||
withPos bool, cacheable bool, nth []Range, delimiter Delimiter, runes []rune) *Pattern {
|
||||
return BuildPattern(NewChunkCache(), make(map[string]*Pattern),
|
||||
fuzzy, fuzzyAlgo, extended, caseMode, normalize, forward,
|
||||
withPos, cacheable, nth, delimiter, runes)
|
||||
}
|
||||
|
||||
func TestExact(t *testing.T) {
|
||||
defer clearPatternCache()
|
||||
clearPatternCache()
|
||||
pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, true, true,
|
||||
pattern := buildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, false, true,
|
||||
[]Range{}, Delimiter{}, []rune("'abc"))
|
||||
chars := util.ToChars([]byte("aabbcc abc"))
|
||||
res, pos := algo.ExactMatchNaive(
|
||||
pattern.caseSensitive, pattern.forward, util.RunesToChars([]rune("aabbcc abc")), pattern.termSets[0][0].text, true, nil)
|
||||
pattern.caseSensitive, pattern.normalize, pattern.forward, &chars, pattern.termSets[0][0].text, true, nil)
|
||||
if res.Start != 7 || res.End != 10 {
|
||||
t.Errorf("%s / %d / %d", pattern.termSets, res.Start, res.End)
|
||||
t.Errorf("%v / %d / %d", pattern.termSets, res.Start, res.End)
|
||||
}
|
||||
if pos != nil {
|
||||
t.Errorf("pos is expected to be nil")
|
||||
@@ -88,15 +86,14 @@ func TestExact(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestEqual(t *testing.T) {
|
||||
defer clearPatternCache()
|
||||
clearPatternCache()
|
||||
pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, true, true, []Range{}, Delimiter{}, []rune("^AbC$"))
|
||||
pattern := buildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("^AbC$"))
|
||||
|
||||
match := func(str string, sidxExpected int, eidxExpected int) {
|
||||
chars := util.ToChars([]byte(str))
|
||||
res, pos := algo.EqualMatch(
|
||||
pattern.caseSensitive, pattern.forward, util.RunesToChars([]rune(str)), pattern.termSets[0][0].text, true, nil)
|
||||
pattern.caseSensitive, pattern.normalize, pattern.forward, &chars, pattern.termSets[0][0].text, true, nil)
|
||||
if res.Start != sidxExpected || res.End != eidxExpected {
|
||||
t.Errorf("%s / %d / %d", pattern.termSets, res.Start, res.End)
|
||||
t.Errorf("%v / %d / %d", pattern.termSets, res.Start, res.End)
|
||||
}
|
||||
if pos != nil {
|
||||
t.Errorf("pos is expected to be nil")
|
||||
@@ -104,22 +101,18 @@ func TestEqual(t *testing.T) {
|
||||
}
|
||||
match("ABC", -1, -1)
|
||||
match("AbC", 0, 3)
|
||||
match("AbC ", 0, 3)
|
||||
match(" AbC ", 1, 4)
|
||||
match(" AbC", 2, 5)
|
||||
}
|
||||
|
||||
func TestCaseSensitivity(t *testing.T) {
|
||||
defer clearPatternCache()
|
||||
clearPatternCache()
|
||||
pat1 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, true, true, []Range{}, Delimiter{}, []rune("abc"))
|
||||
clearPatternCache()
|
||||
pat2 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, true, true, []Range{}, Delimiter{}, []rune("Abc"))
|
||||
clearPatternCache()
|
||||
pat3 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, true, true, []Range{}, Delimiter{}, []rune("abc"))
|
||||
clearPatternCache()
|
||||
pat4 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, true, true, []Range{}, Delimiter{}, []rune("Abc"))
|
||||
clearPatternCache()
|
||||
pat5 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, true, true, []Range{}, Delimiter{}, []rune("abc"))
|
||||
clearPatternCache()
|
||||
pat6 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, true, true, []Range{}, Delimiter{}, []rune("Abc"))
|
||||
pat1 := buildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("abc"))
|
||||
pat2 := buildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("Abc"))
|
||||
pat3 := buildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, false, true, []Range{}, Delimiter{}, []rune("abc"))
|
||||
pat4 := buildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, false, true, []Range{}, Delimiter{}, []rune("Abc"))
|
||||
pat5 := buildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, false, true, []Range{}, Delimiter{}, []rune("abc"))
|
||||
pat6 := buildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, false, true, []Range{}, Delimiter{}, []rune("Abc"))
|
||||
|
||||
if string(pat1.text) != "abc" || pat1.caseSensitive != false ||
|
||||
string(pat2.text) != "Abc" || pat2.caseSensitive != true ||
|
||||
@@ -132,31 +125,30 @@ func TestCaseSensitivity(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestOrigTextAndTransformed(t *testing.T) {
|
||||
pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, true, true, []Range{}, Delimiter{}, []rune("jg"))
|
||||
tokens := Tokenize(util.RunesToChars([]rune("junegunn")), Delimiter{})
|
||||
trans := Transform(tokens, []Range{Range{1, 1}})
|
||||
pattern := buildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("jg"))
|
||||
tokens := Tokenize("junegunn", Delimiter{})
|
||||
trans := Transform(tokens, []Range{{1, 1}})
|
||||
|
||||
origBytes := []byte("junegunn.choi")
|
||||
for _, extended := range []bool{false, true} {
|
||||
chunk := Chunk{
|
||||
&Item{
|
||||
text: util.RunesToChars([]rune("junegunn")),
|
||||
origText: &origBytes,
|
||||
transformed: trans},
|
||||
}
|
||||
chunk := Chunk{count: 1}
|
||||
chunk.items[0] = Item{
|
||||
text: util.ToChars([]byte("junegunn")),
|
||||
origText: &origBytes,
|
||||
transformed: &trans}
|
||||
pattern.extended = extended
|
||||
matches := pattern.matchChunk(&chunk, nil, slab) // No cache
|
||||
if !(matches[0].item.text.ToString() == "junegunn" &&
|
||||
string(*matches[0].item.origText) == "junegunn.choi" &&
|
||||
reflect.DeepEqual(matches[0].item.transformed, trans)) {
|
||||
reflect.DeepEqual(*matches[0].item.transformed, trans)) {
|
||||
t.Error("Invalid match result", matches)
|
||||
}
|
||||
|
||||
match, offsets, pos := pattern.MatchItem(chunk[0], true, slab)
|
||||
match, offsets, pos := pattern.MatchItem(&chunk.items[0], true, slab)
|
||||
if !(match.item.text.ToString() == "junegunn" &&
|
||||
string(*match.item.origText) == "junegunn.choi" &&
|
||||
offsets[0][0] == 0 && offsets[0][1] == 5 &&
|
||||
reflect.DeepEqual(match.item.transformed, trans)) {
|
||||
reflect.DeepEqual(*match.item.transformed, trans)) {
|
||||
t.Error("Invalid match result", match, offsets, extended)
|
||||
}
|
||||
if !((*pos)[0] == 4 && (*pos)[1] == 0) {
|
||||
@@ -167,22 +159,43 @@ func TestOrigTextAndTransformed(t *testing.T) {
|
||||
|
||||
func TestCacheKey(t *testing.T) {
|
||||
test := func(extended bool, patStr string, expected string, cacheable bool) {
|
||||
pat := BuildPattern(true, algo.FuzzyMatchV2, extended, CaseSmart, true, true, []Range{}, Delimiter{}, []rune(patStr))
|
||||
pat := buildPattern(true, algo.FuzzyMatchV2, extended, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune(patStr))
|
||||
if pat.CacheKey() != expected {
|
||||
t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
|
||||
}
|
||||
if pat.cacheable != cacheable {
|
||||
t.Errorf("Expected: %s, actual: %s (%s)", cacheable, pat.cacheable, patStr)
|
||||
t.Errorf("Expected: %t, actual: %t (%s)", cacheable, pat.cacheable, patStr)
|
||||
}
|
||||
clearPatternCache()
|
||||
}
|
||||
test(false, "foo !bar", "foo !bar", true)
|
||||
test(false, "foo | bar !baz", "foo | bar !baz", true)
|
||||
test(true, "foo bar baz", "foo bar baz", true)
|
||||
test(true, "foo bar baz", "foo\tbar\tbaz", true)
|
||||
test(true, "foo !bar", "foo", false)
|
||||
test(true, "foo !bar baz", "foo baz", false)
|
||||
test(true, "foo !bar baz", "foo\tbaz", false)
|
||||
test(true, "foo | bar baz", "baz", false)
|
||||
test(true, "foo | bar | baz", "", false)
|
||||
test(true, "foo | bar !baz", "", false)
|
||||
test(true, "| | | foo", "foo", true)
|
||||
test(true, "| | foo", "", false)
|
||||
test(true, "| | | foo", "foo", false)
|
||||
}
|
||||
|
||||
func TestCacheable(t *testing.T) {
|
||||
test := func(fuzzy bool, str string, expected string, cacheable bool) {
|
||||
pat := buildPattern(fuzzy, algo.FuzzyMatchV2, true, CaseSmart, true, true, false, true, []Range{}, Delimiter{}, []rune(str))
|
||||
if pat.CacheKey() != expected {
|
||||
t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
|
||||
}
|
||||
if cacheable != pat.cacheable {
|
||||
t.Errorf("Invalid Pattern.cacheable for \"%s\": %v (expected: %v)", str, pat.cacheable, cacheable)
|
||||
}
|
||||
}
|
||||
test(true, "foo bar", "foo\tbar", true)
|
||||
test(true, "foo 'bar", "foo\tbar", false)
|
||||
test(true, "foo !bar", "foo", false)
|
||||
|
||||
test(false, "foo bar", "foo\tbar", true)
|
||||
test(false, "foo 'bar", "foo", false)
|
||||
test(false, "foo '", "foo", true)
|
||||
test(false, "foo 'bar", "foo", false)
|
||||
test(false, "foo !bar", "foo", false)
|
||||
}
|
||||
|
||||
6  src/protector/protector.go  Normal file
@@ -0,0 +1,6 @@
//go:build !openbsd

package protector

// Protect calls OS specific protections like pledge on OpenBSD
func Protect() {}
10  src/protector/protector_openbsd.go  Normal file
@@ -0,0 +1,10 @@
//go:build openbsd

package protector

import "golang.org/x/sys/unix"

// Protect calls OS specific protections like pledge on OpenBSD
func Protect() {
	unix.PledgePromises("stdio dpath wpath rpath tty proc exec inet tmppath")
}
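For context, a hedged sketch of how a build-tagged Protect function like the one above is typically wired in: it is called once during startup and compiles to a no-op everywhere except OpenBSD. The main function below is illustrative only, not fzf's actual entry point.

package main

import "github.com/junegunn/fzf/src/protector"

func main() {
	// On OpenBSD this pledges the process to the listed promises;
	// on every other platform the call is an empty function.
	protector.Protect()
	// ... rest of program startup ...
}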
158  src/proxy.go  Normal file
@@ -0,0 +1,158 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/junegunn/fzf/src/tui"
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
const becomeSuffix = ".become"
|
||||
|
||||
func escapeSingleQuote(str string) string {
|
||||
return "'" + strings.ReplaceAll(str, "'", "'\\''") + "'"
|
||||
}
|
||||
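The quoting rule above can be illustrated on its own; this standalone sketch copies the function so the expected output can be shown in comments.

package main

import (
	"fmt"
	"strings"
)

func escapeSingleQuote(str string) string {
	return "'" + strings.ReplaceAll(str, "'", "'\\''") + "'"
}

func main() {
	// A single quote inside the string is closed, escaped, and reopened,
	// which is the standard POSIX-shell trick for quoting arbitrary text.
	fmt.Println(escapeSingleQuote("it's"))  // 'it'\''s'
	fmt.Println(escapeSingleQuote("plain")) // 'plain'
}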
|
||||
func fifo(name string) (string, error) {
|
||||
ns := time.Now().UnixNano()
|
||||
output := filepath.Join(os.TempDir(), fmt.Sprintf("fzf-%s-%d", name, ns))
|
||||
output, err := mkfifo(output, 0600)
|
||||
if err != nil {
|
||||
return output, err
|
||||
}
|
||||
return output, nil
|
||||
}
|
||||
|
||||
func runProxy(commandPrefix string, cmdBuilder func(temp string, needBash bool) (*exec.Cmd, error), opts *Options, withExports bool) (int, error) {
|
||||
output, err := fifo("proxy-output")
|
||||
if err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
defer os.Remove(output)
|
||||
|
||||
// Take the output
|
||||
go func() {
|
||||
withOutputPipe(output, func(outputFile io.ReadCloser) {
|
||||
if opts.Output == nil {
|
||||
io.Copy(os.Stdout, outputFile)
|
||||
} else {
|
||||
reader := bufio.NewReader(outputFile)
|
||||
sep := opts.PrintSep[0]
|
||||
for {
|
||||
item, err := reader.ReadString(sep)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
opts.Output <- item
|
||||
}
|
||||
}
|
||||
})
|
||||
}()
|
||||
|
||||
var command string
|
||||
commandPrefix += ` --no-force-tty-in --proxy-script "$0"`
|
||||
if opts.Input == nil && (opts.ForceTtyIn || util.IsTty(os.Stdin)) {
|
||||
command = fmt.Sprintf(`%s > %q`, commandPrefix, output)
|
||||
} else {
|
||||
input, err := fifo("proxy-input")
|
||||
if err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
defer os.Remove(input)
|
||||
|
||||
go func() {
|
||||
withInputPipe(input, func(inputFile io.WriteCloser) {
|
||||
if opts.Input == nil {
|
||||
io.Copy(inputFile, os.Stdin)
|
||||
} else {
|
||||
for item := range opts.Input {
|
||||
fmt.Fprint(inputFile, item+opts.PrintSep)
|
||||
}
|
||||
}
|
||||
})
|
||||
}()
|
||||
|
||||
if withExports {
|
||||
command = fmt.Sprintf(`%s < %q > %q`, commandPrefix, input, output)
|
||||
} else {
|
||||
// For mintty: cannot directly read named pipe from Go code
|
||||
command = fmt.Sprintf(`command cat %q | %s > %q`, input, commandPrefix, output)
|
||||
}
|
||||
}
|
||||
|
||||
// To ensure that the options are processed by a POSIX-compliant shell,
|
||||
// we need to write the command to a temporary file and execute it with sh.
|
||||
var exports []string
|
||||
needBash := false
|
||||
if withExports {
|
||||
validIdentifier := regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_]*$`)
|
||||
for _, pairStr := range os.Environ() {
|
||||
pair := strings.SplitN(pairStr, "=", 2)
|
||||
if validIdentifier.MatchString(pair[0]) {
|
||||
exports = append(exports, fmt.Sprintf("export %s=%s", pair[0], escapeSingleQuote(pair[1])))
|
||||
} else if strings.HasPrefix(pair[0], "BASH_FUNC_") && strings.HasSuffix(pair[0], "%%") {
|
||||
name := pair[0][10 : len(pair[0])-2]
|
||||
exports = append(exports, name+pair[1])
|
||||
exports = append(exports, "export -f "+name)
|
||||
needBash = true
|
||||
}
|
||||
}
|
||||
}
|
||||
temp := WriteTemporaryFile(append(exports, command), "\n")
|
||||
defer os.Remove(temp)
|
||||
|
||||
cmd, err := cmdBuilder(temp, needBash)
|
||||
if err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
cmd.Stderr = os.Stderr
|
||||
intChan := make(chan os.Signal, 1)
|
||||
defer close(intChan)
|
||||
go func() {
|
||||
if sig, valid := <-intChan; valid {
|
||||
cmd.Process.Signal(sig)
|
||||
}
|
||||
}()
|
||||
signal.Notify(intChan, os.Interrupt)
|
||||
if err := cmd.Run(); err != nil {
|
||||
if exitError, ok := err.(*exec.ExitError); ok {
|
||||
code := exitError.ExitCode()
|
||||
if code == ExitBecome {
|
||||
becomeFile := temp + becomeSuffix
|
||||
data, err := os.ReadFile(becomeFile)
|
||||
os.Remove(becomeFile)
|
||||
if err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
elems := strings.Split(string(data), "\x00")
|
||||
if len(elems) < 1 {
|
||||
return ExitError, errors.New("invalid become command")
|
||||
}
|
||||
command := elems[0]
|
||||
env := []string{}
|
||||
if len(elems) > 1 {
|
||||
env = elems[1:]
|
||||
}
|
||||
executor := util.NewExecutor(opts.WithShell)
|
||||
ttyin, err := tui.TtyIn()
|
||||
if err != nil {
|
||||
return ExitError, err
|
||||
}
|
||||
executor.Become(ttyin, env, command)
|
||||
}
|
||||
return code, err
|
||||
}
|
||||
}
|
||||
|
||||
return ExitOk, nil
|
||||
}
|
||||
41  src/proxy_unix.go  Normal file
@@ -0,0 +1,41 @@
//go:build !windows

package fzf

import (
	"io"
	"os"

	"golang.org/x/sys/unix"
)

func sh(bash bool) (string, error) {
	if bash {
		return "bash", nil
	}
	return "sh", nil
}

func mkfifo(path string, mode uint32) (string, error) {
	return path, unix.Mkfifo(path, mode)
}

func withOutputPipe(output string, task func(io.ReadCloser)) error {
	outputFile, err := os.OpenFile(output, os.O_RDONLY, 0)
	if err != nil {
		return err
	}
	task(outputFile)
	outputFile.Close()
	return nil
}

func withInputPipe(input string, task func(io.WriteCloser)) error {
	inputFile, err := os.OpenFile(input, os.O_WRONLY, 0)
	if err != nil {
		return err
	}
	task(inputFile)
	inputFile.Close()
	return nil
}
85  src/proxy_windows.go  Normal file
@@ -0,0 +1,85 @@
//go:build windows

package fzf

import (
	"fmt"
	"io"
	"os/exec"
	"strconv"
	"strings"
	"sync/atomic"
)

var shPath atomic.Value

func sh(bash bool) (string, error) {
	if cached := shPath.Load(); cached != nil {
		return cached.(string), nil
	}

	name := "sh"
	if bash {
		name = "bash"
	}
	cmd := exec.Command("cygpath", "-w", "/usr/bin/"+name)
	bytes, err := cmd.Output()
	if err != nil {
		return "", err
	}

	sh := strings.TrimSpace(string(bytes))
	shPath.Store(sh)
	return sh, nil
}

func mkfifo(path string, mode uint32) (string, error) {
	m := strconv.FormatUint(uint64(mode), 8)
	sh, err := sh(false)
	if err != nil {
		return path, err
	}
	cmd := exec.Command(sh, "-c", fmt.Sprintf(`command mkfifo -m %s %q`, m, path))
	if err := cmd.Run(); err != nil {
		return path, err
	}
	return path + ".lnk", nil
}

func withOutputPipe(output string, task func(io.ReadCloser)) error {
	sh, err := sh(false)
	if err != nil {
		return err
	}
	cmd := exec.Command(sh, "-c", fmt.Sprintf(`command cat %q`, output))
	outputFile, err := cmd.StdoutPipe()
	if err != nil {
		return err
	}
	if err := cmd.Start(); err != nil {
		return err
	}

	task(outputFile)
	cmd.Wait()
	return nil
}

func withInputPipe(input string, task func(io.WriteCloser)) error {
	sh, err := sh(false)
	if err != nil {
		return err
	}
	cmd := exec.Command(sh, "-c", fmt.Sprintf(`command cat - > %q`, input))
	inputFile, err := cmd.StdinPipe()
	if err != nil {
		return err
	}
	if err := cmd.Start(); err != nil {
		return err
	}
	task(inputFile)
	inputFile.Close()
	cmd.Wait()
	return nil
}
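The sh helper above resolves the MSYS2/Cygwin shell path once and reuses it. A hedged, standalone sketch of that atomic.Value caching pattern, with a placeholder resolver standing in for the cygpath invocation:

package main

import (
	"fmt"
	"sync/atomic"
)

var cachedPath atomic.Value

// resolveOnce is a placeholder for an expensive lookup such as running
// `cygpath -w /usr/bin/sh`; here it just returns a fixed string.
func resolveOnce() string {
	return `C:\msys64\usr\bin\sh.exe`
}

func shellPath() string {
	if cached := cachedPath.Load(); cached != nil {
		return cached.(string)
	}
	p := resolveOnce()
	cachedPath.Store(p)
	return p
}

func main() {
	// Subsequent calls reuse the stored value instead of re-resolving.
	fmt.Println(shellPath())
	fmt.Println(shellPath())
}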
310  src/reader.go
@@ -1,78 +1,318 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"io"
|
||||
"io/fs"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/charlievieth/fastwalk"
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
// Reader reads from command or standard input
|
||||
type Reader struct {
|
||||
pusher func([]byte) bool
|
||||
executor *util.Executor
|
||||
eventBox *util.EventBox
|
||||
delimNil bool
|
||||
event int32
|
||||
finChan chan bool
|
||||
mutex sync.Mutex
|
||||
exec *exec.Cmd
|
||||
execOut io.ReadCloser
|
||||
command *string
|
||||
killed bool
|
||||
wait bool
|
||||
}
|
||||
|
||||
// NewReader returns new Reader object
|
||||
func NewReader(pusher func([]byte) bool, eventBox *util.EventBox, executor *util.Executor, delimNil bool, wait bool) *Reader {
|
||||
return &Reader{pusher, executor, eventBox, delimNil, int32(EvtReady), make(chan bool, 1), sync.Mutex{}, nil, nil, nil, false, wait}
|
||||
}
|
||||
|
||||
func (r *Reader) startEventPoller() {
|
||||
go func() {
|
||||
ptr := &r.event
|
||||
pollInterval := readerPollIntervalMin
|
||||
for {
|
||||
if atomic.CompareAndSwapInt32(ptr, int32(EvtReadNew), int32(EvtReady)) {
|
||||
r.eventBox.Set(EvtReadNew, (*string)(nil))
|
||||
pollInterval = readerPollIntervalMin
|
||||
} else if atomic.LoadInt32(ptr) == int32(EvtReadFin) {
|
||||
if r.wait {
|
||||
r.finChan <- true
|
||||
}
|
||||
return
|
||||
} else {
|
||||
pollInterval += readerPollIntervalStep
|
||||
if pollInterval > readerPollIntervalMax {
|
||||
pollInterval = readerPollIntervalMax
|
||||
}
|
||||
}
|
||||
time.Sleep(pollInterval)
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (r *Reader) fin(success bool) {
|
||||
atomic.StoreInt32(&r.event, int32(EvtReadFin))
|
||||
if r.wait {
|
||||
<-r.finChan
|
||||
}
|
||||
|
||||
r.mutex.Lock()
|
||||
ret := r.command
|
||||
if success || r.killed {
|
||||
ret = nil
|
||||
}
|
||||
r.mutex.Unlock()
|
||||
|
||||
r.eventBox.Set(EvtReadFin, ret)
|
||||
}
|
||||
|
||||
func (r *Reader) terminate() {
|
||||
r.mutex.Lock()
|
||||
r.killed = true
|
||||
if r.exec != nil && r.exec.Process != nil {
|
||||
r.execOut.Close()
|
||||
util.KillCommand(r.exec)
|
||||
} else {
|
||||
os.Stdin.Close()
|
||||
}
|
||||
r.mutex.Unlock()
|
||||
}
|
||||
|
||||
func (r *Reader) restart(command commandSpec, environ []string) {
|
||||
r.event = int32(EvtReady)
|
||||
r.startEventPoller()
|
||||
success := r.readFromCommand(command.command, environ)
|
||||
r.fin(success)
|
||||
removeFiles(command.tempFiles)
|
||||
}
|
||||
|
||||
func (r *Reader) readChannel(inputChan chan string) bool {
|
||||
for {
|
||||
item, more := <-inputChan
|
||||
if !more {
|
||||
break
|
||||
}
|
||||
if r.pusher([]byte(item)) {
|
||||
atomic.StoreInt32(&r.event, int32(EvtReadNew))
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// ReadSource reads data from the default command or from standard input
|
||||
func (r *Reader) ReadSource() {
|
||||
if util.IsTty() {
|
||||
func (r *Reader) ReadSource(inputChan chan string, root string, opts walkerOpts, ignores []string, initCmd string, initEnv []string) {
|
||||
r.startEventPoller()
|
||||
var success bool
|
||||
if inputChan != nil {
|
||||
success = r.readChannel(inputChan)
|
||||
} else if len(initCmd) > 0 {
|
||||
success = r.readFromCommand(initCmd, initEnv)
|
||||
} else if util.IsTty(os.Stdin) {
|
||||
cmd := os.Getenv("FZF_DEFAULT_COMMAND")
|
||||
if len(cmd) == 0 {
|
||||
cmd = defaultCommand
|
||||
success = r.readFiles(root, opts, ignores)
|
||||
} else {
|
||||
success = r.readFromCommand(cmd, initEnv)
|
||||
}
|
||||
r.readFromCommand(cmd)
|
||||
} else {
|
||||
r.readFromStdin()
|
||||
success = r.readFromStdin()
|
||||
}
|
||||
r.eventBox.Set(EvtReadFin, nil)
|
||||
r.fin(success)
|
||||
}
|
||||
|
||||
func (r *Reader) feed(src io.Reader) {
|
||||
/*
|
||||
readerSlabSize, ae := strconv.Atoi(os.Getenv("SLAB_KB"))
|
||||
if ae != nil {
|
||||
readerSlabSize = 128 * 1024
|
||||
} else {
|
||||
readerSlabSize *= 1024
|
||||
}
|
||||
readerBufferSize, be := strconv.Atoi(os.Getenv("BUF_KB"))
|
||||
if be != nil {
|
||||
readerBufferSize = 64 * 1024
|
||||
} else {
|
||||
readerBufferSize *= 1024
|
||||
}
|
||||
*/
|
||||
|
||||
delim := byte('\n')
|
||||
trimCR := util.IsWindows()
|
||||
if r.delimNil {
|
||||
delim = '\000'
|
||||
trimCR = false
|
||||
}
|
||||
reader := bufio.NewReaderSize(src, readerBufferSize)
|
||||
|
||||
slab := make([]byte, readerSlabSize)
|
||||
leftover := []byte{}
|
||||
var err error
|
||||
for {
|
||||
// ReadBytes returns err != nil if and only if the returned data does not
|
||||
// end in delim.
|
||||
bytea, err := reader.ReadBytes(delim)
|
||||
byteaLen := len(bytea)
|
||||
if len(bytea) > 0 {
|
||||
if err == nil {
|
||||
// get rid of carriage return if under Windows:
|
||||
if util.IsWindows() && byteaLen >= 2 && bytea[byteaLen-2] == byte('\r') {
|
||||
bytea = bytea[:byteaLen-2]
|
||||
} else {
|
||||
bytea = bytea[:byteaLen-1]
|
||||
}
|
||||
}
|
||||
if r.pusher(bytea) {
|
||||
r.eventBox.Set(EvtReadNew, nil)
|
||||
n := 0
|
||||
scope := slab[:util.Min(len(slab), readerBufferSize)]
|
||||
for i := 0; i < 100; i++ {
|
||||
n, err = src.Read(scope)
|
||||
if n > 0 || err != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
|
||||
// We're not making any progress after 100 tries. Stop.
|
||||
if n == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
buf := slab[:n]
|
||||
slab = slab[n:]
|
||||
|
||||
for len(buf) > 0 {
|
||||
if i := bytes.IndexByte(buf, delim); i >= 0 {
|
||||
// Found the delimiter
|
||||
slice := buf[:i+1]
|
||||
buf = buf[i+1:]
|
||||
if trimCR && len(slice) >= 2 && slice[len(slice)-2] == byte('\r') {
|
||||
slice = slice[:len(slice)-2]
|
||||
} else {
|
||||
slice = slice[:len(slice)-1]
|
||||
}
|
||||
if len(leftover) > 0 {
|
||||
slice = append(leftover, slice...)
|
||||
leftover = []byte{}
|
||||
}
|
||||
if (err == nil || len(slice) > 0) && r.pusher(slice) {
|
||||
atomic.StoreInt32(&r.event, int32(EvtReadNew))
|
||||
}
|
||||
} else {
|
||||
// Could not find the delimiter in the buffer
|
||||
// NOTE: We can further optimize this by keeping track of the cursor
|
||||
// position in the slab so that a straddling item that doesn't go
|
||||
// beyond the boundary of a slab doesn't need to be copied to
|
||||
// another buffer. However, the performance gain is negligible in
|
||||
// practice (< 0.1%) and is not
|
||||
// worth the added complexity.
|
||||
leftover = append(leftover, buf...)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if err == io.EOF {
|
||||
leftover = append(leftover, buf...)
|
||||
break
|
||||
}
|
||||
|
||||
if len(slab) == 0 {
|
||||
slab = make([]byte, readerSlabSize)
|
||||
}
|
||||
}
|
||||
if len(leftover) > 0 && r.pusher(leftover) {
|
||||
atomic.StoreInt32(&r.event, int32(EvtReadNew))
|
||||
}
|
||||
}
|
||||
|
||||
func (r *Reader) readFromStdin() {
|
||||
func (r *Reader) readFromStdin() bool {
|
||||
r.feed(os.Stdin)
|
||||
return true
|
||||
}
|
||||
|
||||
func (r *Reader) readFromCommand(cmd string) {
|
||||
listCommand := util.ExecCommand(cmd)
|
||||
out, err := listCommand.StdoutPipe()
|
||||
if err != nil {
|
||||
return
|
||||
func isSymlinkToDir(path string, de os.DirEntry) bool {
|
||||
if de.Type()&fs.ModeSymlink == 0 {
|
||||
return false
|
||||
}
|
||||
err = listCommand.Start()
|
||||
if err != nil {
|
||||
return
|
||||
if s, err := os.Stat(path); err == nil {
|
||||
return s.IsDir()
|
||||
}
|
||||
defer listCommand.Wait()
|
||||
r.feed(out)
|
||||
return false
|
||||
}
|
||||
|
||||
func trimPath(path string) string {
|
||||
bytes := stringBytes(path)
|
||||
|
||||
for len(bytes) > 1 && bytes[0] == '.' && (bytes[1] == '/' || bytes[1] == '\\') {
|
||||
bytes = bytes[2:]
|
||||
}
|
||||
|
||||
if len(bytes) == 0 {
|
||||
return "."
|
||||
}
|
||||
|
||||
return byteString(bytes)
|
||||
}
|
||||
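A self-contained sketch of what trimPath above does to walker output; stringBytes and byteString are fzf-internal helpers, so this illustration mirrors the same logic with plain strings.

package main

import "fmt"

// trimLeadingDotSlash mirrors the logic of trimPath: strip any number of
// leading "./" (or ".\") prefixes and fall back to "." for an empty result.
func trimLeadingDotSlash(path string) string {
	for len(path) > 1 && path[0] == '.' && (path[1] == '/' || path[1] == '\\') {
		path = path[2:]
	}
	if len(path) == 0 {
		return "."
	}
	return path
}

func main() {
	fmt.Println(trimLeadingDotSlash("./src/reader.go")) // src/reader.go
	fmt.Println(trimLeadingDotSlash("././a"))           // a
	fmt.Println(trimLeadingDotSlash("."))               // .
}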
|
||||
func (r *Reader) readFiles(root string, opts walkerOpts, ignores []string) bool {
|
||||
r.killed = false
|
||||
conf := fastwalk.Config{
|
||||
Follow: opts.follow,
|
||||
// Use forward slashes when running a Windows binary under WSL or MSYS
|
||||
ToSlash: fastwalk.DefaultToSlash(),
|
||||
Sort: fastwalk.SortFilesFirst,
|
||||
}
|
||||
fn := func(path string, de os.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
path = trimPath(path)
|
||||
if path != "." {
|
||||
isDir := de.IsDir()
|
||||
if isDir || opts.follow && isSymlinkToDir(path, de) {
|
||||
base := filepath.Base(path)
|
||||
if !opts.hidden && base[0] == '.' {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
for _, ignore := range ignores {
|
||||
if ignore == base {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
}
|
||||
}
|
||||
if ((opts.file && !isDir) || (opts.dir && isDir)) && r.pusher(stringBytes(path)) {
|
||||
atomic.StoreInt32(&r.event, int32(EvtReadNew))
|
||||
}
|
||||
}
|
||||
r.mutex.Lock()
|
||||
defer r.mutex.Unlock()
|
||||
if r.killed {
|
||||
return context.Canceled
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return fastwalk.Walk(&conf, root, fn) == nil
|
||||
}
|
||||
|
||||
func (r *Reader) readFromCommand(command string, environ []string) bool {
|
||||
r.mutex.Lock()
|
||||
r.killed = false
|
||||
r.command = &command
|
||||
r.exec = r.executor.ExecCommand(command, true)
|
||||
if environ != nil {
|
||||
r.exec.Env = environ
|
||||
}
|
||||
|
||||
var err error
|
||||
r.execOut, err = r.exec.StdoutPipe()
|
||||
if err != nil {
|
||||
r.exec = nil
|
||||
r.mutex.Unlock()
|
||||
return false
|
||||
}
|
||||
|
||||
err = r.exec.Start()
|
||||
if err != nil {
|
||||
r.exec = nil
|
||||
r.mutex.Unlock()
|
||||
return false
|
||||
}
|
||||
|
||||
r.mutex.Unlock()
|
||||
r.feed(r.execOut)
|
||||
return r.exec.Wait() == nil
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package fzf
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
@@ -9,9 +10,12 @@ import (
|
||||
func TestReadFromCommand(t *testing.T) {
|
||||
strs := []string{}
|
||||
eb := util.NewEventBox()
|
||||
reader := Reader{
|
||||
pusher: func(s []byte) bool { strs = append(strs, string(s)); return true },
|
||||
eventBox: eb}
|
||||
exec := util.NewExecutor("")
|
||||
reader := NewReader(
|
||||
func(s []byte) bool { strs = append(strs, string(s)); return true },
|
||||
eb, exec, false, true)
|
||||
|
||||
reader.startEventPoller()
|
||||
|
||||
// Check EventBox
|
||||
if eb.Peek(EvtReadNew) {
|
||||
@@ -19,21 +23,16 @@ func TestReadFromCommand(t *testing.T) {
|
||||
}
|
||||
|
||||
// Normal command
|
||||
reader.readFromCommand(`echo abc && echo def`)
|
||||
reader.fin(reader.readFromCommand(`echo abc&&echo def`, nil))
|
||||
if len(strs) != 2 || strs[0] != "abc" || strs[1] != "def" {
|
||||
t.Errorf("%s", strs)
|
||||
}
|
||||
|
||||
// Check EventBox again
|
||||
if !eb.Peek(EvtReadNew) {
|
||||
t.Error("EvtReadNew should be set yet")
|
||||
}
|
||||
eb.WaitFor(EvtReadFin)
|
||||
|
||||
// Wait should return immediately
|
||||
eb.Wait(func(events *util.Events) {
|
||||
if _, found := (*events)[EvtReadNew]; !found {
|
||||
t.Errorf("%s", events)
|
||||
}
|
||||
events.Clear()
|
||||
})
|
||||
|
||||
@@ -42,8 +41,14 @@ func TestReadFromCommand(t *testing.T) {
|
||||
t.Error("EvtReadNew should not be set yet")
|
||||
}
|
||||
|
||||
// Make sure that event poller is finished
|
||||
time.Sleep(readerPollIntervalMax)
|
||||
|
||||
// Restart event poller
|
||||
reader.startEventPoller()
|
||||
|
||||
// Failing command
|
||||
reader.readFromCommand(`no-such-command`)
|
||||
reader.fin(reader.readFromCommand(`no-such-command`, nil))
|
||||
strs = []string{}
|
||||
if len(strs) > 0 {
|
||||
t.Errorf("%s", strs)
|
||||
@@ -51,6 +56,9 @@ func TestReadFromCommand(t *testing.T) {
|
||||
|
||||
// Check EventBox again
|
||||
if eb.Peek(EvtReadNew) {
|
||||
t.Error("Command failed. EvtReadNew should be set")
|
||||
t.Error("Command failed. EvtReadNew should not be set")
|
||||
}
|
||||
if !eb.Peek(EvtReadFin) {
|
||||
t.Error("EvtReadFin should be set")
|
||||
}
|
||||
}
|
||||
|
||||
153  src/result.go
@@ -15,34 +15,31 @@ type Offset [2]int32
|
||||
type colorOffset struct {
|
||||
offset [2]int32
|
||||
color tui.ColorPair
|
||||
attr tui.Attr
|
||||
index int32
|
||||
}
|
||||
|
||||
type rank struct {
|
||||
points [4]uint16
|
||||
index int32
|
||||
match bool
|
||||
url *url
|
||||
}
|
||||
|
||||
type Result struct {
|
||||
item *Item
|
||||
rank rank
|
||||
item *Item
|
||||
points [4]uint16
|
||||
}
|
||||
|
||||
func buildResult(item *Item, offsets []Offset, score int, trimLen int) *Result {
|
||||
func buildResult(item *Item, offsets []Offset, score int) Result {
|
||||
if len(offsets) > 1 {
|
||||
sort.Sort(ByOrder(offsets))
|
||||
}
|
||||
|
||||
result := Result{item: item, rank: rank{index: item.index}}
|
||||
result := Result{item: item}
|
||||
numChars := item.text.Length()
|
||||
minBegin := math.MaxUint16
|
||||
minEnd := math.MaxUint16
|
||||
maxEnd := 0
|
||||
validOffsetFound := false
|
||||
for _, offset := range offsets {
|
||||
b, e := int(offset[0]), int(offset[1])
|
||||
if b < e {
|
||||
minBegin = util.Min(b, minBegin)
|
||||
minEnd = util.Min(e, minEnd)
|
||||
maxEnd = util.Max(e, maxEnd)
|
||||
validOffsetFound = true
|
||||
}
|
||||
@@ -54,9 +51,24 @@ func buildResult(item *Item, offsets []Offset, score int, trimLen int) *Result {
|
||||
case byScore:
|
||||
// Higher is better
|
||||
val = math.MaxUint16 - util.AsUint16(score)
|
||||
case byChunk:
|
||||
if validOffsetFound {
|
||||
b := minBegin
|
||||
e := maxEnd
|
||||
for ; b >= 1; b-- {
|
||||
if unicode.IsSpace(item.text.Get(b - 1)) {
|
||||
break
|
||||
}
|
||||
}
|
||||
for ; e < numChars; e++ {
|
||||
if unicode.IsSpace(item.text.Get(e)) {
|
||||
break
|
||||
}
|
||||
}
|
||||
val = util.AsUint16(e - b)
|
||||
}
|
||||
case byLength:
|
||||
// If offsets is empty, trimLen will be 0, but we don't care
|
||||
val = util.AsUint16(trimLen)
|
||||
val = item.TrimLength()
|
||||
case byBegin, byEnd:
|
||||
if validOffsetFound {
|
||||
whitePrefixLen := 0
|
||||
@@ -68,16 +80,16 @@ func buildResult(item *Item, offsets []Offset, score int, trimLen int) *Result {
|
||||
}
|
||||
}
|
||||
if criterion == byBegin {
|
||||
val = util.AsUint16(minBegin - whitePrefixLen)
|
||||
val = util.AsUint16(minEnd - whitePrefixLen)
|
||||
} else {
|
||||
val = util.AsUint16(math.MaxUint16 - math.MaxUint16*(maxEnd-whitePrefixLen)/trimLen)
|
||||
val = util.AsUint16(math.MaxUint16 - math.MaxUint16*(maxEnd-whitePrefixLen)/(int(item.TrimLength())+1))
|
||||
}
|
||||
}
|
||||
}
|
||||
result.rank.points[idx] = val
|
||||
result.points[3-idx] = val
|
||||
}
|
||||
|
||||
return &result
|
||||
return result
|
||||
}
|
||||
|
||||
// Sort criteria to use. Never changes once fzf is started.
|
||||
@@ -85,21 +97,21 @@ var sortCriteria []criterion
|
||||
|
||||
// Index returns ordinal index of the Item
|
||||
func (result *Result) Index() int32 {
|
||||
return result.item.index
|
||||
return result.item.Index()
|
||||
}
|
||||
|
||||
func minRank() rank {
|
||||
return rank{index: 0, points: [4]uint16{math.MaxUint16, 0, 0, 0}}
|
||||
func minRank() Result {
|
||||
return Result{item: &minItem, points: [4]uint16{math.MaxUint16, 0, 0, 0}}
|
||||
}
|
||||
|
||||
func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme, color tui.ColorPair, attr tui.Attr, current bool) []colorOffset {
|
||||
func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme, colBase tui.ColorPair, colMatch tui.ColorPair, current bool) []colorOffset {
|
||||
itemColors := result.item.Colors()
|
||||
|
||||
// No ANSI code, or --color=no
|
||||
// No ANSI codes
|
||||
if len(itemColors) == 0 {
|
||||
var offsets []colorOffset
|
||||
for _, off := range matchOffsets {
|
||||
offsets = append(offsets, colorOffset{offset: [2]int32{off[0], off[1]}, color: color, attr: attr})
|
||||
offsets = append(offsets, colorOffset{offset: [2]int32{off[0], off[1]}, color: colMatch, match: true})
|
||||
}
|
||||
return offsets
|
||||
}
|
||||
@@ -116,17 +128,21 @@ func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme,
|
||||
maxCol = ansi.offset[1]
|
||||
}
|
||||
}
|
||||
cols := make([]int, maxCol)
|
||||
|
||||
cols := make([]int, maxCol)
|
||||
for colorIndex, ansi := range itemColors {
|
||||
for i := ansi.offset[0]; i < ansi.offset[1]; i++ {
|
||||
cols[i] = colorIndex + 1 // XXX
|
||||
cols[i] = colorIndex + 1 // 1-based index of itemColors
|
||||
}
|
||||
}
|
||||
|
||||
for _, off := range matchOffsets {
|
||||
for i := off[0]; i < off[1]; i++ {
|
||||
cols[i] = -1
|
||||
// Negative of 1-based index of itemColors
|
||||
// - The extra -1 means highlighted
|
||||
if cols[i] >= 0 {
|
||||
cols[i] = cols[i]*-1 - 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -138,34 +154,58 @@ func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme,
|
||||
// --++++++++-- --++++++++++---
|
||||
curr := 0
|
||||
start := 0
|
||||
ansiToColorPair := func(ansi ansiOffset, base tui.ColorPair) tui.ColorPair {
|
||||
fg := ansi.color.fg
|
||||
bg := ansi.color.bg
|
||||
if fg == -1 {
|
||||
if current {
|
||||
fg = theme.Current.Color
|
||||
} else {
|
||||
fg = theme.Fg.Color
|
||||
}
|
||||
}
|
||||
if bg == -1 {
|
||||
if current {
|
||||
bg = theme.DarkBg.Color
|
||||
} else {
|
||||
bg = theme.Bg.Color
|
||||
}
|
||||
}
|
||||
return tui.NewColorPair(fg, bg, ansi.color.attr).MergeAttr(base)
|
||||
}
|
||||
var colors []colorOffset
|
||||
add := func(idx int) {
|
||||
if curr != 0 && idx > start {
|
||||
if curr == -1 {
|
||||
if curr < 0 {
|
||||
color := colMatch
|
||||
var url *url
|
||||
if curr < -1 && theme.Colored {
|
||||
ansi := itemColors[-curr-2]
|
||||
url = ansi.color.url
|
||||
origColor := ansiToColorPair(ansi, colMatch)
|
||||
// hl or hl+ only sets the foreground color, so colMatch is the
|
||||
// combination of either [hl and bg] or [hl+ and bg+].
|
||||
//
|
||||
// If the original text already has background color, and the
|
||||
// foreground color of colMatch is -1, we shouldn't only apply the
|
||||
// background color of colMatch.
|
||||
// e.g. echo -e "\x1b[32;7mfoo\x1b[mbar" | fzf --ansi --color bg+:1,hl+:-1:underline
|
||||
// echo -e "\x1b[42mfoo\x1b[mbar" | fzf --ansi --color bg+:1,hl+:-1:underline
|
||||
if color.Fg().IsDefault() && origColor.HasBg() {
|
||||
color = origColor
|
||||
} else {
|
||||
color = origColor.MergeNonDefault(color)
|
||||
}
|
||||
}
|
||||
colors = append(colors, colorOffset{
|
||||
offset: [2]int32{int32(start), int32(idx)}, color: color, attr: attr})
|
||||
offset: [2]int32{int32(start), int32(idx)}, color: color, match: true, url: url})
|
||||
} else {
|
||||
ansi := itemColors[curr-1]
|
||||
fg := ansi.color.fg
|
||||
if fg == -1 {
|
||||
if current {
|
||||
fg = theme.Current
|
||||
} else {
|
||||
fg = theme.Fg
|
||||
}
|
||||
}
|
||||
bg := ansi.color.bg
|
||||
if bg == -1 {
|
||||
if current {
|
||||
bg = theme.DarkBg
|
||||
} else {
|
||||
bg = theme.Bg
|
||||
}
|
||||
}
|
||||
colors = append(colors, colorOffset{
|
||||
offset: [2]int32{int32(start), int32(idx)},
|
||||
color: tui.PairFor(fg, bg),
|
||||
attr: ansi.color.attr.Merge(attr)})
|
||||
color: ansiToColorPair(ansi, colBase),
|
||||
match: false,
|
||||
url: ansi.color.url})
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -198,7 +238,7 @@ func (a ByOrder) Less(i, j int) bool {
|
||||
}
|
||||
|
||||
// ByRelevance is for sorting Items
|
||||
type ByRelevance []*Result
|
||||
type ByRelevance []Result
|
||||
|
||||
func (a ByRelevance) Len() int {
|
||||
return len(a)
|
||||
@@ -209,11 +249,11 @@ func (a ByRelevance) Swap(i, j int) {
|
||||
}
|
||||
|
||||
func (a ByRelevance) Less(i, j int) bool {
|
||||
return compareRanks((*a[i]).rank, (*a[j]).rank, false)
|
||||
return compareRanks(a[i], a[j], false)
|
||||
}
|
||||
|
||||
// ByRelevanceTac is for sorting Items
|
||||
type ByRelevanceTac []*Result
|
||||
type ByRelevanceTac []Result
|
||||
|
||||
func (a ByRelevanceTac) Len() int {
|
||||
return len(a)
|
||||
@@ -224,18 +264,5 @@ func (a ByRelevanceTac) Swap(i, j int) {
|
||||
}
|
||||
|
||||
func (a ByRelevanceTac) Less(i, j int) bool {
|
||||
return compareRanks((*a[i]).rank, (*a[j]).rank, true)
|
||||
}
|
||||
|
||||
func compareRanks(irank rank, jrank rank, tac bool) bool {
|
||||
for idx := 0; idx < 4; idx++ {
|
||||
left := irank.points[idx]
|
||||
right := jrank.points[idx]
|
||||
if left < right {
|
||||
return true
|
||||
} else if left > right {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return (irank.index <= jrank.index) != tac
|
||||
return compareRanks(a[i], a[j], true)
|
||||
}
|
||||
|
||||
16  src/result_others.go  Normal file
@@ -0,0 +1,16 @@
//go:build !386 && !amd64

package fzf

func compareRanks(irank Result, jrank Result, tac bool) bool {
	for idx := 3; idx >= 0; idx-- {
		left := irank.points[idx]
		right := jrank.points[idx]
		if left < right {
			return true
		} else if left > right {
			return false
		}
	}
	return (irank.item.Index() <= jrank.item.Index()) != tac
}
@@ -1,5 +1,3 @@
|
||||
// +build !tcell
|
||||
|
||||
package fzf
|
||||
|
||||
import (
|
||||
@@ -11,10 +9,15 @@ import (
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
func withIndex(i *Item, index int) *Item {
|
||||
(*i).text.Index = int32(index)
|
||||
return i
|
||||
}
|
||||
|
||||
func TestOffsetSort(t *testing.T) {
|
||||
offsets := []Offset{
|
||||
Offset{3, 5}, Offset{2, 7},
|
||||
Offset{1, 3}, Offset{2, 9}}
|
||||
{3, 5}, {2, 7},
|
||||
{1, 3}, {2, 9}}
|
||||
sort.Sort(ByOrder(offsets))
|
||||
|
||||
if offsets[0][0] != 1 || offsets[0][1] != 3 ||
|
||||
@@ -26,10 +29,10 @@ func TestOffsetSort(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestRankComparison(t *testing.T) {
|
||||
rank := func(vals ...uint16) rank {
|
||||
return rank{
|
||||
rank := func(vals ...uint16) Result {
|
||||
return Result{
|
||||
points: [4]uint16{vals[0], vals[1], vals[2], vals[3]},
|
||||
index: int32(vals[4])}
|
||||
item: &Item{text: util.Chars{Index: int32(vals[4])}}}
|
||||
}
|
||||
if compareRanks(rank(3, 0, 0, 0, 5), rank(2, 0, 0, 0, 7), false) ||
|
||||
!compareRanks(rank(3, 0, 0, 0, 5), rank(3, 0, 0, 0, 6), false) ||
|
||||
@@ -51,37 +54,42 @@ func TestResultRank(t *testing.T) {
|
||||
// FIXME global
|
||||
sortCriteria = []criterion{byScore, byLength}
|
||||
|
||||
strs := [][]rune{[]rune("foo"), []rune("foobar"), []rune("bar"), []rune("baz")}
|
||||
item1 := buildResult(&Item{text: util.RunesToChars(strs[0]), index: 1}, []Offset{}, 2, 3)
|
||||
if item1.rank.points[0] != math.MaxUint16-2 || // Bonus
|
||||
item1.rank.points[1] != 3 || // Length
|
||||
item1.rank.points[2] != 0 || // Unused
|
||||
item1.rank.points[3] != 0 || // Unused
|
||||
item1.item.index != 1 {
|
||||
t.Error(item1.rank)
|
||||
str := []rune("foo")
|
||||
item1 := buildResult(
|
||||
withIndex(&Item{text: util.RunesToChars(str)}, 1), []Offset{}, 2)
|
||||
if item1.points[3] != math.MaxUint16-2 || // Bonus
|
||||
item1.points[2] != 3 || // Length
|
||||
item1.points[1] != 0 || // Unused
|
||||
item1.points[0] != 0 || // Unused
|
||||
item1.item.Index() != 1 {
|
||||
t.Error(item1)
|
||||
}
|
||||
// Only differ in index
|
||||
item2 := buildResult(&Item{text: util.RunesToChars(strs[0])}, []Offset{}, 2, 3)
|
||||
item2 := buildResult(&Item{text: util.RunesToChars(str)}, []Offset{}, 2)
|
||||
|
||||
items := []*Result{item1, item2}
|
||||
items := []Result{item1, item2}
|
||||
sort.Sort(ByRelevance(items))
|
||||
if items[0] != item2 || items[1] != item1 {
|
||||
t.Error(items)
|
||||
}
|
||||
|
||||
items = []*Result{item2, item1, item1, item2}
|
||||
items = []Result{item2, item1, item1, item2}
|
||||
sort.Sort(ByRelevance(items))
|
||||
if items[0] != item2 || items[1] != item2 ||
|
||||
items[2] != item1 || items[3] != item1 {
|
||||
t.Error(items, item1, item1.item.index, item2, item2.item.index)
|
||||
t.Error(items, item1, item1.item.Index(), item2, item2.item.Index())
|
||||
}
|
||||
|
||||
// Sort by relevance
|
||||
item3 := buildResult(&Item{index: 2}, []Offset{Offset{1, 3}, Offset{5, 7}}, 3, 0)
|
||||
item4 := buildResult(&Item{index: 2}, []Offset{Offset{1, 2}, Offset{6, 7}}, 4, 0)
|
||||
item5 := buildResult(&Item{index: 2}, []Offset{Offset{1, 3}, Offset{5, 7}}, 5, 0)
|
||||
item6 := buildResult(&Item{index: 2}, []Offset{Offset{1, 2}, Offset{6, 7}}, 6, 0)
|
||||
items = []*Result{item1, item2, item3, item4, item5, item6}
|
||||
item3 := buildResult(
|
||||
withIndex(&Item{}, 2), []Offset{{1, 3}, {5, 7}}, 3)
|
||||
item4 := buildResult(
|
||||
withIndex(&Item{}, 2), []Offset{{1, 2}, {6, 7}}, 4)
|
||||
item5 := buildResult(
|
||||
withIndex(&Item{}, 2), []Offset{{1, 3}, {5, 7}}, 5)
|
||||
item6 := buildResult(
|
||||
withIndex(&Item{}, 2), []Offset{{1, 2}, {6, 7}}, 6)
|
||||
items = []Result{item1, item2, item3, item4, item5, item6}
|
||||
sort.Sort(ByRelevance(items))
|
||||
if !(items[0] == item6 && items[1] == item5 &&
|
||||
items[2] == item4 && items[3] == item3 &&
|
||||
@@ -90,36 +98,77 @@ func TestResultRank(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestChunkTiebreak(t *testing.T) {
|
||||
// FIXME global
|
||||
sortCriteria = []criterion{byScore, byChunk}
|
||||
|
||||
score := 100
|
||||
test := func(input string, offset Offset, chunk string) {
|
||||
item := buildResult(withIndex(&Item{text: util.RunesToChars([]rune(input))}, 1), []Offset{offset}, score)
|
||||
if !(item.points[3] == math.MaxUint16-uint16(score) && item.points[2] == uint16(len(chunk))) {
|
||||
t.Error(item.points)
|
||||
}
|
||||
}
|
||||
test("hello foobar goodbye", Offset{8, 9}, "foobar")
|
||||
test("hello foobar goodbye", Offset{7, 18}, "foobar goodbye")
|
||||
test("hello foobar goodbye", Offset{0, 1}, "hello")
|
||||
test("hello foobar goodbye", Offset{5, 7}, "hello foobar") // TBD
|
||||
}
|
||||
|
||||
func TestColorOffset(t *testing.T) {
|
||||
// ------------ 20 ---- -- ----
|
||||
// ++++++++ ++++++++++
|
||||
// --++++++++-- --++++++++++---
|
||||
|
||||
offsets := []Offset{Offset{5, 15}, Offset{25, 35}}
|
||||
offsets := []Offset{{5, 15}, {10, 12}, {25, 35}}
|
||||
item := Result{
|
||||
item: &Item{
|
||||
colors: &[]ansiOffset{
|
||||
ansiOffset{[2]int32{0, 20}, ansiState{1, 5, 0}},
|
||||
ansiOffset{[2]int32{22, 27}, ansiState{2, 6, tui.Bold}},
|
||||
ansiOffset{[2]int32{30, 32}, ansiState{3, 7, 0}},
|
||||
ansiOffset{[2]int32{33, 40}, ansiState{4, 8, tui.Bold}}}}}
|
||||
// [{[0 5] 9 false} {[5 15] 99 false} {[15 20] 9 false} {[22 25] 10 true} {[25 35] 99 false} {[35 40] 11 true}]
|
||||
{[2]int32{0, 20}, ansiState{1, 5, 0, -1, nil}},
|
||||
{[2]int32{22, 27}, ansiState{2, 6, tui.Bold, -1, nil}},
|
||||
{[2]int32{30, 32}, ansiState{3, 7, 0, -1, nil}},
|
||||
{[2]int32{33, 40}, ansiState{4, 8, tui.Bold, -1, nil}}}}}
|
||||
|
||||
colors := item.colorOffsets(offsets, tui.Dark256, 99, 0, true)
|
||||
assert := func(idx int, b int32, e int32, c tui.ColorPair, bold bool) {
|
||||
var attr tui.Attr
|
||||
if bold {
|
||||
attr = tui.Bold
|
||||
}
|
||||
colBase := tui.NewColorPair(89, 189, tui.AttrUndefined)
|
||||
colMatch := tui.NewColorPair(99, 199, tui.AttrUndefined)
|
||||
colors := item.colorOffsets(offsets, tui.Dark256, colBase, colMatch, true)
|
||||
assert := func(idx int, b int32, e int32, c tui.ColorPair) {
|
||||
o := colors[idx]
|
||||
if o.offset[0] != b || o.offset[1] != e || o.color != c || o.attr != attr {
|
||||
t.Error(o)
|
||||
if o.offset[0] != b || o.offset[1] != e || o.color != c {
|
||||
t.Error(o, b, e, c)
|
||||
}
|
||||
}
|
||||
assert(0, 0, 5, tui.ColUser, false)
|
||||
assert(1, 5, 15, 99, false)
|
||||
assert(2, 15, 20, tui.ColUser, false)
|
||||
assert(3, 22, 25, tui.ColUser+1, true)
|
||||
assert(4, 25, 35, 99, false)
|
||||
assert(5, 35, 40, tui.ColUser+2, true)
|
||||
// [{[0 5] {1 5 0}} {[5 15] {99 199 0}} {[15 20] {1 5 0}}
|
||||
// {[22 25] {2 6 1}} {[25 27] {99 199 1}} {[27 30] {99 199 0}}
|
||||
// {[30 32] {99 199 0}} {[32 33] {99 199 0}} {[33 35] {99 199 1}}
|
||||
// {[35 40] {4 8 1}}]
|
||||
assert(0, 0, 5, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
||||
assert(1, 5, 15, colMatch)
|
||||
assert(2, 15, 20, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
||||
assert(3, 22, 25, tui.NewColorPair(2, 6, tui.Bold))
|
||||
assert(4, 25, 27, colMatch.WithAttr(tui.Bold))
|
||||
assert(5, 27, 30, colMatch)
|
||||
assert(6, 30, 32, colMatch)
|
||||
assert(7, 32, 33, colMatch) // TODO: Should we merge consecutive blocks?
|
||||
assert(8, 33, 35, colMatch.WithAttr(tui.Bold))
|
||||
assert(9, 35, 40, tui.NewColorPair(4, 8, tui.Bold))
|
||||
|
||||
colRegular := tui.NewColorPair(-1, -1, tui.AttrUndefined)
|
||||
colUnderline := tui.NewColorPair(-1, -1, tui.Underline)
|
||||
colors = item.colorOffsets(offsets, tui.Dark256, colRegular, colUnderline, true)
|
||||
|
||||
// [{[0 5] {1 5 0}} {[5 15] {1 5 8}} {[15 20] {1 5 0}}
|
||||
// {[22 25] {2 6 1}} {[25 27] {2 6 9}} {[27 30] {-1 -1 8}}
|
||||
// {[30 32] {3 7 8}} {[32 33] {-1 -1 8}} {[33 35] {4 8 9}}
|
||||
// {[35 40] {4 8 1}}]
|
||||
assert(0, 0, 5, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
||||
assert(1, 5, 15, tui.NewColorPair(1, 5, tui.Underline))
|
||||
assert(2, 15, 20, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
||||
assert(3, 22, 25, tui.NewColorPair(2, 6, tui.Bold))
|
||||
assert(4, 25, 27, tui.NewColorPair(2, 6, tui.Bold|tui.Underline))
|
||||
assert(5, 27, 30, colUnderline)
|
||||
assert(6, 30, 32, tui.NewColorPair(3, 7, tui.Underline))
|
||||
assert(7, 32, 33, colUnderline)
|
||||
assert(8, 33, 35, tui.NewColorPair(4, 8, tui.Bold|tui.Underline))
|
||||
assert(9, 35, 40, tui.NewColorPair(4, 8, tui.Bold))
|
||||
}
|
||||
|
||||
16  src/result_x86.go  Normal file
@@ -0,0 +1,16 @@
//go:build 386 || amd64

package fzf

import "unsafe"

func compareRanks(irank Result, jrank Result, tac bool) bool {
	left := *(*uint64)(unsafe.Pointer(&irank.points[0]))
	right := *(*uint64)(unsafe.Pointer(&jrank.points[0]))
	if left < right {
		return true
	} else if left > right {
		return false
	}
	return (irank.item.Index() <= jrank.item.Index()) != tac
}
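Why this x86-only variant matches the portable loop in result_others.go: on a little-endian machine, reading the four uint16 points as one uint64 puts points[3] in the most significant position, so a single integer comparison orders by points[3], then points[2], and so on, exactly like the descending loop. A hedged, standalone check of that equivalence:

package main

import (
	"encoding/binary"
	"fmt"
)

// pack encodes four uint16 values the way a little-endian load of a
// [4]uint16 array would see them: index 0 least significant, index 3 most.
func pack(points [4]uint16) uint64 {
	var buf [8]byte
	for i, p := range points {
		binary.LittleEndian.PutUint16(buf[i*2:], p)
	}
	return binary.LittleEndian.Uint64(buf[:])
}

// lexLess is the portable comparison: points[3] first, then downwards.
func lexLess(a, b [4]uint16) bool {
	for idx := 3; idx >= 0; idx-- {
		if a[idx] != b[idx] {
			return a[idx] < b[idx]
		}
	}
	return false
}

func main() {
	a := [4]uint16{9, 0, 3, 1}
	b := [4]uint16{0, 0, 0, 2}
	// Both comparisons agree: b has the larger points[3], so a < b.
	fmt.Println(pack(a) < pack(b), lexLess(a, b)) // true true
}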
246  src/server.go  Normal file
@@ -0,0 +1,246 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"crypto/subtle"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var getRegex *regexp.Regexp
|
||||
|
||||
func init() {
|
||||
getRegex = regexp.MustCompile(`^GET /(?:\?([a-z0-9=&]+))? HTTP`)
|
||||
}
|
||||
|
||||
type getParams struct {
|
||||
limit int
|
||||
offset int
|
||||
}
|
||||
|
||||
const (
|
||||
crlf = "\r\n"
|
||||
httpOk = "HTTP/1.1 200 OK" + crlf
|
||||
httpBadRequest = "HTTP/1.1 400 Bad Request" + crlf
|
||||
httpUnauthorized = "HTTP/1.1 401 Unauthorized" + crlf
|
||||
httpUnavailable = "HTTP/1.1 503 Service Unavailable" + crlf
|
||||
httpReadTimeout = 10 * time.Second
|
||||
channelTimeout = 2 * time.Second
|
||||
jsonContentType = "Content-Type: application/json" + crlf
|
||||
maxContentLength = 1024 * 1024
|
||||
)
|
||||
|
||||
type httpServer struct {
|
||||
apiKey []byte
|
||||
actionChannel chan []*action
|
||||
getHandler func(getParams) string
|
||||
}
|
||||
|
||||
type listenAddress struct {
|
||||
host string
|
||||
port int
|
||||
}
|
||||
|
||||
func (addr listenAddress) IsLocal() bool {
|
||||
return addr.host == "localhost" || addr.host == "127.0.0.1"
|
||||
}
|
||||
|
||||
var defaultListenAddr = listenAddress{"localhost", 0}
|
||||
|
||||
func parseListenAddress(address string) (listenAddress, error) {
|
||||
parts := strings.SplitN(address, ":", 3)
|
||||
if len(parts) == 1 {
|
||||
parts = []string{"localhost", parts[0]}
|
||||
}
|
||||
if len(parts) != 2 {
|
||||
return defaultListenAddr, fmt.Errorf("invalid listen address: %s", address)
|
||||
}
|
||||
portStr := parts[len(parts)-1]
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil || port < 0 || port > 65535 {
|
||||
return defaultListenAddr, fmt.Errorf("invalid listen port: %s", portStr)
|
||||
}
|
||||
if len(parts[0]) == 0 {
|
||||
parts[0] = "localhost"
|
||||
}
|
||||
return listenAddress{parts[0], port}, nil
|
||||
}
|
||||
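Based on the parsing code above, listen addresses should resolve as sketched below. This is an illustrative in-package test written against the function shown here, not part of the actual test suite.

package fzf

import "testing"

// TestParseListenAddressSketch illustrates the parsing rules implemented above.
func TestParseListenAddressSketch(t *testing.T) {
	// A bare port defaults the host to localhost.
	if addr, err := parseListenAddress("6266"); err != nil || addr.host != "localhost" || addr.port != 6266 {
		t.Errorf("unexpected result: %v %v", addr, err)
	}
	// An empty host before the colon also defaults to localhost.
	if addr, err := parseListenAddress(":6266"); err != nil || addr.host != "localhost" || addr.port != 6266 {
		t.Errorf("unexpected result: %v %v", addr, err)
	}
	// Ports outside 0-65535 are rejected.
	if _, err := parseListenAddress("localhost:99999"); err == nil {
		t.Error("ports above 65535 should be rejected")
	}
}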
|
||||
func startHttpServer(address listenAddress, actionChannel chan []*action, getHandler func(getParams) string) (net.Listener, int, error) {
|
||||
host := address.host
|
||||
port := address.port
|
||||
apiKey := os.Getenv("FZF_API_KEY")
|
||||
if !address.IsLocal() && len(apiKey) == 0 {
|
||||
return nil, port, errors.New("FZF_API_KEY is required to allow remote access")
|
||||
}
|
||||
addrStr := fmt.Sprintf("%s:%d", host, port)
|
||||
listener, err := net.Listen("tcp", addrStr)
|
||||
if err != nil {
|
||||
return nil, port, fmt.Errorf("failed to listen on %s", addrStr)
|
||||
}
|
||||
if port == 0 {
|
||||
addr := listener.Addr().String()
|
||||
parts := strings.Split(addr, ":")
|
||||
if len(parts) < 2 {
|
||||
return nil, port, fmt.Errorf("cannot extract port: %s", addr)
|
||||
}
|
||||
var err error
|
||||
port, err = strconv.Atoi(parts[len(parts)-1])
|
||||
if err != nil {
|
||||
return nil, port, err
|
||||
}
|
||||
}
|
||||
|
||||
server := httpServer{
|
||||
apiKey: []byte(apiKey),
|
||||
actionChannel: actionChannel,
|
||||
getHandler: getHandler,
|
||||
}
|
||||
|
||||
go func() {
|
||||
for {
|
||||
conn, err := listener.Accept()
|
||||
if err != nil {
|
||||
if errors.Is(err, net.ErrClosed) {
|
||||
return
|
||||
}
|
||||
continue
|
||||
}
|
||||
conn.Write([]byte(server.handleHttpRequest(conn)))
|
||||
conn.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
return listener, port, nil
|
||||
}

// Here we are writing a simplistic HTTP server without using net/http
// package to reduce the size of the binary.
//
// * No --listen: 2.8MB
// * --listen with net/http: 5.7MB
// * --listen w/o net/http: 3.3MB
func (server *httpServer) handleHttpRequest(conn net.Conn) string {
	contentLength := 0
	apiKey := ""
	body := ""
	answer := func(code string, message string) string {
		message += "\n"
		return code + fmt.Sprintf("Content-Length: %d%s", len(message), crlf+crlf+message)
	}
	unauthorized := func(message string) string {
		return answer(httpUnauthorized, message)
	}
	bad := func(message string) string {
		return answer(httpBadRequest, message)
	}
	good := func(message string) string {
		return answer(httpOk+jsonContentType, message)
	}
	conn.SetReadDeadline(time.Now().Add(httpReadTimeout))
	scanner := bufio.NewScanner(conn)
	scanner.Split(func(data []byte, atEOF bool) (int, []byte, error) {
		found := bytes.Index(data, []byte(crlf))
		if found >= 0 {
			token := data[:found+len(crlf)]
			return len(token), token, nil
		}
		if atEOF || len(body)+len(data) >= contentLength {
			return 0, data, bufio.ErrFinalToken
		}
		return 0, nil, nil
	})

	section := 0
	for scanner.Scan() {
		text := scanner.Text()
		switch section {
		case 0:
			getMatch := getRegex.FindStringSubmatch(text)
			if len(getMatch) > 0 {
				response := server.getHandler(parseGetParams(getMatch[1]))
				if len(response) > 0 {
					return good(response)
				}
				return answer(httpUnavailable+jsonContentType, `{"error":"timeout"}`)
			} else if !strings.HasPrefix(text, "POST / HTTP") {
				return bad("invalid request method")
			}
			section++
		case 1:
			if text == crlf {
				if contentLength == 0 {
					return bad("content-length header missing")
				}
				section++
				continue
			}
			pair := strings.SplitN(text, ":", 2)
			if len(pair) == 2 {
				switch strings.ToLower(pair[0]) {
				case "content-length":
					length, err := strconv.Atoi(strings.TrimSpace(pair[1]))
					if err != nil || length <= 0 || length > maxContentLength {
						return bad("invalid content length")
					}
					contentLength = length
				case "x-api-key":
					apiKey = strings.TrimSpace(pair[1])
				}
			}
		case 2:
			body += text
		}
	}

	if len(server.apiKey) != 0 && subtle.ConstantTimeCompare([]byte(apiKey), server.apiKey) != 1 {
		return unauthorized("invalid api key")
	}

	if len(body) < contentLength {
		return bad("incomplete request")
	}
	body = body[:contentLength]

	actions, err := parseSingleActionList(strings.Trim(string(body), "\r\n"))
	if err != nil {
		return bad(err.Error())
	}
	if len(actions) == 0 {
		return bad("no action specified")
	}

	select {
	case server.actionChannel <- actions:
	case <-time.After(channelTimeout):
		return httpUnavailable + crlf
	}
	return httpOk + crlf
}
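A hedged sketch, not part of the diff, of the raw request this handler accepts: a
"POST / HTTP" request line, a mandatory content-length header, an optional x-api-key
header, and a body holding an action list. The address, key, and action string below
are placeholders:

func postActionsSketch() error {
	conn, err := net.Dial("tcp", "localhost:6266")
	if err != nil {
		return err
	}
	defer conn.Close()
	body := "change-query(hello)+first"
	// header names may be written in any case; the handler lowercases them before matching
	request := fmt.Sprintf("POST / HTTP/1.1\r\ncontent-length: %d\r\nx-api-key: secret\r\n\r\n%s", len(body), body)
	_, err = conn.Write([]byte(request))
	return err
}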

func parseGetParams(query string) getParams {
	params := getParams{limit: 100, offset: 0}
	for _, pair := range strings.Split(query, "&") {
		parts := strings.SplitN(pair, "=", 2)
		if len(parts) == 2 {
			switch parts[0] {
			case "limit", "offset":
				if val, err := strconv.Atoi(parts[1]); err == nil {
					if parts[0] == "limit" {
						params.limit = val
					} else {
						params.offset = val
					}
				}
			}
		}
	}
	return params
}
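Sketch, not part of the diff: parseGetParams starts from limit=100 and offset=0 and
only overrides them when the query string carries valid integers.

func exampleGetParams() {
	fmt.Println(parseGetParams("limit=10&offset=20")) // {10 20}
	fmt.Println(parseGetParams("limit=abc"))          // invalid value ignored: {100 0}
}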

src/terminal.go: 5033 lines changed (file diff suppressed because it is too large)

@@ -1,73 +1,659 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
"text/template"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
func newItem(str string) *Item {
|
||||
bytes := []byte(str)
|
||||
trimmed, _, _ := extractColor(str, nil, nil)
|
||||
return &Item{origText: &bytes, text: util.RunesToChars([]rune(trimmed))}
|
||||
func replacePlaceholderTest(template string, stripAnsi bool, delimiter Delimiter, printsep string, forcePlus bool, query string, allItems []*Item) string {
|
||||
replaced, _ := replacePlaceholder(replacePlaceholderParams{
|
||||
template: template,
|
||||
stripAnsi: stripAnsi,
|
||||
delimiter: delimiter,
|
||||
printsep: printsep,
|
||||
forcePlus: forcePlus,
|
||||
query: query,
|
||||
allItems: allItems,
|
||||
lastAction: actBackwardDeleteCharEof,
|
||||
prompt: "prompt",
|
||||
executor: util.NewExecutor(""),
|
||||
})
|
||||
return replaced
|
||||
}
|
||||
|
||||
func TestReplacePlaceholder(t *testing.T) {
|
||||
items1 := []*Item{newItem(" foo'bar \x1b[31mbaz\x1b[m")}
|
||||
item1 := newItem(" foo'bar \x1b[31mbaz\x1b[m")
|
||||
items1 := []*Item{item1, item1}
|
||||
items2 := []*Item{
|
||||
newItem("foo'bar \x1b[31mbaz\x1b[m"),
|
||||
newItem("foo'bar \x1b[31mbaz\x1b[m"),
|
||||
newItem("FOO'BAR \x1b[31mBAZ\x1b[m")}
|
||||
|
||||
delim := "'"
|
||||
var regex *regexp.Regexp
|
||||
|
||||
var result string
|
||||
check := func(expected string) {
|
||||
if result != expected {
|
||||
t.Errorf("expected: %s, actual: %s", expected, result)
|
||||
}
|
||||
}
|
||||
// helper function that converts template format into string and carries out the check()
|
||||
checkFormat := func(format string) {
|
||||
type quotes struct{ O, I, S string } // outer, inner quotes, print separator
|
||||
unixStyle := quotes{`'`, `'\''`, "\n"}
|
||||
windowsStyle := quotes{`^"`, `'`, "\n"}
|
||||
var effectiveStyle quotes
|
||||
|
||||
if util.IsWindows() {
|
||||
effectiveStyle = windowsStyle
|
||||
} else {
|
||||
effectiveStyle = unixStyle
|
||||
}
|
||||
|
||||
expected := templateToString(format, effectiveStyle)
|
||||
check(expected)
|
||||
}
|
||||
printsep := "\n"
|
||||
|
||||
/*
|
||||
Test multiple placeholders and the function parameters.
|
||||
*/
|
||||
|
||||
// {}, preserve ansi
|
||||
result = replacePlaceholder("echo {}", false, Delimiter{}, "query", items1)
|
||||
check("echo ' foo'\\''bar \x1b[31mbaz\x1b[m'")
|
||||
result = replacePlaceholderTest("echo {}", false, Delimiter{}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar \x1b[31mbaz\x1b[m{{.O}}")
|
||||
|
||||
// {}, strip ansi
|
||||
result = replacePlaceholder("echo {}", true, Delimiter{}, "query", items1)
|
||||
check("echo ' foo'\\''bar baz'")
|
||||
result = replacePlaceholderTest("echo {}", true, Delimiter{}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
// {}, with multiple items
|
||||
result = replacePlaceholder("echo {}", true, Delimiter{}, "query", items2)
|
||||
check("echo 'foo'\\''bar baz' 'FOO'\\''BAR BAZ'")
|
||||
result = replacePlaceholderTest("echo {}", true, Delimiter{}, printsep, false, "query", items2)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
// {..}, strip leading whitespaces, preserve ansi
|
||||
result = replacePlaceholder("echo {..}", false, Delimiter{}, "query", items1)
|
||||
check("echo 'foo'\\''bar \x1b[31mbaz\x1b[m'")
|
||||
result = replacePlaceholderTest("echo {..}", false, Delimiter{}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar \x1b[31mbaz\x1b[m{{.O}}")
|
||||
|
||||
// {..}, strip leading whitespaces, strip ansi
|
||||
result = replacePlaceholder("echo {..}", true, Delimiter{}, "query", items1)
|
||||
check("echo 'foo'\\''bar baz'")
|
||||
result = replacePlaceholderTest("echo {..}", true, Delimiter{}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
// {q}
|
||||
result = replacePlaceholder("echo {} {q}", true, Delimiter{}, "query", items1)
|
||||
check("echo ' foo'\\''bar baz' 'query'")
|
||||
result = replacePlaceholderTest("echo {} {q}", true, Delimiter{}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}} {{.O}}query{{.O}}")
|
||||
|
||||
// {q}, multiple items
|
||||
result = replacePlaceholder("echo {}{q}{}", true, Delimiter{}, "query 'string'", items2)
|
||||
check("echo 'foo'\\''bar baz' 'FOO'\\''BAR BAZ''query '\\''string'\\''''foo'\\''bar baz' 'FOO'\\''BAR BAZ'")
|
||||
result = replacePlaceholderTest("echo {+}{q}{+}", true, Delimiter{}, printsep, false, "query 'string'", items2)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}{{.O}}query {{.I}}string{{.I}}{{.O}}{{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}")
|
||||
|
||||
result = replacePlaceholder("echo {1}/{2}/{2,1}/{-1}/{-2}/{}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, "query", items1)
|
||||
check("echo 'foo'\\''bar'/'baz'/'bazfoo'\\''bar'/'baz'/'foo'\\''bar'/' foo'\\''bar baz'/'foo'\\''bar baz'/{n.t}/{}/{1}/{q}/''")
|
||||
result = replacePlaceholderTest("echo {}{q}{}", true, Delimiter{}, printsep, false, "query 'string'", items2)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}}{{.O}}query {{.I}}string{{.I}}{{.O}}{{.O}}foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
result = replacePlaceholder("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, "query", items2)
|
||||
check("echo 'foo'\\''bar' 'FOO'\\''BAR'/'baz' 'BAZ'/'baz' 'BAZ'/'foo'\\''bar' 'FOO'\\''BAR'/'foo'\\''bar baz' 'FOO'\\''BAR BAZ'/{n.t}/{}/{1}/{q}/'' ''")
|
||||
result = replacePlaceholderTest("echo {1}/{2}/{2,1}/{-1}/{-2}/{}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar{{.O}}/{{.O}}baz{{.O}}/{{.O}}bazfoo{{.I}}bar{{.O}}/{{.O}}baz{{.O}}/{{.O}}foo{{.I}}bar{{.O}}/{{.O}} foo{{.I}}bar baz{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, printsep, false, "query", items2)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar{{.O}}/{{.O}}baz{{.O}}/{{.O}}baz{{.O}}/{{.O}}foo{{.I}}bar{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {+1}/{+2}/{+-1}/{+-2}/{+..}/{n.t}/\\{}/\\{1}/\\{q}/{+3}", true, Delimiter{}, printsep, false, "query", items2)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}} {{.O}}{{.O}}")
|
||||
|
||||
// forcePlus
|
||||
result = replacePlaceholderTest("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, printsep, true, "query", items2)
|
||||
checkFormat("echo {{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}} {{.O}}{{.O}}")
|
||||
|
||||
// Whitespace preserving flag with "'" delimiter
|
||||
result = replacePlaceholderTest("echo {s1}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {s2}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}}bar baz{{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {s}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {s..}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
// Whitespace preserving flag with regex delimiter
|
||||
regex = regexp.MustCompile(`\w+`)
|
||||
|
||||
result = replacePlaceholderTest("echo {s1}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} {{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {s2}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}}{{.I}}{{.O}}")
|
||||
|
||||
result = replacePlaceholderTest("echo {s3}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} {{.O}}")
|
||||
|
||||
// No match
|
||||
result = replacePlaceholderTest("echo {}/{+}", true, Delimiter{}, printsep, false, "query", []*Item{nil, nil})
|
||||
check("echo /")
|
||||
|
||||
// No match, but with selections
|
||||
result = replacePlaceholderTest("echo {}/{+}", true, Delimiter{}, printsep, false, "query", []*Item{nil, item1})
|
||||
checkFormat("echo /{{.O}} foo{{.I}}bar baz{{.O}}")
|
||||
|
||||
// String delimiter
|
||||
delim := "'"
|
||||
result = replacePlaceholder("echo {}/{1}/{2}", true, Delimiter{str: &delim}, "query", items1)
|
||||
check("echo ' foo'\\''bar baz'/'foo'/'bar baz'")
|
||||
result = replacePlaceholderTest("echo {}/{1}/{2}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}/{{.O}}foo{{.O}}/{{.O}}bar baz{{.O}}")
|
||||
|
||||
// Regex delimiter
|
||||
regex := regexp.MustCompile("[oa]+")
|
||||
regex = regexp.MustCompile("[oa]+")
|
||||
// foo'bar baz
|
||||
result = replacePlaceholder("echo {}/{1}/{3}/{2..3}", true, Delimiter{regex: regex}, "query", items1)
|
||||
check("echo ' foo'\\''bar baz'/'f'/'r b'/''\\''bar b'")
|
||||
result = replacePlaceholderTest("echo {}/{1}/{3}/{2..3}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
|
||||
checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}/{{.O}}f{{.O}}/{{.O}}r b{{.O}}/{{.O}}{{.I}}bar b{{.O}}")
|
||||
|
||||
/*
|
||||
Test single placeholders, but focus on the placeholders' parameters (e.g. flags).
|
||||
see: TestParsePlaceholder
|
||||
*/
|
||||
items3 := []*Item{
|
||||
// single line
|
||||
newItem("1a 1b 1c 1d 1e 1f"),
|
||||
// multi line
|
||||
newItem("1a 1b 1c 1d 1e 1f"),
|
||||
newItem("2a 2b 2c 2d 2e 2f"),
|
||||
newItem("3a 3b 3c 3d 3e 3f"),
|
||||
newItem("4a 4b 4c 4d 4e 4f"),
|
||||
newItem("5a 5b 5c 5d 5e 5f"),
|
||||
newItem("6a 6b 6c 6d 6e 6f"),
|
||||
newItem("7a 7b 7c 7d 7e 7f"),
|
||||
}
|
||||
stripAnsi := false
|
||||
forcePlus := false
|
||||
query := "sample query"
|
||||
|
||||
templateToOutput := make(map[string]string)
|
||||
templateToFile := make(map[string]string) // same as above, but the file contents will be matched
|
||||
// I. item type placeholder
|
||||
templateToOutput[`{}`] = `{{.O}}1a 1b 1c 1d 1e 1f{{.O}}`
|
||||
templateToOutput[`{+}`] = `{{.O}}1a 1b 1c 1d 1e 1f{{.O}} {{.O}}2a 2b 2c 2d 2e 2f{{.O}} {{.O}}3a 3b 3c 3d 3e 3f{{.O}} {{.O}}4a 4b 4c 4d 4e 4f{{.O}} {{.O}}5a 5b 5c 5d 5e 5f{{.O}} {{.O}}6a 6b 6c 6d 6e 6f{{.O}} {{.O}}7a 7b 7c 7d 7e 7f{{.O}}`
|
||||
templateToOutput[`{n}`] = `0`
|
||||
templateToOutput[`{+n}`] = `0 0 0 0 0 0 0`
|
||||
templateToFile[`{f}`] = `1a 1b 1c 1d 1e 1f{{.S}}`
|
||||
templateToFile[`{+f}`] = `1a 1b 1c 1d 1e 1f{{.S}}2a 2b 2c 2d 2e 2f{{.S}}3a 3b 3c 3d 3e 3f{{.S}}4a 4b 4c 4d 4e 4f{{.S}}5a 5b 5c 5d 5e 5f{{.S}}6a 6b 6c 6d 6e 6f{{.S}}7a 7b 7c 7d 7e 7f{{.S}}`
|
||||
templateToFile[`{nf}`] = `0{{.S}}`
|
||||
templateToFile[`{+nf}`] = `0{{.S}}0{{.S}}0{{.S}}0{{.S}}0{{.S}}0{{.S}}0{{.S}}`
|
||||
|
||||
// II. token type placeholders
|
||||
templateToOutput[`{..}`] = templateToOutput[`{}`]
|
||||
templateToOutput[`{1..}`] = templateToOutput[`{}`]
|
||||
templateToOutput[`{..2}`] = `{{.O}}1a 1b{{.O}}`
|
||||
templateToOutput[`{1..2}`] = templateToOutput[`{..2}`]
|
||||
templateToOutput[`{-2..-1}`] = `{{.O}}1e 1f{{.O}}`
|
||||
// shorthand for x..x range
|
||||
templateToOutput[`{1}`] = `{{.O}}1a{{.O}}`
|
||||
templateToOutput[`{1..1}`] = templateToOutput[`{1}`]
|
||||
templateToOutput[`{-6}`] = templateToOutput[`{1}`]
|
||||
// multiple ranges
|
||||
templateToOutput[`{1,2}`] = templateToOutput[`{1..2}`]
|
||||
templateToOutput[`{1,2,4}`] = `{{.O}}1a 1b 1d{{.O}}`
|
||||
templateToOutput[`{1,2..4}`] = `{{.O}}1a 1b 1c 1d{{.O}}`
|
||||
templateToOutput[`{1..2,-4..-3}`] = `{{.O}}1a 1b 1c 1d{{.O}}`
|
||||
// flags
|
||||
templateToOutput[`{+1}`] = `{{.O}}1a{{.O}} {{.O}}2a{{.O}} {{.O}}3a{{.O}} {{.O}}4a{{.O}} {{.O}}5a{{.O}} {{.O}}6a{{.O}} {{.O}}7a{{.O}}`
|
||||
templateToOutput[`{+-1}`] = `{{.O}}1f{{.O}} {{.O}}2f{{.O}} {{.O}}3f{{.O}} {{.O}}4f{{.O}} {{.O}}5f{{.O}} {{.O}}6f{{.O}} {{.O}}7f{{.O}}`
|
||||
templateToOutput[`{s1}`] = `{{.O}}1a {{.O}}`
|
||||
templateToFile[`{f1}`] = `1a{{.S}}`
|
||||
templateToOutput[`{+s1..2}`] = `{{.O}}1a 1b {{.O}} {{.O}}2a 2b {{.O}} {{.O}}3a 3b {{.O}} {{.O}}4a 4b {{.O}} {{.O}}5a 5b {{.O}} {{.O}}6a 6b {{.O}} {{.O}}7a 7b {{.O}}`
|
||||
templateToFile[`{+sf1..2}`] = `1a 1b {{.S}}2a 2b {{.S}}3a 3b {{.S}}4a 4b {{.S}}5a 5b {{.S}}6a 6b {{.S}}7a 7b {{.S}}`
|
||||
|
||||
// III. query type placeholder
|
||||
// query flag is not removed after parsing, so it gets doubled
|
||||
// while the double q is invalid, it is useful here for testing purposes
|
||||
templateToOutput[`{q}`] = "{{.O}}" + query + "{{.O}}"
|
||||
templateToOutput[`{fzf:query}`] = "{{.O}}" + query + "{{.O}}"
|
||||
templateToOutput[`{fzf:action} {fzf:prompt}`] = "backward-delete-char-eof 'prompt'"
|
||||
|
||||
// IV. escaping placeholder
|
||||
templateToOutput[`\{}`] = `{}`
|
||||
templateToOutput[`\{q}`] = `{q}`
|
||||
templateToOutput[`\{fzf:query}`] = `{fzf:query}`
|
||||
templateToOutput[`\{fzf:action}`] = `{fzf:action}`
|
||||
templateToOutput[`\{++}`] = `{++}`
|
||||
templateToOutput[`{++}`] = templateToOutput[`{+}`]
|
||||
|
||||
for giveTemplate, wantOutput := range templateToOutput {
|
||||
result = replacePlaceholderTest(giveTemplate, stripAnsi, Delimiter{}, printsep, forcePlus, query, items3)
|
||||
checkFormat(wantOutput)
|
||||
}
|
||||
for giveTemplate, wantOutput := range templateToFile {
|
||||
path := replacePlaceholderTest(giveTemplate, stripAnsi, Delimiter{}, printsep, forcePlus, query, items3)
|
||||
|
||||
data, err := readFile(path)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot read the content of the temp file %s.", path)
|
||||
}
|
||||
result = string(data)
|
||||
|
||||
checkFormat(wantOutput)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuoteEntry(t *testing.T) {
|
||||
type quotes struct{ E, O, SQ, DQ, BS string } // standalone escape, outer, single and double quotes, backslash
|
||||
unixStyle := quotes{``, `'`, `'\''`, `"`, `\`}
|
||||
windowsStyle := quotes{`^`, `^"`, `'`, `\^"`, `\\`}
|
||||
var effectiveStyle quotes
|
||||
exec := util.NewExecutor("")
|
||||
|
||||
if util.IsWindows() {
|
||||
effectiveStyle = windowsStyle
|
||||
} else {
|
||||
effectiveStyle = unixStyle
|
||||
}
|
||||
|
||||
tests := map[string]string{
|
||||
`'`: `{{.O}}{{.SQ}}{{.O}}`,
|
||||
`"`: `{{.O}}{{.DQ}}{{.O}}`,
|
||||
`\`: `{{.O}}{{.BS}}{{.O}}`,
|
||||
`\"`: `{{.O}}{{.BS}}{{.DQ}}{{.O}}`,
|
||||
`"\\\"`: `{{.O}}{{.DQ}}{{.BS}}{{.BS}}{{.BS}}{{.DQ}}{{.O}}`,
|
||||
|
||||
`$`: `{{.O}}${{.O}}`,
|
||||
`$HOME`: `{{.O}}$HOME{{.O}}`,
|
||||
`'$HOME'`: `{{.O}}{{.SQ}}$HOME{{.SQ}}{{.O}}`,
|
||||
|
||||
`&`: `{{.O}}{{.E}}&{{.O}}`,
|
||||
`|`: `{{.O}}{{.E}}|{{.O}}`,
|
||||
`<`: `{{.O}}{{.E}}<{{.O}}`,
|
||||
`>`: `{{.O}}{{.E}}>{{.O}}`,
|
||||
`(`: `{{.O}}{{.E}}({{.O}}`,
|
||||
`)`: `{{.O}}{{.E}}){{.O}}`,
|
||||
`@`: `{{.O}}{{.E}}@{{.O}}`,
|
||||
`^`: `{{.O}}{{.E}}^{{.O}}`,
|
||||
`%`: `{{.O}}{{.E}}%{{.O}}`,
|
||||
`!`: `{{.O}}{{.E}}!{{.O}}`,
|
||||
`%USERPROFILE%`: `{{.O}}{{.E}}%USERPROFILE{{.E}}%{{.O}}`,
|
||||
`C:\Program Files (x86)\`: `{{.O}}C:{{.BS}}Program Files {{.E}}(x86{{.E}}){{.BS}}{{.O}}`,
|
||||
`"C:\Program Files"`: `{{.O}}{{.DQ}}C:{{.BS}}Program Files{{.DQ}}{{.O}}`,
|
||||
}
|
||||
|
||||
for input, expected := range tests {
|
||||
escaped := exec.QuoteEntry(input)
|
||||
expected = templateToString(expected, effectiveStyle)
|
||||
if escaped != expected {
|
||||
t.Errorf("Input: %s, expected: %s, actual %s", input, expected, escaped)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// purpose of this test is to demonstrate some shortcomings of fzf's templating system on Unix
|
||||
func TestUnixCommands(t *testing.T) {
|
||||
if util.IsWindows() {
|
||||
t.SkipNow()
|
||||
}
|
||||
tests := []testCase{
|
||||
// reference: give{template, query, items}, want{output OR match}
|
||||
|
||||
// 1) working examples
|
||||
|
||||
// paths that do not have to be evaluated will work fine when quoted
|
||||
{give{`grep foo {}`, ``, newItems(`test`)}, want{output: `grep foo 'test'`}},
|
||||
{give{`grep foo {}`, ``, newItems(`/home/user/test`)}, want{output: `grep foo '/home/user/test'`}},
|
||||
{give{`grep foo {}`, ``, newItems(`./test`)}, want{output: `grep foo './test'`}},
|
||||
|
||||
// only placeholders are escaped as data; this will look up the tilde character in a test file in your home directory
|
||||
// quoting the tilde is required (to be treated as string)
|
||||
{give{`grep {} ~/test`, ``, newItems(`~`)}, want{output: `grep '~' ~/test`}},
|
||||
|
||||
// 2) problematic examples
|
||||
// (not necessarily unexpected)
|
||||
|
||||
// paths that need to expand some part of it won't work (special characters and variables)
|
||||
{give{`cat {}`, ``, newItems(`~/test`)}, want{output: `cat '~/test'`}},
|
||||
{give{`cat {}`, ``, newItems(`$HOME/test`)}, want{output: `cat '$HOME/test'`}},
|
||||
}
|
||||
testCommands(t, tests)
|
||||
}
|
||||
|
||||
// purpose of this test is to demonstrate some shortcomings of fzf's templating system on Windows
|
||||
func TestWindowsCommands(t *testing.T) {
|
||||
// XXX Deprecated
|
||||
t.SkipNow()
|
||||
|
||||
tests := []testCase{
|
||||
// reference: give{template, query, items}, want{output OR match}
|
||||
|
||||
// 1) working examples
|
||||
|
||||
// example of a redundantly escaped backslash in the output; besides looking a bit ugly, it won't cause any issue
|
||||
{give{`type {}`, ``, newItems(`C:\test.txt`)}, want{output: `type ^"C:\\test.txt^"`}},
|
||||
{give{`rg -- "package" {}`, ``, newItems(`.\test.go`)}, want{output: `rg -- "package" ^".\\test.go^"`}},
|
||||
// example of mandatorily escaped backslash in the output, otherwise `rg -- "C:\test.txt"` is matching for tabulator
|
||||
{give{`rg -- {}`, ``, newItems(`C:\test.txt`)}, want{output: `rg -- ^"C:\\test.txt^"`}},
|
||||
// example of mandatorily escaped double quote in the output, otherwise `rg -- ""C:\\test.txt""` is not matching for the double quotes around the path
|
||||
{give{`rg -- {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `rg -- ^"\^"C:\\test.txt\^"^"`}},
|
||||
|
||||
// 2) problematic examples
|
||||
// (not necessarily unexpected)
|
||||
|
||||
// notepad++'s parser can't handle the `-n"12"` generated by fzf; it expects `-n12`
|
||||
{give{`notepad++ -n{1} {2}`, ``, newItems(`12 C:\Work\Test Folder\File.txt`)}, want{output: `notepad++ -n^"12^" ^"C:\\Work\\Test Folder\\File.txt^"`}},
|
||||
|
||||
// cat is parsing `\"` as a part of the file path, double quote is illegal character for paths on Windows
|
||||
// cat: "C:\\test.txt: Invalid argument
|
||||
{give{`cat {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `cat ^"\^"C:\\test.txt\^"^"`}},
|
||||
// cat: "C:\\test.txt": Invalid argument
|
||||
{give{`cmd /c {}`, ``, newItems(`cat "C:\test.txt"`)}, want{output: `cmd /c ^"cat \^"C:\\test.txt\^"^"`}},
|
||||
|
||||
// the "file" flag in the pattern won't create *.bat or *.cmd file so the command in the output tries to edit the file, instead of executing it
|
||||
// the temp file contains: `cat "C:\test.txt"`
|
||||
// TODO this should actually work
|
||||
{give{`cmd /c {f}`, ``, newItems(`cat "C:\test.txt"`)}, want{match: `^cmd /c .*\fzf-preview-[0-9]{9}$`}},
|
||||
}
|
||||
testCommands(t, tests)
|
||||
}
|
||||
|
||||
// purpose of this test is to demonstrate some shortcomings of fzf's templating system on Windows in Powershell
|
||||
func TestPowershellCommands(t *testing.T) {
|
||||
if !util.IsWindows() {
|
||||
t.SkipNow()
|
||||
}
|
||||
|
||||
tests := []testCase{
|
||||
// reference: give{template, query, items}, want{output OR match}
|
||||
|
||||
/*
|
||||
You can read each line in the following table as a pipeline that
|
||||
consists of a series of parsers that act upon your input (col. 1) and
|
||||
each cell represents the output value.
|
||||
|
||||
For example:
|
||||
- exec.Command("program.exe", `\''`)
|
||||
- goes to win32 api which will process it transparently as it contains no special characters, see [CommandLineToArgvW][].
|
||||
- powershell command will receive it as is, that is two arguments: a literal backslash and empty string in single quotes
|
||||
- native command run via/from powershell will receive only one argument: a literal backslash. Because extra parsing rules apply, see [NativeCallsFromPowershell][].
|
||||
- some¹ apps have internal parser, that requires one more level of escaping (yes, this is completely application-specific, but see terminal_test.go#TestWindowsCommands)
|
||||
|
||||
Character⁰ CommandLineToArgvW Powershell commands Native commands from Powershell Apps requiring escapes¹ | Being tested below
|
||||
---------- ------------------ ------------------------------ ------------------------------- -------------------------- | ------------------
|
||||
" empty string² missing argument error ... ... |
|
||||
\" literal " unbalanced quote error ... ... |
|
||||
'\"' literal '"' literal " empty string empty string (match all) | yes
|
||||
'\\\"' literal '\"' literal \" literal " literal " |
|
||||
---------- ------------------ ------------------------------ ------------------------------- -------------------------- | ------------------
|
||||
\ transparent transparent transparent regex error |
|
||||
'\' transparent literal \ literal \ regex error | yes
|
||||
\\ transparent transparent transparent literal \ |
|
||||
'\\' transparent literal \\ literal \\ literal \ |
|
||||
---------- ------------------ ------------------------------ ------------------------------- -------------------------- | ------------------
|
||||
' transparent unbalanced quote error ... ... |
|
||||
\' transparent literal \ and unb. quote error ... ... |
|
||||
\'' transparent literal \ and empty string literal \ regex error | no, but given as example above
|
||||
''' transparent unbalanced quote error ... ... |
|
||||
'''' transparent literal ' literal ' literal ' | yes
|
||||
---------- ------------------ ------------------------------ ------------------------------- -------------------------- | ------------------
|
||||
|
||||
⁰: character or characters 'x' as an argument to a program in Go's call: exec.Command("program.exe", `x`)
|
||||
¹: native commands like grep, git grep, ripgrep
|
||||
²: interpreted as a grouping quote, affects argument parser and gets removed from the result
|
||||
|
||||
[CommandLineToArgvW]: https://docs.microsoft.com/en-gb/windows/win32/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
|
||||
[NativeCallsFromPowershell]: https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_parsing?view=powershell-7.1#passing-arguments-that-contain-quote-characters
|
||||
*/
|
||||
|
||||
// 1) working examples
|
||||
|
||||
{give{`Get-Content {}`, ``, newItems(`C:\test.txt`)}, want{output: `Get-Content 'C:\test.txt'`}},
|
||||
{give{`rg -- "package" {}`, ``, newItems(`.\test.go`)}, want{output: `rg -- "package" '.\test.go'`}},
|
||||
|
||||
// example of escaping single quotes
|
||||
{give{`rg -- {}`, ``, newItems(`'foobar'`)}, want{output: `rg -- '''foobar'''`}},
|
||||
|
||||
// chaining powershells
|
||||
{give{`powershell -NoProfile -Command {}`, ``, newItems(`cat "C:\test.txt"`)}, want{output: `powershell -NoProfile -Command 'cat \"C:\test.txt\"'`}},
|
||||
|
||||
// 2) problematic examples
|
||||
// (not necessarily unexpected)
|
||||
|
||||
// looking for a path string will only work with escaped backslashes
|
||||
{give{`rg -- {}`, ``, newItems(`C:\test.txt`)}, want{output: `rg -- 'C:\test.txt'`}},
|
||||
// looking for a literal double quote will only work with triple escaped double quotes
|
||||
{give{`rg -- {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `rg -- '\"C:\test.txt\"'`}},
|
||||
|
||||
// Get-Content (i.e. cat alias) is parsing `"` as a part of the file path, returns an error:
|
||||
// Get-Content : Cannot find drive. A drive with the name '"C:' does not exist.
|
||||
{give{`cat {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `cat '\"C:\test.txt\"'`}},
|
||||
|
||||
// the "file" flag in the pattern won't create *.ps1 file so the powershell will offload this "unknown" filetype
|
||||
// to explorer, which will prompt user to pick editing program for the fzf-preview file
|
||||
// the temp file contains: `cat "C:\test.txt"`
|
||||
// TODO this should actually work
|
||||
{give{`powershell -NoProfile -Command {f}`, ``, newItems(`cat "C:\test.txt"`)}, want{match: `^powershell -NoProfile -Command .*\fzf-preview-[0-9]{9}$`}},
|
||||
}
|
||||
|
||||
// to force powershell-style escaping we temporarily set environment variable that fzf honors
|
||||
shellBackup := os.Getenv("SHELL")
|
||||
os.Setenv("SHELL", "powershell")
|
||||
testCommands(t, tests)
|
||||
os.Setenv("SHELL", shellBackup)
|
||||
}
|
||||
|
||||
/*
|
||||
Test typical valid placeholders and parsing of them.
|
||||
|
||||
Also since the parser assumes the input is matched with `placeholder` regex,
|
||||
the regex is tested here as well.
|
||||
*/
|
||||
func TestParsePlaceholder(t *testing.T) {
|
||||
// give, want pairs
|
||||
templates := map[string]string{
|
||||
// I. item type placeholder
|
||||
`{}`: `{}`,
|
||||
`{+}`: `{+}`,
|
||||
`{n}`: `{n}`,
|
||||
`{+n}`: `{+n}`,
|
||||
`{f}`: `{f}`,
|
||||
`{+nf}`: `{+nf}`,
|
||||
|
||||
// II. token type placeholders
|
||||
`{..}`: `{..}`,
|
||||
`{1..}`: `{1..}`,
|
||||
`{..2}`: `{..2}`,
|
||||
`{1..2}`: `{1..2}`,
|
||||
`{-2..-1}`: `{-2..-1}`,
|
||||
// shorthand for x..x range
|
||||
`{1}`: `{1}`,
|
||||
`{1..1}`: `{1..1}`,
|
||||
`{-6}`: `{-6}`,
|
||||
// multiple ranges
|
||||
`{1,2}`: `{1,2}`,
|
||||
`{1,2,4}`: `{1,2,4}`,
|
||||
`{1,2..4}`: `{1,2..4}`,
|
||||
`{1..2,-4..-3}`: `{1..2,-4..-3}`,
|
||||
// flags
|
||||
`{+1}`: `{+1}`,
|
||||
`{+-1}`: `{+-1}`,
|
||||
`{s1}`: `{s1}`,
|
||||
`{f1}`: `{f1}`,
|
||||
`{+s1..2}`: `{+s1..2}`,
|
||||
`{+sf1..2}`: `{+sf1..2}`,
|
||||
|
||||
// III. query type placeholder
|
||||
// query flag is not removed after parsing, so it gets doubled
|
||||
// while the double q is invalid, it is useful here for testing purposes
|
||||
`{q}`: `{qq}`,
|
||||
|
||||
// IV. escaping placeholder
|
||||
`\{}`: `{}`,
|
||||
`\{++}`: `{++}`,
|
||||
`{++}`: `{+}`,
|
||||
}
|
||||
|
||||
for giveTemplate, wantTemplate := range templates {
|
||||
if !placeholder.MatchString(giveTemplate) {
|
||||
t.Errorf(`given placeholder %s does not match placeholder regex, so attempt to parse it is unexpected`, giveTemplate)
|
||||
continue
|
||||
}
|
||||
|
||||
_, placeholderWithoutFlags, flags := parsePlaceholder(giveTemplate)
|
||||
gotTemplate := placeholderWithoutFlags[:1] + flags.encodePlaceholder() + placeholderWithoutFlags[1:]
|
||||
|
||||
if gotTemplate != wantTemplate {
|
||||
t.Errorf(`parsed placeholder "%s" into "%s", but want "%s"`, giveTemplate, gotTemplate, wantTemplate)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* utilities section */
|
||||
|
||||
// Item represents one line in fzf UI. Usually it is relative path to files and folders.
|
||||
func newItem(str string) *Item {
|
||||
bytes := []byte(str)
|
||||
trimmed, _, _ := extractColor(str, nil, nil)
|
||||
return &Item{origText: &bytes, text: util.ToChars([]byte(trimmed))}
|
||||
}
|
||||
|
||||
// Functions tested in this file require an array of items (allItems). The array needs
// to consist of at least two nils. This is a helper function for that.
|
||||
func newItems(str ...string) []*Item {
|
||||
result := make([]*Item, util.Max(len(str), 2))
|
||||
for i, s := range str {
|
||||
result[i] = newItem(s)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// (for logging purposes)
|
||||
func (item *Item) String() string {
|
||||
return item.AsString(true)
|
||||
}
|
||||
|
||||
// Helper function to parse, execute and convert "text/template" to string. Panics on error.
|
||||
func templateToString(format string, data interface{}) string {
|
||||
bb := &bytes.Buffer{}
|
||||
|
||||
err := template.Must(template.New("").Parse(format)).Execute(bb, data)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return bb.String()
|
||||
}
|
||||
|
||||
// ad hoc types for test cases
|
||||
type give struct {
|
||||
template string
|
||||
query string
|
||||
allItems []*Item
|
||||
}
|
||||
type want struct {
|
||||
/*
|
||||
Unix:
|
||||
The `want.output` string is supposed to be formatted for evaluation by
|
||||
`sh -c command` system call.
|
||||
|
||||
Windows:
|
||||
The `want.output` string is supposed to be formatted for evaluation by
|
||||
`cmd.exe /s /c "command"` system call. The `/s` switch enables the so-called old
|
||||
behaviour, which is more favourable for nesting (possibly escaped)
|
||||
special characters. This is the relevant section of `help cmd`:
|
||||
|
||||
...old behavior is to see if the first character is
|
||||
a quote character and if so, strip the leading character and
|
||||
remove the last quote character on the command line, preserving
|
||||
any text after the last quote character.
|
||||
*/
|
||||
output string // literal output
|
||||
match string // output is matched against this regex (when output is empty string)
|
||||
}
|
||||
type testCase struct {
|
||||
give
|
||||
want
|
||||
}
|
||||
|
||||
func testCommands(t *testing.T, tests []testCase) {
|
||||
// common test parameters
|
||||
delim := "\t"
|
||||
delimiter := Delimiter{str: &delim}
|
||||
printsep := ""
|
||||
stripAnsi := false
|
||||
forcePlus := false
|
||||
|
||||
// evaluate the test cases
|
||||
for idx, test := range tests {
|
||||
gotOutput := replacePlaceholderTest(
|
||||
test.give.template, stripAnsi, delimiter, printsep, forcePlus,
|
||||
test.give.query,
|
||||
test.give.allItems)
|
||||
switch {
|
||||
case test.want.output != "":
|
||||
if gotOutput != test.want.output {
|
||||
t.Errorf("tests[%v]:\ngave{\n\ttemplate: '%s',\n\tquery: '%s',\n\tallItems: %s}\nand got '%s',\nbut want '%s'",
|
||||
idx,
|
||||
test.give.template, test.give.query, test.give.allItems,
|
||||
gotOutput, test.want.output)
|
||||
}
|
||||
case test.want.match != "":
|
||||
wantMatch := strings.ReplaceAll(test.want.match, `\`, `\\`)
|
||||
wantRegex := regexp.MustCompile(wantMatch)
|
||||
if !wantRegex.MatchString(gotOutput) {
|
||||
t.Errorf("tests[%v]:\ngave{\n\ttemplate: '%s',\n\tquery: '%s',\n\tallItems: %s}\nand got '%s',\nbut want '%s'",
|
||||
idx,
|
||||
test.give.template, test.give.query, test.give.allItems,
|
||||
gotOutput, test.want.match)
|
||||
}
|
||||
default:
|
||||
t.Errorf("tests[%v]: test case does not describe 'want' property", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// naive encoder of placeholder flags
|
||||
func (flags placeholderFlags) encodePlaceholder() string {
|
||||
encoded := ""
|
||||
if flags.plus {
|
||||
encoded += "+"
|
||||
}
|
||||
if flags.preserveSpace {
|
||||
encoded += "s"
|
||||
}
|
||||
if flags.number {
|
||||
encoded += "n"
|
||||
}
|
||||
if flags.file {
|
||||
encoded += "f"
|
||||
}
|
||||
if flags.forceUpdate { // FIXME
|
||||
encoded += "q"
|
||||
}
|
||||
return encoded
|
||||
}
|
||||
|
||||
// can be replaced with os.ReadFile() in go 1.16+
|
||||
func readFile(path string) ([]byte, error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
data := make([]byte, 0, 128)
|
||||
for {
|
||||
if len(data) >= cap(data) {
|
||||
d := append(data[:cap(data)], 0)
|
||||
data = d[:len(data)]
|
||||
}
|
||||
|
||||
n, err := file.Read(data[len(data):cap(data)])
|
||||
data = data[:len(data)+n]
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
err = nil
|
||||
}
|
||||
return data, err
|
||||
}
|
||||
}
|
||||
}
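A side note, not part of the diff: on Go 1.16 or newer the manual Open/Read loop above
collapses into a single standard-library call.

func readFileModern(path string) ([]byte, error) {
	return os.ReadFile(path) // os.ReadFile was added in Go 1.16
}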
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// +build !windows
|
||||
//go:build !windows
|
||||
|
||||
package fzf
|
||||
|
||||
@@ -6,8 +6,19 @@ import (
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
|
||||
"golang.org/x/sys/unix"
|
||||
)
|
||||
|
||||
func notifyOnResize(resizeChan chan<- os.Signal) {
|
||||
signal.Notify(resizeChan, syscall.SIGWINCH)
|
||||
}
|
||||
|
||||
func notifyStop(p *os.Process) {
|
||||
pid := p.Pid
|
||||
pgid, err := unix.Getpgid(pid)
|
||||
if err == nil {
|
||||
pid = pgid * -1
|
||||
}
|
||||
unix.Kill(pid, syscall.SIGTSTP)
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// +build windows
|
||||
//go:build windows
|
||||
|
||||
package fzf
|
||||
|
||||
@@ -9,3 +9,7 @@ import (
|
||||
func notifyOnResize(resizeChan chan<- os.Signal) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
func notifyStop(p *os.Process) {
|
||||
// NOOP
|
||||
}
|
||||
|
||||
src/tmux.go: new file, 60 lines
@@ -0,0 +1,60 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
|
||||
"github.com/junegunn/fzf/src/tui"
|
||||
)
|
||||
|
||||
func runTmux(args []string, opts *Options) (int, error) {
|
||||
// Prepare arguments
|
||||
fzf := args[0]
|
||||
args = append([]string{"--bind=ctrl-z:ignore"}, args[1:]...)
|
||||
if opts.BorderShape == tui.BorderUndefined {
|
||||
args = append(args, "--border")
|
||||
}
|
||||
argStr := escapeSingleQuote(fzf)
|
||||
for _, arg := range args {
|
||||
argStr += " " + escapeSingleQuote(arg)
|
||||
}
|
||||
argStr += ` --no-tmux --no-height`
|
||||
|
||||
// Get current directory
|
||||
dir, err := os.Getwd()
|
||||
if err != nil {
|
||||
dir = "."
|
||||
}
|
||||
|
||||
// Set tmux options for popup placement
|
||||
// C Both The centre of the terminal
|
||||
// R -x The right side of the terminal
|
||||
// P Both The bottom left of the pane
|
||||
// M Both The mouse position
|
||||
// W Both The window position on the status line
|
||||
// S -y The line above or below the status line
|
||||
tmuxArgs := []string{"display-popup", "-E", "-B", "-d", dir}
|
||||
switch opts.Tmux.position {
|
||||
case posUp:
|
||||
tmuxArgs = append(tmuxArgs, "-xC", "-y0")
|
||||
case posDown:
|
||||
tmuxArgs = append(tmuxArgs, "-xC", "-y9999")
|
||||
case posLeft:
|
||||
tmuxArgs = append(tmuxArgs, "-x0", "-yC")
|
||||
case posRight:
|
||||
tmuxArgs = append(tmuxArgs, "-xR", "-yC")
|
||||
case posCenter:
|
||||
tmuxArgs = append(tmuxArgs, "-xC", "-yC")
|
||||
}
|
||||
tmuxArgs = append(tmuxArgs, "-w"+opts.Tmux.width.String())
|
||||
tmuxArgs = append(tmuxArgs, "-h"+opts.Tmux.height.String())
|
||||
|
||||
return runProxy(argStr, func(temp string, needBash bool) (*exec.Cmd, error) {
|
||||
sh, err := sh(needBash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tmuxArgs = append(tmuxArgs, sh, temp)
|
||||
return exec.Command("tmux", tmuxArgs...), nil
|
||||
}, opts, true)
|
||||
}
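Sketch, not part of the diff: the argument slice built above for a centered popup with
80% width and 60% height, before runProxy appends the shell and the temp script path
(both of which are chosen at runtime).

var exampleTmuxArgs = []string{
	"display-popup", "-E", "-B", "-d", ".", // run the popup in the current directory
	"-xC", "-yC", // centered horizontally and vertically
	"-w80%", "-h60%",
}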
|
||||
@@ -1,6 +1,8 @@
|
||||
package fzf
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -20,7 +22,11 @@ type Range struct {
|
||||
type Token struct {
|
||||
text *util.Chars
|
||||
prefixLength int32
|
||||
trimLength int32
|
||||
}
|
||||
|
||||
// String returns the string representation of a Token.
|
||||
func (t Token) String() string {
|
||||
return fmt.Sprintf("Token{text: %s, prefixLength: %d}", t.text, t.prefixLength)
|
||||
}
|
||||
|
||||
// Delimiter for tokenizing the input
|
||||
@@ -29,6 +35,11 @@ type Delimiter struct {
|
||||
str *string
|
||||
}
|
||||
|
||||
// String returns the string representation of a Delimiter.
|
||||
func (d Delimiter) String() string {
|
||||
return fmt.Sprintf("Delimiter{regex: %v, str: &%q}", d.regex, *d.str)
|
||||
}
|
||||
|
||||
func newRange(begin int, end int) Range {
|
||||
if begin == 1 {
|
||||
begin = rangeEllipsis
|
||||
@@ -75,14 +86,14 @@ func ParseRange(str *string) (Range, bool) {
|
||||
return newRange(n, n), true
|
||||
}
|
||||
|
||||
func withPrefixLengths(tokens []util.Chars, begin int) []Token {
|
||||
func withPrefixLengths(tokens []string, begin int) []Token {
|
||||
ret := make([]Token, len(tokens))
|
||||
|
||||
prefixLength := begin
|
||||
for idx, token := range tokens {
|
||||
// NOTE: &tokens[idx] instead of &tokens
|
||||
ret[idx] = Token{&tokens[idx], int32(prefixLength), int32(token.TrimLength())}
|
||||
prefixLength += token.Length()
|
||||
for idx := range tokens {
|
||||
chars := util.ToChars(stringBytes(tokens[idx]))
|
||||
ret[idx] = Token{&chars, int32(prefixLength)}
|
||||
prefixLength += chars.Length()
|
||||
}
|
||||
return ret
|
||||
}
|
||||
@@ -93,16 +104,15 @@ const (
|
||||
awkWhite
|
||||
)
|
||||
|
||||
func awkTokenizer(input util.Chars) ([]util.Chars, int) {
|
||||
func awkTokenizer(input string) ([]string, int) {
|
||||
// 9, 32
|
||||
ret := []util.Chars{}
|
||||
ret := []string{}
|
||||
prefixLength := 0
|
||||
state := awkNil
|
||||
numChars := input.Length()
|
||||
begin := 0
|
||||
end := 0
|
||||
for idx := 0; idx < numChars; idx++ {
|
||||
r := input.Get(idx)
|
||||
for idx := 0; idx < len(input); idx++ {
|
||||
r := input[idx]
|
||||
white := r == 9 || r == 32
|
||||
switch state {
|
||||
case awkNil:
|
||||
@@ -120,19 +130,19 @@ func awkTokenizer(input util.Chars) ([]util.Chars, int) {
|
||||
if white {
|
||||
end = idx + 1
|
||||
} else {
|
||||
ret = append(ret, input.Slice(begin, end))
|
||||
ret = append(ret, input[begin:end])
|
||||
state, begin, end = awkBlack, idx, idx+1
|
||||
}
|
||||
}
|
||||
}
|
||||
if begin < end {
|
||||
ret = append(ret, input.Slice(begin, end))
|
||||
ret = append(ret, input[begin:end])
|
||||
}
|
||||
return ret, prefixLength
|
||||
}
|
||||
|
||||
// Tokenize tokenizes the given string with the delimiter
|
||||
func Tokenize(text util.Chars, delimiter Delimiter) []Token {
|
||||
func Tokenize(text string, delimiter Delimiter) []Token {
|
||||
if delimiter.str == nil && delimiter.regex == nil {
|
||||
// AWK-style (\S+\s*)
|
||||
tokens, prefixLength := awkTokenizer(text)
|
||||
@@ -140,36 +150,31 @@ func Tokenize(text util.Chars, delimiter Delimiter) []Token {
|
||||
}
|
||||
|
||||
if delimiter.str != nil {
|
||||
return withPrefixLengths(text.Split(*delimiter.str), 0)
|
||||
return withPrefixLengths(strings.SplitAfter(text, *delimiter.str), 0)
|
||||
}
|
||||
|
||||
// FIXME performance
|
||||
var tokens []string
|
||||
if delimiter.regex != nil {
|
||||
str := text.ToString()
|
||||
for len(str) > 0 {
|
||||
loc := delimiter.regex.FindStringIndex(str)
|
||||
if loc == nil {
|
||||
loc = []int{0, len(str)}
|
||||
}
|
||||
last := util.Max(loc[1], 1)
|
||||
tokens = append(tokens, str[:last])
|
||||
str = str[last:]
|
||||
locs := delimiter.regex.FindAllStringIndex(text, -1)
|
||||
begin := 0
|
||||
for _, loc := range locs {
|
||||
tokens = append(tokens, text[begin:loc[1]])
|
||||
begin = loc[1]
|
||||
}
|
||||
if begin < len(text) {
|
||||
tokens = append(tokens, text[begin:])
|
||||
}
|
||||
}
|
||||
asRunes := make([]util.Chars, len(tokens))
|
||||
for i, token := range tokens {
|
||||
asRunes[i] = util.RunesToChars([]rune(token))
|
||||
}
|
||||
return withPrefixLengths(asRunes, 0)
|
||||
return withPrefixLengths(tokens, 0)
|
||||
}
|
||||
|
||||
func joinTokens(tokens []Token) []rune {
|
||||
ret := []rune{}
|
||||
func joinTokens(tokens []Token) string {
|
||||
var output bytes.Buffer
|
||||
for _, token := range tokens {
|
||||
ret = append(ret, token.text.ToRunes()...)
|
||||
output.WriteString(token.text.ToString())
|
||||
}
|
||||
return ret
|
||||
return output.String()
|
||||
}
|
||||
|
||||
// Transform is used to transform the input when --with-nth option is given
|
||||
@@ -182,7 +187,7 @@ func Transform(tokens []Token, withNth []Range) []Token {
|
||||
if r.begin == r.end {
|
||||
idx := r.begin
|
||||
if idx == rangeEllipsis {
|
||||
chars := util.RunesToChars(joinTokens(tokens))
|
||||
chars := util.ToChars(stringBytes(joinTokens(tokens)))
|
||||
parts = append(parts, &chars)
|
||||
} else {
|
||||
if idx < 0 {
|
||||
@@ -225,15 +230,15 @@ func Transform(tokens []Token, withNth []Range) []Token {
|
||||
var merged util.Chars
|
||||
switch len(parts) {
|
||||
case 0:
|
||||
merged = util.RunesToChars([]rune{})
|
||||
merged = util.ToChars([]byte{})
|
||||
case 1:
|
||||
merged = *parts[0]
|
||||
default:
|
||||
runes := []rune{}
|
||||
var output bytes.Buffer
|
||||
for _, part := range parts {
|
||||
runes = append(runes, part.ToRunes()...)
|
||||
output.WriteString(part.ToString())
|
||||
}
|
||||
merged = util.RunesToChars(runes)
|
||||
merged = util.ToChars(output.Bytes())
|
||||
}
|
||||
|
||||
var prefixLength int32
|
||||
@@ -242,7 +247,7 @@ func Transform(tokens []Token, withNth []Range) []Token {
|
||||
} else {
|
||||
prefixLength = 0
|
||||
}
|
||||
transTokens[idx] = Token{&merged, prefixLength, int32(merged.TrimLength())}
|
||||
transTokens[idx] = Token{&merged, prefixLength}
|
||||
}
|
||||
return transTokens
|
||||
}
|
||||
|
||||
@@ -2,8 +2,6 @@ package fzf
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
)
|
||||
|
||||
func TestParseRange(t *testing.T) {
|
||||
@@ -11,35 +9,35 @@ func TestParseRange(t *testing.T) {
|
||||
i := ".."
|
||||
r, _ := ParseRange(&i)
|
||||
if r.begin != rangeEllipsis || r.end != rangeEllipsis {
|
||||
t.Errorf("%s", r)
|
||||
t.Errorf("%v", r)
|
||||
}
|
||||
}
|
||||
{
|
||||
i := "3.."
|
||||
r, _ := ParseRange(&i)
|
||||
if r.begin != 3 || r.end != rangeEllipsis {
|
||||
t.Errorf("%s", r)
|
||||
t.Errorf("%v", r)
|
||||
}
|
||||
}
|
||||
{
|
||||
i := "3..5"
|
||||
r, _ := ParseRange(&i)
|
||||
if r.begin != 3 || r.end != 5 {
|
||||
t.Errorf("%s", r)
|
||||
t.Errorf("%v", r)
|
||||
}
|
||||
}
|
||||
{
|
||||
i := "-3..-5"
|
||||
r, _ := ParseRange(&i)
|
||||
if r.begin != -3 || r.end != -5 {
|
||||
t.Errorf("%s", r)
|
||||
t.Errorf("%v", r)
|
||||
}
|
||||
}
|
||||
{
|
||||
i := "3"
|
||||
r, _ := ParseRange(&i)
|
||||
if r.begin != 3 || r.end != 3 {
|
||||
t.Errorf("%s", r)
|
||||
t.Errorf("%v", r)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -47,23 +45,23 @@ func TestParseRange(t *testing.T) {
|
||||
func TestTokenize(t *testing.T) {
|
||||
// AWK-style
|
||||
input := " abc: def: ghi "
|
||||
tokens := Tokenize(util.RunesToChars([]rune(input)), Delimiter{})
|
||||
if tokens[0].text.ToString() != "abc: " || tokens[0].prefixLength != 2 || tokens[0].trimLength != 4 {
|
||||
tokens := Tokenize(input, Delimiter{})
|
||||
if tokens[0].text.ToString() != "abc: " || tokens[0].prefixLength != 2 {
|
||||
t.Errorf("%s", tokens)
|
||||
}
|
||||
|
||||
// With delimiter
|
||||
tokens = Tokenize(util.RunesToChars([]rune(input)), delimiterRegexp(":"))
|
||||
if tokens[0].text.ToString() != " abc:" || tokens[0].prefixLength != 0 || tokens[0].trimLength != 4 {
|
||||
t.Errorf("%s", tokens)
|
||||
tokens = Tokenize(input, delimiterRegexp(":"))
|
||||
if tokens[0].text.ToString() != " abc:" || tokens[0].prefixLength != 0 {
|
||||
t.Error(tokens[0].text.ToString(), tokens[0].prefixLength)
|
||||
}
|
||||
|
||||
// With delimiter regex
|
||||
tokens = Tokenize(util.RunesToChars([]rune(input)), delimiterRegexp("\\s+"))
|
||||
if tokens[0].text.ToString() != " " || tokens[0].prefixLength != 0 || tokens[0].trimLength != 0 ||
|
||||
tokens[1].text.ToString() != "abc: " || tokens[1].prefixLength != 2 || tokens[1].trimLength != 4 ||
|
||||
tokens[2].text.ToString() != "def: " || tokens[2].prefixLength != 8 || tokens[2].trimLength != 4 ||
|
||||
tokens[3].text.ToString() != "ghi " || tokens[3].prefixLength != 14 || tokens[3].trimLength != 3 {
|
||||
tokens = Tokenize(input, delimiterRegexp("\\s+"))
|
||||
if tokens[0].text.ToString() != " " || tokens[0].prefixLength != 0 ||
|
||||
tokens[1].text.ToString() != "abc: " || tokens[1].prefixLength != 2 ||
|
||||
tokens[2].text.ToString() != "def: " || tokens[2].prefixLength != 8 ||
|
||||
tokens[3].text.ToString() != "ghi " || tokens[3].prefixLength != 14 {
|
||||
t.Errorf("%s", tokens)
|
||||
}
|
||||
}
|
||||
@@ -71,16 +69,16 @@ func TestTokenize(t *testing.T) {
|
||||
func TestTransform(t *testing.T) {
|
||||
input := " abc: def: ghi: jkl"
|
||||
{
|
||||
tokens := Tokenize(util.RunesToChars([]rune(input)), Delimiter{})
|
||||
tokens := Tokenize(input, Delimiter{})
|
||||
{
|
||||
ranges := splitNth("1,2,3")
|
||||
ranges, _ := splitNth("1,2,3")
|
||||
tx := Transform(tokens, ranges)
|
||||
if string(joinTokens(tx)) != "abc: def: ghi: " {
|
||||
if joinTokens(tx) != "abc: def: ghi: " {
|
||||
t.Errorf("%s", tx)
|
||||
}
|
||||
}
|
||||
{
|
||||
ranges := splitNth("1..2,3,2..,1")
|
||||
ranges, _ := splitNth("1..2,3,2..,1")
|
||||
tx := Transform(tokens, ranges)
|
||||
if string(joinTokens(tx)) != "abc: def: ghi: def: ghi: jklabc: " ||
|
||||
len(tx) != 4 ||
|
||||
@@ -93,11 +91,11 @@ func TestTransform(t *testing.T) {
|
||||
}
|
||||
}
|
||||
{
|
||||
tokens := Tokenize(util.RunesToChars([]rune(input)), delimiterRegexp(":"))
|
||||
tokens := Tokenize(input, delimiterRegexp(":"))
|
||||
{
|
||||
ranges := splitNth("1..2,3,2..,1")
|
||||
ranges, _ := splitNth("1..2,3,2..,1")
|
||||
tx := Transform(tokens, ranges)
|
||||
if string(joinTokens(tx)) != " abc: def: ghi: def: ghi: jkl abc:" ||
|
||||
if joinTokens(tx) != " abc: def: ghi: def: ghi: jkl abc:" ||
|
||||
len(tx) != 4 ||
|
||||
tx[0].text.ToString() != " abc: def:" || tx[0].prefixLength != 0 ||
|
||||
tx[1].text.ToString() != " ghi:" || tx[1].prefixLength != 12 ||
|
||||
@@ -110,5 +108,6 @@ func TestTransform(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestTransformIndexOutOfBounds(t *testing.T) {
|
||||
Transform([]Token{}, splitNth("1"))
|
||||
s, _ := splitNth("1")
|
||||
Transform([]Token{}, s)
|
||||
}
|
||||
|
||||
src/tui/dummy.go: new file, 53 lines
@@ -0,0 +1,53 @@
|
||||
//go:build !tcell && !windows
|
||||
|
||||
package tui
|
||||
|
||||
type Attr int32
|
||||
|
||||
func HasFullscreenRenderer() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
var DefaultBorderShape = BorderRounded
|
||||
|
||||
func (a Attr) Merge(b Attr) Attr {
|
||||
return a | b
|
||||
}
|
||||
|
||||
const (
|
||||
AttrUndefined = Attr(0)
|
||||
AttrRegular = Attr(1 << 8)
|
||||
AttrClear = Attr(1 << 9)
|
||||
|
||||
Bold = Attr(1)
|
||||
Dim = Attr(1 << 1)
|
||||
Italic = Attr(1 << 2)
|
||||
Underline = Attr(1 << 3)
|
||||
Blink = Attr(1 << 4)
|
||||
Blink2 = Attr(1 << 5)
|
||||
Reverse = Attr(1 << 6)
|
||||
StrikeThrough = Attr(1 << 7)
|
||||
)
|
||||
|
||||
func (r *FullscreenRenderer) Init() error { return nil }
|
||||
func (r *FullscreenRenderer) Resize(maxHeightFunc func(int) int) {}
|
||||
func (r *FullscreenRenderer) Pause(bool) {}
|
||||
func (r *FullscreenRenderer) Resume(bool, bool) {}
|
||||
func (r *FullscreenRenderer) PassThrough(string) {}
|
||||
func (r *FullscreenRenderer) Clear() {}
|
||||
func (r *FullscreenRenderer) NeedScrollbarRedraw() bool { return false }
|
||||
func (r *FullscreenRenderer) ShouldEmitResizeEvent() bool { return false }
|
||||
func (r *FullscreenRenderer) Refresh() {}
|
||||
func (r *FullscreenRenderer) Close() {}
|
||||
func (r *FullscreenRenderer) Size() TermSize { return TermSize{} }
|
||||
|
||||
func (r *FullscreenRenderer) GetChar() Event { return Event{} }
|
||||
func (r *FullscreenRenderer) Top() int { return 0 }
|
||||
func (r *FullscreenRenderer) MaxX() int { return 0 }
|
||||
func (r *FullscreenRenderer) MaxY() int { return 0 }
|
||||
|
||||
func (r *FullscreenRenderer) RefreshWindows(windows []Window) {}
|
||||
|
||||
func (r *FullscreenRenderer) NewWindow(top int, left int, width int, height int, preview bool, borderStyle BorderStyle) Window {
|
||||
return nil
|
||||
}
|
||||
src/tui/eventtype_string.go: new file, 122 lines
@@ -0,0 +1,122 @@
|
||||
// Code generated by "stringer -type=EventType"; DO NOT EDIT.
|
||||
|
||||
package tui
|
||||
|
||||
import "strconv"
|
||||
|
||||
func _() {
|
||||
// An "invalid array index" compiler error signifies that the constant values have changed.
|
||||
// Re-run the stringer command to generate them again.
|
||||
var x [1]struct{}
|
||||
_ = x[Rune-0]
|
||||
_ = x[CtrlA-1]
|
||||
_ = x[CtrlB-2]
|
||||
_ = x[CtrlC-3]
|
||||
_ = x[CtrlD-4]
|
||||
_ = x[CtrlE-5]
|
||||
_ = x[CtrlF-6]
|
||||
_ = x[CtrlG-7]
|
||||
_ = x[CtrlH-8]
|
||||
_ = x[Tab-9]
|
||||
_ = x[CtrlJ-10]
|
||||
_ = x[CtrlK-11]
|
||||
_ = x[CtrlL-12]
|
||||
_ = x[CtrlM-13]
|
||||
_ = x[CtrlN-14]
|
||||
_ = x[CtrlO-15]
|
||||
_ = x[CtrlP-16]
|
||||
_ = x[CtrlQ-17]
|
||||
_ = x[CtrlR-18]
|
||||
_ = x[CtrlS-19]
|
||||
_ = x[CtrlT-20]
|
||||
_ = x[CtrlU-21]
|
||||
_ = x[CtrlV-22]
|
||||
_ = x[CtrlW-23]
|
||||
_ = x[CtrlX-24]
|
||||
_ = x[CtrlY-25]
|
||||
_ = x[CtrlZ-26]
|
||||
_ = x[Esc-27]
|
||||
_ = x[CtrlSpace-28]
|
||||
_ = x[CtrlDelete-29]
|
||||
_ = x[CtrlBackSlash-30]
|
||||
_ = x[CtrlRightBracket-31]
|
||||
_ = x[CtrlCaret-32]
|
||||
_ = x[CtrlSlash-33]
|
||||
_ = x[ShiftTab-34]
|
||||
_ = x[Backspace-35]
|
||||
_ = x[Delete-36]
|
||||
_ = x[PageUp-37]
|
||||
_ = x[PageDown-38]
|
||||
_ = x[Up-39]
|
||||
_ = x[Down-40]
|
||||
_ = x[Left-41]
|
||||
_ = x[Right-42]
|
||||
_ = x[Home-43]
|
||||
_ = x[End-44]
|
||||
_ = x[Insert-45]
|
||||
_ = x[ShiftUp-46]
|
||||
_ = x[ShiftDown-47]
|
||||
_ = x[ShiftLeft-48]
|
||||
_ = x[ShiftRight-49]
|
||||
_ = x[ShiftDelete-50]
|
||||
_ = x[F1-51]
|
||||
_ = x[F2-52]
|
||||
_ = x[F3-53]
|
||||
_ = x[F4-54]
|
||||
_ = x[F5-55]
|
||||
_ = x[F6-56]
|
||||
_ = x[F7-57]
|
||||
_ = x[F8-58]
|
||||
_ = x[F9-59]
|
||||
_ = x[F10-60]
|
||||
_ = x[F11-61]
|
||||
_ = x[F12-62]
|
||||
_ = x[AltBackspace-63]
|
||||
_ = x[AltUp-64]
|
||||
_ = x[AltDown-65]
|
||||
_ = x[AltLeft-66]
|
||||
_ = x[AltRight-67]
|
||||
_ = x[AltShiftUp-68]
|
||||
_ = x[AltShiftDown-69]
|
||||
_ = x[AltShiftLeft-70]
|
||||
_ = x[AltShiftRight-71]
|
||||
_ = x[Alt-72]
|
||||
_ = x[CtrlAlt-73]
|
||||
_ = x[Invalid-74]
|
||||
_ = x[Fatal-75]
|
||||
_ = x[Mouse-76]
|
||||
_ = x[DoubleClick-77]
|
||||
_ = x[LeftClick-78]
|
||||
_ = x[RightClick-79]
|
||||
_ = x[SLeftClick-80]
|
||||
_ = x[SRightClick-81]
|
||||
_ = x[ScrollUp-82]
|
||||
_ = x[ScrollDown-83]
|
||||
_ = x[SScrollUp-84]
|
||||
_ = x[SScrollDown-85]
|
||||
_ = x[PreviewScrollUp-86]
|
||||
_ = x[PreviewScrollDown-87]
|
||||
_ = x[Resize-88]
|
||||
_ = x[Change-89]
|
||||
_ = x[BackwardEOF-90]
|
||||
_ = x[Start-91]
|
||||
_ = x[Load-92]
|
||||
_ = x[Focus-93]
|
||||
_ = x[One-94]
|
||||
_ = x[Zero-95]
|
||||
_ = x[Result-96]
|
||||
_ = x[Jump-97]
|
||||
_ = x[JumpCancel-98]
|
||||
_ = x[ClickHeader-99]
|
||||
}
|
||||
|
||||
const _EventType_name = "RuneCtrlACtrlBCtrlCCtrlDCtrlECtrlFCtrlGCtrlHTabCtrlJCtrlKCtrlLCtrlMCtrlNCtrlOCtrlPCtrlQCtrlRCtrlSCtrlTCtrlUCtrlVCtrlWCtrlXCtrlYCtrlZEscCtrlSpaceCtrlDeleteCtrlBackSlashCtrlRightBracketCtrlCaretCtrlSlashShiftTabBackspaceDeletePageUpPageDownUpDownLeftRightHomeEndInsertShiftUpShiftDownShiftLeftShiftRightShiftDeleteF1F2F3F4F5F6F7F8F9F10F11F12AltBackspaceAltUpAltDownAltLeftAltRightAltShiftUpAltShiftDownAltShiftLeftAltShiftRightAltCtrlAltInvalidFatalMouseDoubleClickLeftClickRightClickSLeftClickSRightClickScrollUpScrollDownSScrollUpSScrollDownPreviewScrollUpPreviewScrollDownResizeChangeBackwardEOFStartLoadFocusOneZeroResultJumpJumpCancelClickHeader"
|
||||
|
||||
var _EventType_index = [...]uint16{0, 4, 9, 14, 19, 24, 29, 34, 39, 44, 47, 52, 57, 62, 67, 72, 77, 82, 87, 92, 97, 102, 107, 112, 117, 122, 127, 132, 135, 144, 154, 167, 183, 192, 201, 209, 218, 224, 230, 238, 240, 244, 248, 253, 257, 260, 266, 273, 282, 291, 301, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 333, 336, 339, 351, 356, 363, 370, 378, 388, 400, 412, 425, 428, 435, 442, 447, 452, 463, 472, 482, 492, 503, 511, 521, 530, 541, 556, 573, 579, 585, 596, 601, 605, 610, 613, 617, 623, 627, 637, 648}
|
||||
|
||||
func (i EventType) String() string {
|
||||
if i < 0 || i >= EventType(len(_EventType_index)-1) {
|
||||
return "EventType(" + strconv.FormatInt(int64(i), 10) + ")"
|
||||
}
|
||||
return _EventType_name[_EventType_index[i]:_EventType_index[i+1]]
|
||||
}
|
||||
src/tui/light.go: new file, 1168 lines (file diff suppressed because it is too large)
src/tui/light_unix.go: new file, 125 lines
@@ -0,0 +1,125 @@
|
||||
//go:build !windows
|
||||
|
||||
package tui
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
"github.com/junegunn/fzf/src/util"
|
||||
"golang.org/x/sys/unix"
|
||||
"golang.org/x/term"
|
||||
)
|
||||
|
||||
func IsLightRendererSupported() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (r *LightRenderer) defaultTheme() *ColorTheme {
|
||||
if strings.Contains(os.Getenv("TERM"), "256") {
|
||||
return Dark256
|
||||
}
|
||||
colors, err := exec.Command("tput", "colors").Output()
|
||||
if err == nil && atoi(strings.TrimSpace(string(colors)), 16) > 16 {
|
||||
return Dark256
|
||||
}
|
||||
return Default16
|
||||
}
|
||||
|
||||
func (r *LightRenderer) fd() int {
|
||||
return int(r.ttyin.Fd())
|
||||
}
|
||||
|
||||
func (r *LightRenderer) initPlatform() (err error) {
|
||||
r.origState, err = term.MakeRaw(r.fd())
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *LightRenderer) closePlatform() {
|
||||
r.ttyout.Close()
|
||||
}
|
||||
|
||||
func openTty(mode int) (*os.File, error) {
|
||||
in, err := os.OpenFile(consoleDevice, mode, 0)
|
||||
if err != nil {
|
||||
tty := ttyname()
|
||||
if len(tty) > 0 {
|
||||
if in, err := os.OpenFile(tty, mode, 0); err == nil {
|
||||
return in, nil
|
||||
}
|
||||
}
|
||||
return nil, errors.New("failed to open " + consoleDevice)
|
||||
}
|
||||
return in, nil
|
||||
}
|
||||
|
||||
func openTtyIn() (*os.File, error) {
|
||||
return openTty(syscall.O_RDONLY)
|
||||
}
|
||||
|
||||
func openTtyOut() (*os.File, error) {
|
||||
return openTty(syscall.O_WRONLY)
|
||||
}
|
||||
|
||||
func (r *LightRenderer) setupTerminal() {
|
||||
term.MakeRaw(r.fd())
|
||||
}
|
||||
|
||||
func (r *LightRenderer) restoreTerminal() {
|
||||
term.Restore(r.fd(), r.origState)
|
||||
}
|
||||
|
||||
func (r *LightRenderer) updateTerminalSize() {
|
||||
width, height, err := term.GetSize(r.fd())
|
||||
|
||||
if err == nil {
|
||||
r.width = width
|
||||
r.height = r.maxHeightFunc(height)
|
||||
} else {
|
||||
r.width = getEnv("COLUMNS", defaultWidth)
|
||||
r.height = r.maxHeightFunc(getEnv("LINES", defaultHeight))
|
||||
}
|
||||
}
|
||||
|
||||
func (r *LightRenderer) findOffset() (row int, col int) {
|
||||
r.csi("6n")
|
||||
r.flush()
|
||||
var err error
|
||||
bytes := []byte{}
|
||||
for tries := 0; tries < offsetPollTries; tries++ {
|
||||
bytes, err = r.getBytesInternal(bytes, tries > 0)
|
||||
if err != nil {
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
offsets := offsetRegexp.FindSubmatch(bytes)
|
||||
if len(offsets) > 3 {
|
||||
// Add anything we skipped over to the input buffer
|
||||
r.buffer = append(r.buffer, offsets[1]...)
|
||||
return atoi(string(offsets[2]), 0) - 1, atoi(string(offsets[3]), 0) - 1
|
||||
}
|
||||
}
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
func (r *LightRenderer) getch(nonblock bool) (int, bool) {
|
||||
b := make([]byte, 1)
|
||||
fd := r.fd()
|
||||
util.SetNonblock(r.ttyin, nonblock)
|
||||
_, err := util.Read(fd, b)
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
return int(b[0]), true
|
||||
}
|
||||
|
||||
func (r *LightRenderer) Size() TermSize {
|
||||
ws, err := unix.IoctlGetWinsize(int(r.ttyin.Fd()), unix.TIOCGWINSZ)
|
||||
if err != nil {
|
||||
return TermSize{}
|
||||
}
|
||||
return TermSize{int(ws.Row), int(ws.Col), int(ws.Xpixel), int(ws.Ypixel)}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.